diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 298c808a55..fda1bb81d1 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -11,24 +11,47 @@ on: jobs: test: - runs-on: ubuntu-latest + strategy: + matrix: + include: + - os: ubuntu-latest + browser: chrome + # - os: windows-latest + # browser: chrome + + runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 + - name: Use Node.js 20.x - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: 20.x + + - name: Verify Chrome (Ubuntu) + if: matrix.os == 'ubuntu-latest' && matrix.browser == 'chrome' + run: | + google-chrome --version + + - name: Verify Chrome (Windows) + if: matrix.os == 'windows-latest' && matrix.browser == 'chrome' + run: | + & "C:\Program Files\Google\Chrome\Application\chrome.exe" --version + - name: Get node modules run: npm ci env: CI: true - - name: build and test + + - name: Build and test (Ubuntu) id: test - run: npm test + if: matrix.os == 'ubuntu-latest' + run: npm test -- --project=unit-tests continue-on-error: true env: CI: true + - name: Generate Visual Test Report if: always() run: node visual-report.js diff --git a/preview/index.html b/preview/index.html index d0f3b329ae..4092992316 100644 --- a/preview/index.html +++ b/preview/index.html @@ -18,29 +18,84 @@ - \ No newline at end of file + diff --git a/src/accessibility/gridOutput.js b/src/accessibility/gridOutput.js index a74ebe8966..21b900b24e 100644 --- a/src/accessibility/gridOutput.js +++ b/src/accessibility/gridOutput.js @@ -10,7 +10,7 @@ function gridOutput(p5, fn){ //updates gridOutput fn._updateGridOutput = function(idT) { - if (this._renderer && this._renderer instanceof p5.RendererGL) { + if (this._renderer && this._renderer.isP3D) { if (!this._didOutputGridWebGLMessage) { this._didOutputGridWebGLMessage = true; console.error('gridOutput() does not yet work in WebGL mode.'); diff --git 
a/src/accessibility/textOutput.js b/src/accessibility/textOutput.js index c1f166edae..971718fc26 100644 --- a/src/accessibility/textOutput.js +++ b/src/accessibility/textOutput.js @@ -10,7 +10,7 @@ function textOutput(p5, fn){ //updates textOutput fn._updateTextOutput = function(idT) { - if (this._renderer && this._renderer instanceof p5.RendererGL) { + if (this._renderer && this._renderer.isP3D) { if (!this._didOutputTextWebGLMessage) { this._didOutputTextWebGLMessage = true; console.error('textOutput() does not yet work in WebGL mode.'); diff --git a/src/core/constants.js b/src/core/constants.js index c203106730..e3a7e30d2e 100644 --- a/src/core/constants.js +++ b/src/core/constants.js @@ -72,6 +72,13 @@ export const WEBGL = 'webgl'; */ export const WEBGL2 = 'webgl2'; +/** + * A constant used for creating a WebGPU rendering context + * @property {'webgpu'} WEBGPU + * @final + */ +export const WEBGPU = 'webgpu'; + // ENVIRONMENT /** * @typedef {'default'} ARROW diff --git a/src/core/main.js b/src/core/main.js index 2feea320ca..fe8076ca9a 100644 --- a/src/core/main.js +++ b/src/core/main.js @@ -87,6 +87,7 @@ class p5 { this._curElement = null; this._elements = []; this._glAttributes = null; + this._webgpuAttributes = null; this._requestAnimId = 0; this._isGlobal = false; this._loop = true; diff --git a/src/core/p5.Renderer3D.js b/src/core/p5.Renderer3D.js new file mode 100644 index 0000000000..bbf42b330c --- /dev/null +++ b/src/core/p5.Renderer3D.js @@ -0,0 +1,1730 @@ +import * as constants from "../core/constants"; +import { Graphics } from "../core/p5.Graphics"; +import { Renderer } from './p5.Renderer'; +import GeometryBuilder from "../webgl/GeometryBuilder"; +import { Matrix } from "../math/p5.Matrix"; +import { Camera } from "../webgl/p5.Camera"; +import { Vector } from "../math/p5.Vector"; +import { ShapeBuilder } from "../webgl/ShapeBuilder"; +import { GeometryBufferCache } from "../webgl/GeometryBufferCache"; +import { filterParamDefaults } from 
"../image/const"; +import { PrimitiveToVerticesConverter } from "../shape/custom_shapes"; +import { Color } from "../color/p5.Color"; +import { Element } from "../dom/p5.Element"; +import { Framebuffer } from "../webgl/p5.Framebuffer"; +import { DataArray } from "../webgl/p5.DataArray"; +import { RenderBuffer } from "../webgl/p5.RenderBuffer"; +import { Image } from "../image/p5.Image"; +import { Texture } from "../webgl/p5.Texture"; + +export function getStrokeDefs(shaderConstant) { + const STROKE_CAP_ENUM = {}; + const STROKE_JOIN_ENUM = {}; + let lineDefs = ""; + const defineStrokeCapEnum = function (key, val) { + lineDefs += shaderConstant(`STROKE_CAP_${key}`, `${val}`, 'u32'); + STROKE_CAP_ENUM[constants[key]] = val; + }; + const defineStrokeJoinEnum = function (key, val) { + lineDefs += shaderConstant(`STROKE_JOIN_${key}`, `${val}`, 'u32'); + STROKE_JOIN_ENUM[constants[key]] = val; + }; + + // Define constants in line shaders for each type of cap/join, and also record + // the values in JS objects + defineStrokeCapEnum("ROUND", 0); + defineStrokeCapEnum("PROJECT", 1); + defineStrokeCapEnum("SQUARE", 2); + defineStrokeJoinEnum("ROUND", 0); + defineStrokeJoinEnum("MITER", 1); + defineStrokeJoinEnum("BEVEL", 2); + + return { STROKE_CAP_ENUM, STROKE_JOIN_ENUM, lineDefs }; +} + +const { STROKE_CAP_ENUM, STROKE_JOIN_ENUM } = getStrokeDefs(()=>""); + +export class Renderer3D extends Renderer { + constructor(pInst, w, h, isMainCanvas, elt) { + super(pInst, w, h, isMainCanvas); + + // Create new canvas + this.canvas = this.elt = elt || document.createElement("canvas"); + this.contextReady = this.setupContext(); + + if (this._isMainCanvas) { + // for pixel method sharing with pimage + this._pInst._curElement = this; + this._pInst.canvas = this.canvas; + } else { + // hide if offscreen buffer by default + this.canvas.style.display = "none"; + } + this.elt.id = "defaultCanvas0"; + this.elt.classList.add("p5Canvas"); + + // Set and return p5.Element + this.wrappedElt = 
new Element(this.elt, this._pInst); + + // Extend renderer with methods of p5.Element with getters + for (const p of Object.getOwnPropertyNames(Element.prototype)) { + if (p !== 'constructor' && p[0] !== '_') { + Object.defineProperty(this, p, { + get() { + return this.wrappedElt[p]; + } + }) + } + } + + const dimensions = this._adjustDimensions(w, h); + w = dimensions.adjustedWidth; + h = dimensions.adjustedHeight; + + this.width = w; + this.height = h; + + // Set canvas size + this.elt.width = w * this._pixelDensity; + this.elt.height = h * this._pixelDensity; + this.elt.style.width = `${w}px`; + this.elt.style.height = `${h}px`; + this._updateViewport(); + + // Attach canvas element to DOM + if (this._pInst._userNode) { + // user input node case + this._pInst._userNode.appendChild(this.elt); + } else { + //create main element + if (document.getElementsByTagName("main").length === 0) { + let m = document.createElement("main"); + document.body.appendChild(m); + } + //append canvas to main + document.getElementsByTagName("main")[0].appendChild(this.elt); + } + + this.isP3D = true; //lets us know we're in 3d mode + + // When constructing a new Geometry, this will represent the builder + this.geometryBuilder = undefined; + + // Push/pop state + this.states.uModelMatrix = new Matrix(4); + this.states.uViewMatrix = new Matrix(4); + this.states.uPMatrix = new Matrix(4); + + this.states.curCamera = new Camera(this); + this.states.uPMatrix.set(this.states.curCamera.projMatrix); + this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); + + this.states.enableLighting = false; + this.states.ambientLightColors = []; + this.states.specularColors = [1, 1, 1]; + this.states.directionalLightDirections = []; + this.states.directionalLightDiffuseColors = []; + this.states.directionalLightSpecularColors = []; + this.states.pointLightPositions = []; + this.states.pointLightDiffuseColors = []; + this.states.pointLightSpecularColors = []; + this.states.spotLightPositions = []; 
+ this.states.spotLightDirections = []; + this.states.spotLightDiffuseColors = []; + this.states.spotLightSpecularColors = []; + this.states.spotLightAngle = []; + this.states.spotLightConc = []; + this.states.activeImageLight = null; + + this.states.curFillColor = [1, 1, 1, 1]; + this.states.curAmbientColor = [1, 1, 1, 1]; + this.states.curSpecularColor = [0, 0, 0, 0]; + this.states.curEmissiveColor = [0, 0, 0, 0]; + this.states.curStrokeColor = [0, 0, 0, 1]; + + this.states.curBlendMode = constants.BLEND; + + this.states._hasSetAmbient = false; + this.states._useSpecularMaterial = false; + this.states._useEmissiveMaterial = false; + this.states._useNormalMaterial = false; + this.states._useShininess = 1; + this.states._useMetalness = 0; + + this.states.tint = [255, 255, 255, 255]; + + this.states.constantAttenuation = 1; + this.states.linearAttenuation = 0; + this.states.quadraticAttenuation = 0; + + this.states._currentNormal = new Vector(0, 0, 1); + + this.states.drawMode = constants.FILL; + + this.states._tex = null; + this.states.textureMode = constants.IMAGE; + this.states.textureWrapX = constants.CLAMP; + this.states.textureWrapY = constants.CLAMP; + + // erasing + this._isErasing = false; + + // simple lines + this._simpleLines = false; + + // clipping + this._clipDepths = []; + this._isClipApplied = false; + this._stencilTestOn = false; + + this.mixedAmbientLight = []; + this.mixedSpecularColor = []; + + // p5.framebuffer for this are calculated in getDiffusedTexture function + this.diffusedTextures = new Map(); + // p5.framebuffer for this are calculated in getSpecularTexture function + this.specularTextures = new Map(); + + this.preEraseBlend = undefined; + this._cachedFillStyle = [1, 1, 1, 1]; + this._cachedStrokeStyle = [0, 0, 0, 1]; + this._isBlending = false; + + this._useLineColor = false; + this._useVertexColor = false; + + this.registerEnabled = new Set(); + + // Camera + this.states.curCamera._computeCameraDefaultSettings(); + 
this.states.curCamera._setDefaultCamera(); + + // FilterCamera + this.filterCamera = new Camera(this); + this.filterCamera._computeCameraDefaultSettings(); + this.filterCamera._setDefaultCamera(); + // Information about the previous frame's touch object + // for executing orbitControl() + this.prevTouches = []; + // Velocity variable for use with orbitControl() + this.zoomVelocity = 0; + this.rotateVelocity = new Vector(0, 0); + this.moveVelocity = new Vector(0, 0); + // Flags for recording the state of zooming, rotation and moving + this.executeZoom = false; + this.executeRotateAndMove = false; + + this._drawingFilter = false; + this._drawingImage = false; + + this.specularShader = undefined; + this.sphereMapping = undefined; + this.diffusedShader = undefined; + this._baseFilterShader = undefined; + this._defaultLightShader = undefined; + this._defaultImmediateModeShader = undefined; + this._defaultNormalShader = undefined; + this._defaultColorShader = undefined; + this._defaultPointShader = undefined; + + this.states.userFillShader = undefined; + this.states.userStrokeShader = undefined; + this.states.userPointShader = undefined; + this.states.userImageShader = undefined; + + this.states.curveDetail = 1 / 4; + + // Used by beginShape/endShape functions to construct a p5.Geometry + this.shapeBuilder = new ShapeBuilder(this); + + this.geometryBufferCache = new GeometryBufferCache(this); + + this.curStrokeCap = constants.ROUND; + this.curStrokeJoin = constants.ROUND; + + // map of texture sources to textures created in this gl context via this.getTexture(src) + this.textures = new Map(); + + // set of framebuffers in use + this.framebuffers = new Set(); + // stack of active framebuffers + this.activeFramebuffers = []; + + // for post processing step + this.states.filterShader = undefined; + this.filterLayer = undefined; + this.filterLayerTemp = undefined; + this.defaultFilterShaders = {}; + + this.fontInfos = {}; + + this._curShader = undefined; + 
this.drawShapeCount = 1; + + this.scratchMat3 = new Matrix(3); + + // Whether or not to remove degenerate faces from geometry. This is usually + // set to false for performance. + this._validateFaces = false; + + this.buffers = { + fill: [ + new RenderBuffer( + 3, + "vertices", + "vertexBuffer", + "aPosition", + this, + this._vToNArray + ), + new RenderBuffer( + 3, + "vertexNormals", + "normalBuffer", + "aNormal", + this, + this._vToNArray + ), + new RenderBuffer( + 4, + "vertexColors", + "colorBuffer", + "aVertexColor", + this + ).default((geometry) => geometry.vertices.flatMap(() => [-1, -1, -1, -1])), + new RenderBuffer( + 3, + "vertexAmbients", + "ambientBuffer", + "aAmbientColor", + this + ), + new RenderBuffer(2, "uvs", "uvBuffer", "aTexCoord", this, (arr) => + arr.flat() + ), + ], + stroke: [ + new RenderBuffer( + 4, + "lineVertexColors", + "lineColorBuffer", + "aVertexColor", + this + ).default((geometry) => geometry.lineVertices.flatMap(() => [-1, -1, -1, -1])), + new RenderBuffer( + 3, + "lineVertices", + "lineVerticesBuffer", + "aPosition", + this + ), + new RenderBuffer( + 3, + "lineTangentsIn", + "lineTangentsInBuffer", + "aTangentIn", + this + ), + new RenderBuffer( + 3, + "lineTangentsOut", + "lineTangentsOutBuffer", + "aTangentOut", + this + ), + new RenderBuffer(1, "lineSides", "lineSidesBuffer", "aSide", this), + ], + text: [ + new RenderBuffer( + 3, + "vertices", + "vertexBuffer", + "aPosition", + this, + this._vToNArray + ), + new RenderBuffer(2, "uvs", "uvBuffer", "aTexCoord", this, (arr) => + arr.flat() + ), + ], + user: [], + }; + } + + //This is helper function to reset the context anytime the attributes + //are changed with setAttributes() + + async _resetContext(options, callback, ctor = Renderer3D) { + const w = this.width; + const h = this.height; + const defaultId = this.canvas.id; + const isPGraphics = this._pInst instanceof Graphics; + + // Preserve existing position and styles before recreation + const prevStyle = { + position: 
this.canvas.style.position, + top: this.canvas.style.top, + left: this.canvas.style.left, + }; + + if (isPGraphics) { + // Handle PGraphics: remove and recreate the canvas + const pg = this._pInst; + pg.canvas.parentNode.removeChild(pg.canvas); + pg.canvas = document.createElement("canvas"); + const node = pg._pInst._userNode || document.body; + node.appendChild(pg.canvas); + Element.call(pg, pg.canvas, pg._pInst); + // Restore previous width and height + pg.width = w; + pg.height = h; + } else { + // Handle main canvas: remove and recreate it + let c = this.canvas; + if (c) { + c.parentNode.removeChild(c); + } + c = document.createElement("canvas"); + c.id = defaultId; + // Attach the new canvas to the correct parent node + if (this._pInst._userNode) { + this._pInst._userNode.appendChild(c); + } else { + document.body.appendChild(c); + } + this._pInst.canvas = c; + this.canvas = c; + + // Restore the saved position + this.canvas.style.position = prevStyle.position; + this.canvas.style.top = prevStyle.top; + this.canvas.style.left = prevStyle.left; + } + + const renderer = new ctor( + this._pInst, + w, + h, + !isPGraphics, + this._pInst.canvas + ); + this._pInst._renderer = renderer; + + renderer._applyDefaults(); + + if (renderer.contextReady) { + await renderer.contextReady + } + + if (typeof callback === "function") { + //setTimeout with 0 forces the task to the back of the queue, this ensures that + //we finish switching out the renderer + setTimeout(() => { + callback.apply(window._renderer, options); + }, 0); + } + } + + remove() { + this.wrappedElt.remove(); + this.wrappedElt = null; + this.canvas = null; + this.elt = null; + } + + ////////////////////////////////////////////// + // Geometry Building + ////////////////////////////////////////////// + + /** + * Starts creating a new p5.Geometry. Subsequent shapes drawn will be added + * to the geometry and then returned when + * endGeometry() is called. 
One can also use + * buildGeometry() to pass a function that + * draws shapes. + * + * If you need to draw complex shapes every frame which don't change over time, + * combining them upfront with `beginGeometry()` and `endGeometry()` and then + * drawing that will run faster than repeatedly drawing the individual pieces. + * @private + */ + beginGeometry() { + if (this.geometryBuilder) { + throw new Error( + "It looks like `beginGeometry()` is being called while another p5.Geometry is already being build." + ); + } + this.geometryBuilder = new GeometryBuilder(this); + this.geometryBuilder.prevFillColor = this.states.fillColor; + this.fill(new Color([-1, -1, -1, -1])); + } + + /** + * Finishes creating a new p5.Geometry that was + * started using beginGeometry(). One can also + * use buildGeometry() to pass a function that + * draws shapes. + * @private + * + * @returns {p5.Geometry} The model that was built. + */ + endGeometry() { + if (!this.geometryBuilder) { + throw new Error( + "Make sure you call beginGeometry() before endGeometry()!" + ); + } + const geometry = this.geometryBuilder.finish(); + this.fill(this.geometryBuilder.prevFillColor); + this.geometryBuilder = undefined; + return geometry; + } + + /** + * Creates a new p5.Geometry that contains all + * the shapes drawn in a provided callback function. The returned combined shape + * can then be drawn all at once using model(). + * + * If you need to draw complex shapes every frame which don't change over time, + * combining them with `buildGeometry()` once and then drawing that will run + * faster than repeatedly drawing the individual pieces. + * + * One can also draw shapes directly between + * beginGeometry() and + * endGeometry() instead of using a callback + * function. + * @param {Function} callback A function that draws shapes. + * @returns {p5.Geometry} The model that was built from the callback function. 
+ */ + buildGeometry(callback) { + this.beginGeometry(); + callback(); + return this.endGeometry(); + } + + ////////////////////////////////////////////// + // Shape drawing + ////////////////////////////////////////////// + + beginShape(...args) { + super.beginShape(...args); + // TODO remove when shape refactor is complete + // this.shapeBuilder.beginShape(...args); + } + + curveDetail(d) { + if (d === undefined) { + return this.states.curveDetail; + } else { + this.states.setValue("curveDetail", d); + } + } + + drawShape(shape) { + const visitor = new PrimitiveToVerticesConverter({ + curveDetail: this.states.curveDetail, + }); + shape.accept(visitor); + this.shapeBuilder.constructFromContours(shape, visitor.contours); + + if (this.geometryBuilder) { + this.geometryBuilder.addImmediate( + this.shapeBuilder.geometry, + this.shapeBuilder.shapeMode, + { validateFaces: this._validateFaces } + ); + } else if (this.states.fillColor || this.states.strokeColor) { + // TODO: handle POINTS mode + this._drawGeometry(this.shapeBuilder.geometry, { + mode: this.shapeBuilder.shapeMode, + count: this.drawShapeCount, + }); + } + this.drawShapeCount = 1; + } + + endShape(mode, count) { + this.drawShapeCount = count; + super.endShape(mode, count); + } + + vertexProperty(...args) { + this.currentShape.vertexProperty(...args); + } + + normal(xorv, y, z) { + if (xorv instanceof Vector) { + this.states.setValue("_currentNormal", xorv); + } else { + this.states.setValue("_currentNormal", new Vector(xorv, y, z)); + } + this.updateShapeVertexProperties(); + } + + model(model, count = 1) { + if (model.vertices.length > 0) { + if (this.geometryBuilder) { + this.geometryBuilder.addRetained(model); + } else { + if (!this.geometryInHash(model.gid)) { + model._edgesToVertices(); + this._getOrMakeCachedBuffers(model); + } + + this._drawGeometry(model, { count }); + } + } + } + + _getOrMakeCachedBuffers(geometry) { + return this.geometryBufferCache.ensureCached(geometry); + } + + 
////////////////////////////////////////////// + // Rendering + ////////////////////////////////////////////// + + _drawGeometry(geometry, { mode = constants.TRIANGLES, count = 1 } = {}) { + for (const propName in geometry.userVertexProperties) { + const prop = geometry.userVertexProperties[propName]; + this.buffers.user.push( + new RenderBuffer( + prop.getDataSize(), + prop.getSrcName(), + prop.getDstName(), + prop.getName(), + this + ) + ); + } + + if ( + this.states.fillColor && + geometry.vertices.length >= 3 && + ![constants.LINES, constants.POINTS].includes(mode) + ) { + this._drawFills(geometry, { mode, count }); + } + + if (this.states.strokeColor && geometry.lineVertices.length >= 1) { + this._drawStrokes(geometry, { count }); + } + + this.buffers.user = []; + } + + _drawFills(geometry, { count, mode } = {}) { + this._useVertexColor = geometry.vertexColors.length > 0 && + !geometry.vertexColors.isDefault; + + const shader = + !this._drawingFilter && this.states.userFillShader + ? 
this.states.userFillShader + : this._getFillShader(); + shader.bindShader('fill'); + this._setGlobalUniforms(shader); + this._setFillUniforms(shader); + shader.bindTextures(); + + for (const buff of this.buffers.fill) { + buff._prepareBuffer(geometry, shader); + } + this._prepareUserAttributes(geometry, shader); + this._disableRemainingAttributes(shader); + + this._applyColorBlend( + this.states.curFillColor, + geometry.hasFillTransparency() + ); + + this._drawBuffers(geometry, { mode, count }); + + shader.unbindShader(); + } + + _drawStrokes(geometry, { count } = {}) { + + this._useLineColor = geometry.vertexStrokeColors.length > 0; + + const shader = this._getStrokeShader(); + shader.bindShader('stroke'); + this._setGlobalUniforms(shader); + this._setStrokeUniforms(shader); + shader.bindTextures(); + + for (const buff of this.buffers.stroke) { + buff._prepareBuffer(geometry, shader); + } + this._prepareUserAttributes(geometry, shader); + this._disableRemainingAttributes(shader); + + this._applyColorBlend( + this.states.curStrokeColor, + geometry.hasStrokeTransparency() + ); + + this._drawBuffers(geometry, {count}) + + shader.unbindShader(); + } + + _prepareUserAttributes(geometry, shader) { + for (const buff of this.buffers.user) { + if (!this._pInst.constructor.disableFriendleErrors) { + // Check for the right data size + const prop = geometry.userVertexProperties[buff.attr]; + if (prop) { + const adjustedLength = prop.getSrcArray().length / prop.getDataSize(); + if (adjustedLength > geometry.vertices.length) { + this._pInst.constructor._friendlyError( + `One of the geometries has a custom vertex property '${prop.getName()}' with more values than vertices. 
This is probably caused by directly using the Geometry.vertexProperty() method.`, + "vertexProperty()" + ); + } else if (adjustedLength < geometry.vertices.length) { + this._pInst.constructor._friendlyError( + `One of the geometries has a custom vertex property '${prop.getName()}' with fewer values than vertices. This is probably caused by directly using the Geometry.vertexProperty() method.`, + "vertexProperty()" + ); + } + } + } + buff._prepareBuffer(geometry, shader); + } + } + + _drawGeometryScaled(model, scaleX, scaleY, scaleZ) { + let originalModelMatrix = this.states.uModelMatrix; + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + try { + this.states.uModelMatrix.scale(scaleX, scaleY, scaleZ); + + if (this.geometryBuilder) { + this.geometryBuilder.addRetained(model); + } else { + this._drawGeometry(model); + } + } finally { + this.states.setValue("uModelMatrix", originalModelMatrix); + } + } + + _update() { + // reset model view and apply initial camera transform + // (containing only look at info; no projection). + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + this.states.uModelMatrix.reset(); + this.states.setValue("uViewMatrix", this.states.uViewMatrix.clone()); + this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); + + // reset light data for new frame. 
+ + this.states.setValue("ambientLightColors", []); + this.states.setValue("specularColors", [1, 1, 1]); + + this.states.setValue("directionalLightDirections", []); + this.states.setValue("directionalLightDiffuseColors", []); + this.states.setValue("directionalLightSpecularColors", []); + + this.states.setValue("pointLightPositions", []); + this.states.setValue("pointLightDiffuseColors", []); + this.states.setValue("pointLightSpecularColors", []); + + this.states.setValue("spotLightPositions", []); + this.states.setValue("spotLightDirections", []); + this.states.setValue("spotLightDiffuseColors", []); + this.states.setValue("spotLightSpecularColors", []); + this.states.setValue("spotLightAngle", []); + this.states.setValue("spotLightConc", []); + + this.states.setValue("enableLighting", false); + + //reset tint value for new frame + this.states.setValue("tint", [255, 255, 255, 255]); + + //Clear depth every frame + this._resetBuffersBeforeDraw() + } + + background(...args) { + const _col = this._pInst.color(...args); + this.clear(..._col._getRGBA()); + } + + ////////////////////////////////////////////// + // Positioning + ////////////////////////////////////////////// + + get uModelMatrix() { + return this.states.uModelMatrix; + } + + get uViewMatrix() { + return this.states.uViewMatrix; + } + + get uPMatrix() { + return this.states.uPMatrix; + } + + get uMVMatrix() { + const m = this.uModelMatrix.copy(); + m.mult(this.uViewMatrix); + return m; + } + + /** + * Get a matrix from world-space to screen-space + */ + getWorldToScreenMatrix() { + const modelMatrix = this.states.uModelMatrix; + const viewMatrix = this.states.uViewMatrix; + const projectionMatrix = this.states.uPMatrix; + const projectedToScreenMatrix = new Matrix(4); + projectedToScreenMatrix.scale(this.width, this.height, 1); + projectedToScreenMatrix.translate([0.5, 0.5, 0.5]); + projectedToScreenMatrix.scale(0.5, -0.5, 0.5); + + const modelViewMatrix = modelMatrix.copy().mult(viewMatrix); + const 
modelViewProjectionMatrix = modelViewMatrix.mult(projectionMatrix); + const worldToScreenMatrix = modelViewProjectionMatrix.mult(projectedToScreenMatrix); + return worldToScreenMatrix; + } + + ////////////////////////////////////////////// + // COLOR + ////////////////////////////////////////////// + /** + * Basic fill material for geometry with a given color + * @param {Number|Number[]|String|p5.Color} v1 gray value, + * red or hue value (depending on the current color mode), + * or color Array, or CSS color string + * @param {Number} [v2] green or saturation value + * @param {Number} [v3] blue or brightness value + * @param {Number} [a] opacity + * @chainable + * @example + *
+ * + * function setup() { + * createCanvas(200, 200, WEBGL); + * } + * + * function draw() { + * background(0); + * noStroke(); + * fill(100, 100, 240); + * rotateX(frameCount * 0.01); + * rotateY(frameCount * 0.01); + * box(75, 75, 75); + * } + * + *
+ * + * @alt + * black canvas with purple cube spinning + */ + fill(...args) { + super.fill(...args); + //see material.js for more info on color blending in webgl + // const color = fn.color.apply(this._pInst, arguments); + const color = this.states.fillColor; + this.states.setValue("curFillColor", color._array); + this.states.setValue("drawMode", constants.FILL); + this.states.setValue("_useNormalMaterial", false); + this.states.setValue("_tex", null); + } + + /** + * Basic stroke material for geometry with a given color + * @param {Number|Number[]|String|p5.Color} v1 gray value, + * red or hue value (depending on the current color mode), + * or color Array, or CSS color string + * @param {Number} [v2] green or saturation value + * @param {Number} [v3] blue or brightness value + * @param {Number} [a] opacity + * @example + *
+ * + * function setup() { + * createCanvas(200, 200, WEBGL); + * } + * + * function draw() { + * background(0); + * stroke(240, 150, 150); + * fill(100, 100, 240); + * rotateX(frameCount * 0.01); + * rotateY(frameCount * 0.01); + * box(75, 75, 75); + * } + * + *
+ * + * @alt + * black canvas with purple cube with pink outline spinning + */ + stroke(...args) { + super.stroke(...args); + // const color = fn.color.apply(this._pInst, arguments); + this.states.setValue("curStrokeColor", this.states.strokeColor._array); + } + + getCommonVertexProperties() { + return { + ...super.getCommonVertexProperties(), + stroke: this.states.strokeColor, + fill: this.states.fillColor, + normal: this.states._currentNormal, + }; + } + + getSupportedIndividualVertexProperties() { + return { + textureCoordinates: true, + }; + } + + strokeCap(cap) { + this.curStrokeCap = cap; + } + + strokeJoin(join) { + this.curStrokeJoin = join; + } + getFilterLayer() { + if (!this.filterLayer) { + this.filterLayer = new Framebuffer(this); + } + return this.filterLayer; + } + getFilterLayerTemp() { + if (!this.filterLayerTemp) { + this.filterLayerTemp = new Framebuffer(this); + } + return this.filterLayerTemp; + } + matchSize(fboToMatch, target) { + if ( + fboToMatch.width !== target.width || + fboToMatch.height !== target.height + ) { + fboToMatch.resize(target.width, target.height); + } + + if (fboToMatch.pixelDensity() !== target.pixelDensity()) { + fboToMatch.pixelDensity(target.pixelDensity()); + } + } + filter(...args) { + let fbo = this.getFilterLayer(); + + // use internal shader for filter constants BLUR, INVERT, etc + let filterParameter = undefined; + let operation = undefined; + if (typeof args[0] === "string") { + operation = args[0]; + let useDefaultParam = + operation in filterParamDefaults && args[1] === undefined; + filterParameter = useDefaultParam + ? filterParamDefaults[operation] + : args[1]; + + // Create and store shader for constants once on initial filter call. + // Need to store multiple in case user calls different filters, + // eg. 
filter(BLUR) then filter(GRAY) + if (!(operation in this.defaultFilterShaders)) { + this.defaultFilterShaders[operation] = this._makeFilterShader(fbo.renderer, operation); + } + this.states.setValue( + "filterShader", + this.defaultFilterShaders[operation] + ); + } + // use custom user-supplied shader + else { + this.states.setValue("filterShader", args[0]); + } + + // Setting the target to the framebuffer when applying a filter to a framebuffer. + + const target = this.activeFramebuffer() || this; + + // Resize the framebuffer 'fbo' and adjust its pixel density if it doesn't match the target. + this.matchSize(fbo, target); + + fbo.draw(() => this.clear()); // prevent undesirable feedback effects accumulating secretly. + + let texelSize = [ + 1 / (target.width * target.pixelDensity()), + 1 / (target.height * target.pixelDensity()), + ]; + + // apply blur shader with multiple passes. + if (operation === constants.BLUR) { + // Treating 'tmp' as a framebuffer. + const tmp = this.getFilterLayerTemp(); + // Resize the framebuffer 'tmp' and adjust its pixel density if it doesn't match the target. 
+ this.matchSize(tmp, target); + // setup + this.push(); + this.states.setValue("strokeColor", null); + this.blendMode(constants.BLEND); + + // draw main to temp buffer + this.shader(this.states.filterShader); + this.states.filterShader.setUniform("texelSize", texelSize); + this.states.filterShader.setUniform("canvasSize", [ + target.width, + target.height, + ]); + this.states.filterShader.setUniform( + "radius", + Math.max(1, filterParameter) + ); + + // Horiz pass: draw `target` to `tmp` + tmp.draw(() => { + this.states.filterShader.setUniform("direction", [1, 0]); + this.states.filterShader.setUniform("tex0", target); + this.clear(); + this.shader(this.states.filterShader); + this.noLights(); + this.plane(target.width, target.height); + }); + + // Vert pass: draw `tmp` to `fbo` + fbo.draw(() => { + this.states.filterShader.setUniform("direction", [0, 1]); + this.states.filterShader.setUniform("tex0", tmp); + this.clear(); + this.shader(this.states.filterShader); + this.noLights(); + this.plane(target.width, target.height); + }); + + this.pop(); + } + // every other non-blur shader uses single pass + else { + fbo.draw(() => { + this.states.setValue("strokeColor", null); + this.blendMode(constants.BLEND); + this.shader(this.states.filterShader); + this.states.filterShader.setUniform("tex0", target); + this.states.filterShader.setUniform("texelSize", texelSize); + this.states.filterShader.setUniform("canvasSize", [ + target.width, + target.height, + ]); + // filterParameter uniform only used for POSTERIZE, and THRESHOLD + // but shouldn't hurt to always set + this.states.filterShader.setUniform("filterParameter", filterParameter); + this.noLights(); + this.plane(target.width, target.height); + }); + } + // draw fbo contents onto main renderer. 
+ this.push(); + this.states.setValue("strokeColor", null); + this.clear(); + this.push(); + this.states.setValue("imageMode", constants.CORNER); + this.blendMode(constants.BLEND); + target.filterCamera._resize(); + this.setCamera(target.filterCamera); + this.resetMatrix(); + this._drawingFilter = true; + this.image( + fbo, + 0, + 0, + this.width, + this.height, + -target.width / 2, + -target.height / 2, + target.width, + target.height + ); + this._drawingFilter = false; + this.clearDepth(); + this.pop(); + this.pop(); + } + + // Pass this off to the host instance so that we can treat a renderer and a + // framebuffer the same in filter() + + pixelDensity(newDensity) { + if (newDensity) { + return this._pInst.pixelDensity(newDensity); + } + return this._pInst.pixelDensity(); + } + + blendMode(mode) { + if ( + mode === constants.DARKEST || + mode === constants.LIGHTEST || + mode === constants.ADD || + mode === constants.BLEND || + mode === constants.SUBTRACT || + mode === constants.SCREEN || + mode === constants.EXCLUSION || + mode === constants.REPLACE || + mode === constants.MULTIPLY || + mode === constants.REMOVE + ) + this.states.setValue("curBlendMode", mode); + else if ( + mode === constants.BURN || + mode === constants.OVERLAY || + mode === constants.HARD_LIGHT || + mode === constants.SOFT_LIGHT || + mode === constants.DODGE + ) { + console.warn( + "BURN, OVERLAY, HARD_LIGHT, SOFT_LIGHT, and DODGE only work for blendMode in 2D mode." 
+ ); + } + } + + erase(opacityFill, opacityStroke) { + if (!this._isErasing) { + this.preEraseBlend = this.states.curBlendMode; + this._isErasing = true; + this.blendMode(constants.REMOVE); + this._cachedFillStyle = this.states.curFillColor.slice(); + this.states.setValue("curFillColor", [1, 1, 1, opacityFill / 255]); + this._cachedStrokeStyle = this.states.curStrokeColor.slice(); + this.states.setValue("curStrokeColor", [1, 1, 1, opacityStroke / 255]); + } + } + + noErase() { + if (this._isErasing) { + // Restore colors + this.states.setValue("curFillColor", this._cachedFillStyle.slice()); + this.states.setValue("curStrokeColor", this._cachedStrokeStyle.slice()); + // Restore blend mode + this.states.setValue("curBlendMode", this.preEraseBlend); + this.blendMode(this.preEraseBlend); + // Ensure that _applyBlendMode() sets preEraseBlend back to the original blend mode + this._isErasing = false; + this._applyBlendMode(); + } + } + + _applyBlendMode() { + // By default, a noop + } + + drawTarget() { + return this.activeFramebuffers[this.activeFramebuffers.length - 1] || this; + } + + beginClip(options = {}) { + super.beginClip(options); + + this.drawTarget()._isClipApplied = true; + + this._applyClip(); + + this.push(); + this.resetShader(); + if (this.states.fillColor) this.fill(0, 0); + if (this.states.strokeColor) this.stroke(0, 0); + } + + endClip() { + this.pop(); + + this._unapplyClip(); + + // Mark the depth at which the clip has been applied so that we can clear it + // when we pop past this depth + this._clipDepths.push(this._pushPopDepth); + + super.endClip(); + } + + _clearClip() { + this._clearClipBuffer(); + if (this._clipDepths.length > 0) { + this._clipDepths.pop(); + } + this.drawTarget()._isClipApplied = false; + } + + /** + * @private + * @returns {p5.Framebuffer} A p5.Framebuffer set to match the size and settings + * of the renderer's canvas. It will be created if it does not yet exist, and + * reused if it does. 
+ */ + _getTempFramebuffer() { + if (!this._tempFramebuffer) { + this._tempFramebuffer = new Framebuffer(this, { + format: constants.UNSIGNED_BYTE, + useDepth: this._pInst._glAttributes.depth, + depthFormat: constants.UNSIGNED_INT, + antialias: this._pInst._glAttributes.antialias, + }); + } + return this._tempFramebuffer; + } + + ////////////////////////////////////////////// + // HASH | for geometry + ////////////////////////////////////////////// + + geometryInHash(gid) { + return this.geometryBufferCache.isCached(gid); + } + + /** + * [resize description] + * @private + * @param {Number} w [description] + * @param {Number} h [description] + */ + resize(w, h) { + super.resize(w, h); + + // save canvas properties + const props = {}; + for (const key in this.drawingContext) { + const val = this.drawingContext[key]; + if (typeof val !== "object" && typeof val !== "function") { + props[key] = val; + } + } + + const dimensions = this._adjustDimensions(w, h); + w = dimensions.adjustedWidth; + h = dimensions.adjustedHeight; + + this.width = w; + this.height = h; + + this.canvas.width = w * this._pixelDensity; + this.canvas.height = h * this._pixelDensity; + this.canvas.style.width = `${w}px`; + this.canvas.style.height = `${h}px`; + this._updateViewport(); + this._updateSize(); + + this.states.curCamera._resize(); + + //resize pixels buffer + if (typeof this.pixels !== "undefined") { + this._createPixelsArray(); + } + + for (const framebuffer of this.framebuffers) { + // Notify framebuffers of the resize so that any auto-sized framebuffers + // can also update their size + framebuffer._canvasSizeChanged(); + } + + // reset canvas properties + for (const savedKey in props) { + try { + this.drawingContext[savedKey] = props[savedKey]; + } catch (err) { + // ignore read-only property errors + } + } + } + + applyMatrix(a, b, c, d, e, f) { + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + if (arguments.length === 16) { + // 
this.states.uModelMatrix.apply(arguments); + Matrix.prototype.apply.apply(this.states.uModelMatrix, arguments); + } else { + this.states.uModelMatrix.apply([ + a, + b, + 0, + 0, + c, + d, + 0, + 0, + 0, + 0, + 1, + 0, + e, + f, + 0, + 1, + ]); + } + } + + /** + * [translate description] + * @private + * @param {Number} x [description] + * @param {Number} y [description] + * @param {Number} z [description] + * @chainable + * @todo implement handle for components or vector as args + */ + translate(x, y, z) { + if (x instanceof Vector) { + z = x.z; + y = x.y; + x = x.x; + } + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + this.states.uModelMatrix.translate([x, y, z]); + return this; + } + + /** + * Scales the Model View Matrix by a vector + * @private + * @param {Number | p5.Vector | Array} x [description] + * @param {Number} [y] y-axis scalar + * @param {Number} [z] z-axis scalar + * @chainable + */ + scale(x, y, z) { + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + this.states.uModelMatrix.scale(x, y, z); + return this; + } + + rotate(rad, axis) { + if (typeof axis === "undefined") { + return this.rotateZ(rad); + } + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + Matrix.prototype.rotate4x4.apply(this.states.uModelMatrix, arguments); + return this; + } + + rotateX(rad) { + this.rotate(rad, 1, 0, 0); + return this; + } + + rotateY(rad) { + this.rotate(rad, 0, 1, 0); + return this; + } + + rotateZ(rad) { + this.rotate(rad, 0, 0, 1); + return this; + } + + pop(...args) { + if ( + this._clipDepths.length > 0 && + this._pushPopDepth === this._clipDepths[this._clipDepths.length - 1] + ) { + this._clearClip(); + if (!this._userEnabledStencil) { + this._internalDisable.call(this.GL, this.GL.STENCIL_TEST); + } + + // Reset saved state + // this._userEnabledStencil = this._savedStencilTestState; + } + super.pop(...args); + this._applyStencilTestIfClipping(); + } + + resetMatrix() { + 
this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + this.states.uModelMatrix.reset(); + this.states.setValue("uViewMatrix", this.states.uViewMatrix.clone()); + this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); + return this; + } + + ////////////////////////////////////////////// + // SHADER + ////////////////////////////////////////////// + + _getStrokeShader() { + // select the stroke shader to use + const stroke = this.states.userStrokeShader; + if (stroke) { + return stroke; + } + return this._getLineShader(); + } + + /* + * This method will handle both image shaders and + * fill shaders, returning the appropriate shader + * depending on the current context (image or shape). + */ + _getFillShader() { + // If drawing an image, check for user-defined image shader and filters + if (this._drawingImage) { + // Use user-defined image shader if available and no filter is applied + if (this.states.userImageShader && !this._drawingFilter) { + return this.states.userImageShader; + } else { + return this._getLightShader(); // Fallback to light shader + } + } + // If user has defined a fill shader, return that + else if (this.states.userFillShader) { + return this.states.userFillShader; + } + // Use normal shader if normal material is active + else if (this.states._useNormalMaterial) { + return this._getNormalShader(); + } + // Use light shader if lighting or textures are enabled + else if (this.states.enableLighting || this.states._tex) { + return this._getLightShader(); + } + // Default to color shader if no other conditions are met + return this._getColorShader(); + } + + _getPointShader() { + // select the point shader to use + const point = this.states.userPointShader; + if (!point || !point.isPointShader()) { + return this._getPointShader(); + } + return point; + } + + baseMaterialShader() { + return this._getLightShader(); + } + + baseNormalShader() { + return this._getNormalShader(); + } + + baseColorShader() { + return 
this._getColorShader(); + } + + /** + * TODO(dave): un-private this when there is a way to actually override the + * shader used for points + * + * Get the shader used when drawing points with `point()`. + * + * You can call `pointShader().modify()` + * and change any of the following hooks: + * - `void beforeVertex`: Called at the start of the vertex shader. + * - `vec3 getLocalPosition`: Update the position of vertices before transforms are applied. It takes in `vec3 position` and must return a modified version. + * - `vec3 getWorldPosition`: Update the position of vertices after transforms are applied. It takes in `vec3 position` and must return a modified version. + * - `float getPointSize`: Update the size of the point. It takes in `float size` and must return a modified version. + * - `void afterVertex`: Called at the end of the vertex shader. + * - `void beforeFragment`: Called at the start of the fragment shader. + * - `bool shouldDiscard`: Points are drawn inside a square, with the corners discarded in the fragment shader to create a circle. Use this to change this logic. It takes in a `bool willDiscard` and must return a modified version. + * - `vec4 getFinalColor`: Update the final color after mixing. It takes in a `vec4 color` and must return a modified version. + * - `void afterFragment`: Called at the end of the fragment shader. + * + * Call `pointShader().inspectHooks()` to see all the possible hooks and + * their default implementations. + * + * @returns {p5.Shader} The `point()` shader + * @private + */ + pointShader() { + return this._getPointShader(); + } + + baseStrokeShader() { + return this._getLineShader(); + } + + /** + * @private + * @returns {p5.Framebuffer|null} The currently active framebuffer, or null if + * the main canvas is the current draw target.
+ */ + activeFramebuffer() { + return this.activeFramebuffers[this.activeFramebuffers.length - 1] || null; + } + + createFramebuffer(options) { + return new Framebuffer(this, options); + } + + _setGlobalUniforms(shader) { + const modelMatrix = this.states.uModelMatrix; + const viewMatrix = this.states.uViewMatrix; + const projectionMatrix = this.states.uPMatrix; + const modelViewMatrix = modelMatrix.copy().mult(viewMatrix); + + shader.setUniform( + "uPerspective", + this.states.curCamera.useLinePerspective ? 1 : 0 + ); + shader.setUniform("uViewMatrix", viewMatrix.mat4); + shader.setUniform("uProjectionMatrix", projectionMatrix.mat4); + shader.setUniform("uModelMatrix", modelMatrix.mat4); + shader.setUniform("uModelViewMatrix", modelViewMatrix.mat4); + if (shader.uniforms.uModelViewProjectionMatrix) { + const modelViewProjectionMatrix = modelViewMatrix.copy(); + modelViewProjectionMatrix.mult(projectionMatrix); + shader.setUniform( + "uModelViewProjectionMatrix", + modelViewProjectionMatrix.mat4 + ); + } + if (shader.uniforms.uNormalMatrix) { + this.scratchMat3.inverseTranspose4x4(modelViewMatrix); + shader.setUniform("uNormalMatrix", this.scratchMat3.mat3); + } + if (shader.uniforms.uModelNormalMatrix) { + this.scratchMat3.inverseTranspose4x4(this.states.uModelMatrix); + shader.setUniform("uModelNormalMatrix", this.scratchMat3.mat3); + } + if (shader.uniforms.uCameraNormalMatrix) { + this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); + shader.setUniform("uCameraNormalMatrix", this.scratchMat3.mat3); + } + if (shader.uniforms.uCameraRotation) { + this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); + shader.setUniform("uCameraRotation", this.scratchMat3.mat3); + } + shader.setUniform("uViewport", this._viewport); + } + + _setStrokeUniforms(strokeShader) { + // set the uniform values + strokeShader.setUniform("uSimpleLines", this._simpleLines); + strokeShader.setUniform("uUseLineColor", this._useLineColor); + 
strokeShader.setUniform("uMaterialColor", this.states.curStrokeColor); + strokeShader.setUniform("uStrokeWeight", this.states.strokeWeight); + strokeShader.setUniform("uStrokeCap", STROKE_CAP_ENUM[this.curStrokeCap]); + strokeShader.setUniform( + "uStrokeJoin", + STROKE_JOIN_ENUM[this.curStrokeJoin] + ); + } + + _setFillUniforms(fillShader) { + this.mixedSpecularColor = [...this.states.curSpecularColor]; + const empty = this._getEmptyTexture(); + + if (this.states._useMetalness > 0) { + this.mixedSpecularColor = this.mixedSpecularColor.map( + (mixedSpecularColor, index) => + this.states.curFillColor[index] * this.states._useMetalness + + mixedSpecularColor * (1 - this.states._useMetalness) + ); + } + + // TODO: optimize + fillShader.setUniform("uUseVertexColor", this._useVertexColor); + fillShader.setUniform("uMaterialColor", this.states.curFillColor); + fillShader.setUniform("isTexture", !!this.states._tex); + // We need to explicitly set uSampler back to an empty texture here. + // In general, we record the last set texture so we can re-apply it + // the next time a shader is used. However, the texture() function + // works differently and is global p5 state. If the p5 state has + // been cleared, we also need to clear the value in uSampler to match. 
+ fillShader.setUniform("uSampler", this.states._tex || empty); + fillShader.setUniform("uTint", this.states.tint); + + fillShader.setUniform("uHasSetAmbient", this.states._hasSetAmbient); + fillShader.setUniform("uAmbientMatColor", this.states.curAmbientColor); + fillShader.setUniform("uSpecularMatColor", this.mixedSpecularColor); + fillShader.setUniform("uEmissiveMatColor", this.states.curEmissiveColor); + fillShader.setUniform("uSpecular", this.states._useSpecularMaterial); + fillShader.setUniform("uEmissive", this.states._useEmissiveMaterial); + fillShader.setUniform("uShininess", this.states._useShininess); + fillShader.setUniform("uMetallic", this.states._useMetalness); + + this._setImageLightUniforms(fillShader); + + fillShader.setUniform("uUseLighting", this.states.enableLighting); + + const pointLightCount = this.states.pointLightDiffuseColors.length / 3; + fillShader.setUniform("uPointLightCount", pointLightCount); + fillShader.setUniform( + "uPointLightLocation", + this.states.pointLightPositions + ); + fillShader.setUniform( + "uPointLightDiffuseColors", + this.states.pointLightDiffuseColors + ); + fillShader.setUniform( + "uPointLightSpecularColors", + this.states.pointLightSpecularColors + ); + + const directionalLightCount = + this.states.directionalLightDiffuseColors.length / 3; + fillShader.setUniform("uDirectionalLightCount", directionalLightCount); + fillShader.setUniform( + "uLightingDirection", + this.states.directionalLightDirections + ); + fillShader.setUniform( + "uDirectionalDiffuseColors", + this.states.directionalLightDiffuseColors + ); + fillShader.setUniform( + "uDirectionalSpecularColors", + this.states.directionalLightSpecularColors + ); + + // TODO: sum these here... 
+ let mixedAmbientLight = [0, 0, 0]; + for (let i = 0; i < this.states.ambientLightColors.length; i += 3) { + for (let off = 0; off < 3; off++) { + if (this.states._useMetalness > 0) { + mixedAmbientLight[off] += Math.max( + 0, + this.states.ambientLightColors[i + off] - this.states._useMetalness + ); + } else { + mixedAmbientLight[off] += this.states.ambientLightColors[i + off]; + } + } + } + fillShader.setUniform("uAmbientColor", mixedAmbientLight); + + const spotLightCount = this.states.spotLightDiffuseColors.length / 3; + fillShader.setUniform("uSpotLightCount", spotLightCount); + fillShader.setUniform("uSpotLightAngle", this.states.spotLightAngle); + fillShader.setUniform("uSpotLightConc", this.states.spotLightConc); + fillShader.setUniform( + "uSpotLightDiffuseColors", + this.states.spotLightDiffuseColors + ); + fillShader.setUniform( + "uSpotLightSpecularColors", + this.states.spotLightSpecularColors + ); + fillShader.setUniform("uSpotLightLocation", this.states.spotLightPositions); + fillShader.setUniform( + "uSpotLightDirection", + this.states.spotLightDirections + ); + + fillShader.setUniform( + "uConstantAttenuation", + this.states.constantAttenuation + ); + fillShader.setUniform("uLinearAttenuation", this.states.linearAttenuation); + fillShader.setUniform( + "uQuadraticAttenuation", + this.states.quadraticAttenuation + ); + } + + // getting called from _setFillUniforms + _setImageLightUniforms(shader) { + //set uniform values + shader.setUniform("uUseImageLight", this.states.activeImageLight != null); + // true + if (this.states.activeImageLight) { + // this.states.activeImageLight has image as a key + // look up the texture from the diffusedTexture map + let diffusedLight = this.getDiffusedTexture(this.states.activeImageLight); + shader.setUniform("environmentMapDiffused", diffusedLight); + let specularLight = this.getSpecularTexture(this.states.activeImageLight); + + shader.setUniform("environmentMapSpecular", specularLight); + } + } + + 
_setPointUniforms(pointShader) { + // set the uniform values + pointShader.setUniform("uMaterialColor", this.states.curStrokeColor); + // @todo is there an instance where this isn't stroke weight? + // should they be the same var? + pointShader.setUniform( + "uPointSize", + this.states.strokeWeight * this._pixelDensity + ); + } + + /** + * @private + * Note: DO NOT CALL THIS while in the middle of binding another texture, + * since it will change the texture binding in order to allocate the empty + * texture! Grab its value beforehand! + */ + _getEmptyTexture() { + if (!this._emptyTexture) { + // a plain white texture RGBA, full alpha, single pixel. + const im = new Image(1, 1); + im.set(0, 0, 255); + this._emptyTexture = new Texture(this, im); + } + return this._emptyTexture; + } + + getTexture(input) { + let src = input; + if (src instanceof Framebuffer) { + src = src.color; + } + + const texture = this.textures.get(src); + if (texture) { + return texture; + } + + const tex = new Texture(this, src); + this.textures.set(src, tex); + return tex; + } + + ////////////////////////////////////////////// + // Buffers + ////////////////////////////////////////////// + + _normalizeBufferData(values, type = Float32Array) { + if (!values) return null; + if (values instanceof DataArray) { + return values.dataArray(); + } + if (values instanceof type) { + return values; + } + return new type(values); + } + + /////////////////////////////// + //// UTILITY FUNCTIONS + ////////////////////////////// + _arraysEqual(a, b) { + const aLength = a.length; + if (aLength !== b.length) return false; + return a.every((ai, i) => ai === b[i]); + } + + _isTypedArray(arr) { + return [ + Float32Array, + Float64Array, + Int16Array, + Uint16Array, + Uint32Array, + ].some((x) => arr instanceof x); + } + + /** + * turn a p5.Vector Array into a one dimensional number array + * @private + * @param {p5.Vector[]} arr an array of p5.Vector + * @return {Number[]} a one dimensional array of numbers + * 
[p5.Vector(1, 2, 3), p5.Vector(4, 5, 6)] -> + * [1, 2, 3, 4, 5, 6] + */ + _vToNArray(arr) { + return arr.flatMap((item) => [item.x, item.y, item.z]); + } +} diff --git a/src/core/rendering.js b/src/core/rendering.js index 5c145b24f1..c99abbf317 100644 --- a/src/core/rendering.js +++ b/src/core/rendering.js @@ -151,7 +151,11 @@ function rendering(p5, fn){ }); } - return this._renderer; + if (this._renderer.contextReady) { + return this._renderer.contextReady.then(() => this._renderer); + } else { + return this._renderer; + } }; /** diff --git a/src/image/filterRenderer2D.js b/src/image/filterRenderer2D.js index cd0a97fe0a..fe47646851 100644 --- a/src/image/filterRenderer2D.js +++ b/src/image/filterRenderer2D.js @@ -1,6 +1,13 @@ import { Shader } from '../webgl/p5.Shader'; import { Texture } from '../webgl/p5.Texture'; import { Image } from './p5.Image'; +import { + getWebGLShaderAttributes, + getWebGLUniformMetadata, + populateGLSLHooks, + setWebGLTextureParams, + setWebGLUniformValue +} from "../webgl/utils"; import * as constants from '../core/constants'; import filterGrayFrag from '../webgl/shaders/filters/gray.frag'; @@ -42,6 +49,9 @@ class FilterRenderer2D { console.error('WebGL not supported, cannot apply filter.'); return; } + + this.textures = new Map(); + // Minimal renderer object required by p5.Shader and p5.Texture this._renderer = { GL: this.gl, @@ -50,8 +60,8 @@ class FilterRenderer2D { _emptyTexture: null, webglVersion, states: { - textureWrapX: this.gl.CLAMP_TO_EDGE, - textureWrapY: this.gl.CLAMP_TO_EDGE + textureWrapX: constants.CLAMP, + textureWrapY: constants.CLAMP, }, _arraysEqual: (a, b) => JSON.stringify(a) === JSON.stringify(b), _getEmptyTexture: () => { @@ -61,6 +71,167 @@ class FilterRenderer2D { this._emptyTexture = new Texture(this._renderer, im); } return this._emptyTexture; + }, + _initShader: (shader) => { + const gl = this.gl; + + const vertShader = gl.createShader(gl.VERTEX_SHADER); + gl.shaderSource(vertShader, shader.vertSrc()); + 
gl.compileShader(vertShader); + if (!gl.getShaderParameter(vertShader, gl.COMPILE_STATUS)) { + throw new Error(`Yikes! An error occurred compiling the vertex shader: ${ + gl.getShaderInfoLog(vertShader) + }`); + } + + const fragShader = gl.createShader(gl.FRAGMENT_SHADER); + gl.shaderSource(fragShader, shader.fragSrc()); + gl.compileShader(fragShader); + if (!gl.getShaderParameter(fragShader, gl.COMPILE_STATUS)) { + throw new Error(`Darn! An error occurred compiling the fragment shader: ${ + gl.getShaderInfoLog(fragShader) + }`); + } + + const program = gl.createProgram(); + gl.attachShader(program, vertShader); + gl.attachShader(program, fragShader); + gl.linkProgram(program); + + if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { + throw new Error( + `Snap! Error linking shader program: ${gl.getProgramInfoLog(program)}` + ); + } + + shader._glProgram = program; + shader._vertShader = vertShader; + shader._fragShader = fragShader; + }, + getTexture: (input) => { + let src = input; + if (src instanceof Framebuffer) { + src = src.color; + } + + const texture = this.textures.get(src); + if (texture) { + return texture; + } + + const tex = new Texture(this._renderer, src); + this.textures.set(src, tex); + return tex; + }, + populateHooks: (shader, src, shaderType) => { + return populateGLSLHooks(shader, src, shaderType); + }, + _getShaderAttributes: (shader) => { + return getWebGLShaderAttributes(shader, this.gl); + }, + getUniformMetadata: (shader) => { + return getWebGLUniformMetadata(shader, this.gl); + }, + _finalizeShader: () => {}, + _useShader: (shader) => { + this.gl.useProgram(shader._glProgram); + }, + bindTexture: (tex) => { + // bind texture using gl context + glTarget and + // generated gl texture object + this.gl.bindTexture(this.gl.TEXTURE_2D, tex.getTexture().texture); + }, + unbindTexture: () => { + // unbind per above, disable texturing on glTarget + this.gl.bindTexture(this.gl.TEXTURE_2D, null); + }, + _unbindFramebufferTexture: (uniform) => { 
+ // Make sure an empty texture is bound to the slot so that we don't + // accidentally leave a framebuffer bound, causing a feedback loop + // when something else tries to write to it + const gl = this.gl; + const empty = this._getEmptyTexture(); + gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); + empty.bindTexture(); + gl.uniform1i(uniform.location, uniform.samplerIndex); + }, + createTexture: ({ width, height, format, dataType }) => { + const gl = this.gl; + const tex = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, tex); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, + gl.RGBA, gl.UNSIGNED_BYTE, null); + // TODO use format and data type + return { texture: tex, glFormat: gl.RGBA, glDataType: gl.UNSIGNED_BYTE }; + }, + uploadTextureFromSource: ({ texture, glFormat, glDataType }, source) => { + const gl = this.gl; + gl.bindTexture(gl.TEXTURE_2D, texture); + gl.texImage2D(gl.TEXTURE_2D, 0, glFormat, glFormat, glDataType, source); + }, + uploadTextureFromData: ({ texture, glFormat, glDataType }, data, width, height) => { + const gl = this.gl; + gl.bindTexture(gl.TEXTURE_2D, texture); + gl.texImage2D( + gl.TEXTURE_2D, + 0, + glFormat, + width, + height, + 0, + glFormat, + glDataType, + data + ); + }, + setTextureParams: (texture) => { + return setWebGLTextureParams(texture, this.gl, this._renderer.webglVersion); + }, + updateUniformValue: (shader, uniform, data) => { + return setWebGLUniformValue( + shader, + uniform, + data, + (tex) => this._renderer.getTexture(tex), + this.gl + ); + }, + _enableAttrib: (_shader, attr, size, type, normalized, stride, offset) => { + const loc = attr.location; + const gl = this.gl; + // Enable register even if it is disabled + if (!this._renderer.registerEnabled.has(loc)) { + gl.enableVertexAttribArray(loc); + // Record register availability + this._renderer.registerEnabled.add(loc); + } + gl.vertexAttribPointer( + loc, + size, + type || gl.FLOAT, + normalized || false, + stride || 0, + offset || 0 + ); + }, 
+ _disableRemainingAttributes: (shader) => { + for (const location of this._renderer.registerEnabled.values()) { + if ( + !Object.keys(shader.attributes).some( + key => shader.attributes[key].location === location + ) + ) { + this.gl.disableVertexAttribArray(location); + this._renderer.registerEnabled.delete(location); + } + } + }, + _updateTexture: (uniform, tex) => { + const gl = this.gl; + gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); + tex.bindTexture(); + tex.update(); + gl.uniform1i(uniform.location, uniform.samplerIndex); } }; @@ -231,7 +402,7 @@ class FilterRenderer2D { */ _renderPass() { const gl = this.gl; - this._shader.bindShader(); + this._shader.bindShader('fill'); const pixelDensity = this.pInst.pixelDensity ? this.pInst.pixelDensity() : 1; @@ -271,7 +442,7 @@ class FilterRenderer2D { this._shader.enableAttrib(this._shader.attributes.aTexCoord, 2); this._shader.bindTextures(); - this._shader.disableRemainingAttributes(); + this._renderer._disableRemainingAttributes(this._shader); // Draw the quad gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); diff --git a/src/image/pixels.js b/src/image/pixels.js index f88818faed..526811fe59 100644 --- a/src/image/pixels.js +++ b/src/image/pixels.js @@ -797,7 +797,7 @@ function pixels(p5, fn){ */ fn.loadPixels = function(...args) { // p5._validateParameters('loadPixels', args); - this._renderer.loadPixels(); + return this._renderer.loadPixels(); }; /** diff --git a/src/shape/custom_shapes.js b/src/shape/custom_shapes.js index f9b2da245d..97c8b02b6b 100644 --- a/src/shape/custom_shapes.js +++ b/src/shape/custom_shapes.js @@ -1170,10 +1170,12 @@ class PrimitiveToPath2DConverter extends PrimitiveVisitor { class PrimitiveToVerticesConverter extends PrimitiveVisitor { contours = []; curveDetail; + pointsToLines; - constructor({ curveDetail = 1 } = {}) { + constructor({ curveDetail = 1, pointsToLines = true } = {}) { super(); this.curveDetail = curveDetail; + this.pointsToLines = pointsToLines; } lastContour() { @@ 
-1248,7 +1250,11 @@ class PrimitiveToVerticesConverter extends PrimitiveVisitor { } } visitPoint(point) { - this.contours.push(point.vertices.slice()); + if (this.pointsToLines) { + this.contours.push(...point.vertices.map(v => [v, v])); + } else { + this.contours.push(point.vertices.slice()); + } } visitLine(line) { this.contours.push(line.vertices.slice()); @@ -1823,7 +1829,7 @@ function customShapes(p5, fn) { * } * * - * + * * @example *
* @@ -1833,44 +1839,44 @@ function customShapes(p5, fn) { * let vertexD; * let vertexE; * let vertexF; - * + * * let markerRadius; - * + * * let vectorAB; * let vectorFE; - * + * * let endOfTangentB; * let endOfTangentE; - * + * * function setup() { * createCanvas(100, 100); - * + * * // Initialize variables * // Adjusting vertices A and F affects the slopes at B and E - * + * * vertexA = createVector(35, 85); * vertexB = createVector(25, 70); * vertexC = createVector(30, 30); * vertexD = createVector(70, 30); * vertexE = createVector(75, 70); * vertexF = createVector(65, 85); - * + * * markerRadius = 4; - * + * * vectorAB = p5.Vector.sub(vertexB, vertexA); * vectorFE = p5.Vector.sub(vertexE, vertexF); - * + * * endOfTangentB = p5.Vector.add(vertexC, vectorAB); * endOfTangentE = p5.Vector.add(vertexD, vectorFE); - * + * * splineProperty(`ends`, EXCLUDE); - * + * * // Draw figure - * + * * background(220); - * + * * noFill(); - * + * * beginShape(); * splineVertex(vertexA.x, vertexA.y); * splineVertex(vertexB.x, vertexB.y); @@ -1879,15 +1885,15 @@ function customShapes(p5, fn) { * splineVertex(vertexE.x, vertexE.y); * splineVertex(vertexF.x, vertexF.y); * endShape(); - * + * * stroke('red'); * line(vertexA.x, vertexA.y, vertexC.x, vertexC.y); * line(vertexB.x, vertexB.y, endOfTangentB.x, endOfTangentB.y); - * + * * stroke('blue'); * line(vertexD.x, vertexD.y, vertexF.x, vertexF.y); * line(vertexE.x, vertexE.y, endOfTangentE.x, endOfTangentE.y); - * + * * fill('white'); * stroke('black'); * circle(vertexA.x, vertexA.y, markerRadius); @@ -1896,7 +1902,7 @@ function customShapes(p5, fn) { * circle(vertexD.x, vertexD.y, markerRadius); * circle(vertexE.x, vertexE.y, markerRadius); * circle(vertexF.x, vertexF.y, markerRadius); - * + * * fill('black'); * noStroke(); * text('A', vertexA.x - 15, vertexA.y + 5); @@ -1905,7 +1911,7 @@ function customShapes(p5, fn) { * text('D', vertexD.x - 5, vertexD.y - 5); * text('E', vertexE.x + 5, vertexE.y + 5); * text('F', vertexF.x + 5, 
vertexF.y + 5); - * + * * describe('On a gray background, a black spline passes through vertices A, B, C, D, E, and F, shown as white circles. A red line segment joining vertices A and C has the same slope as the red tangent segment at B. Similarly, the blue line segment joining vertices D and F has the same slope as the blue tangent at E.'); * } * @@ -2069,7 +2075,7 @@ function customShapes(p5, fn) { * spline(25, 46, 93, 44, 93, 81, 35, 85); * ``` * - * + * * In all cases, the splines in p5.js are cardinal splines. * When tightness is 0, these splines are often known as * Catmull-Rom splines @@ -2087,7 +2093,6 @@ function customShapes(p5, fn) { * * function setup() { * createCanvas(100, 100); - * * } * * function draw() { @@ -2185,9 +2190,9 @@ function customShapes(p5, fn) { * } * *
- * + * * @example - * + * *
* * let vertexA; @@ -2196,44 +2201,44 @@ function customShapes(p5, fn) { * let vertexD; * let vertexE; * let vertexF; - * + * * let markerRadius; - * + * * let vectorAB; * let vectorFE; - * + * * let endOfTangentB; * let endOfTangentE; - * + * * function setup() { * createCanvas(100, 100); - * + * * // Initialize variables * // Adjusting vertices A and F affects the slopes at B and E - * + * * vertexA = createVector(35, 85); * vertexB = createVector(25, 70); * vertexC = createVector(30, 30); * vertexD = createVector(70, 30); * vertexE = createVector(75, 70); * vertexF = createVector(65, 85); - * + * * markerRadius = 4; - * + * * vectorAB = p5.Vector.sub(vertexB, vertexA); * vectorFE = p5.Vector.sub(vertexE, vertexF); - * + * * endOfTangentB = p5.Vector.add(vertexC, vectorAB); * endOfTangentE = p5.Vector.add(vertexD, vectorFE); - * + * * splineProperty(`ends`, EXCLUDE); - * + * * // Draw figure - * + * * background(220); - * + * * noFill(); - * + * * beginShape(); * splineVertex(vertexA.x, vertexA.y); * splineVertex(vertexB.x, vertexB.y); @@ -2242,15 +2247,15 @@ function customShapes(p5, fn) { * splineVertex(vertexE.x, vertexE.y); * splineVertex(vertexF.x, vertexF.y); * endShape(); - * + * * stroke('red'); * line(vertexA.x, vertexA.y, vertexC.x, vertexC.y); * line(vertexB.x, vertexB.y, endOfTangentB.x, endOfTangentB.y); - * + * * stroke('blue'); * line(vertexD.x, vertexD.y, vertexF.x, vertexF.y); * line(vertexE.x, vertexE.y, endOfTangentE.x, endOfTangentE.y); - * + * * fill('white'); * stroke('black'); * circle(vertexA.x, vertexA.y, markerRadius); @@ -2259,7 +2264,7 @@ function customShapes(p5, fn) { * circle(vertexD.x, vertexD.y, markerRadius); * circle(vertexE.x, vertexE.y, markerRadius); * circle(vertexF.x, vertexF.y, markerRadius); - * + * * fill('black'); * noStroke(); * text('A', vertexA.x - 15, vertexA.y + 5); @@ -2268,12 +2273,12 @@ function customShapes(p5, fn) { * text('D', vertexD.x - 5, vertexD.y - 5); * text('E', vertexE.x + 5, vertexE.y + 5); * 
text('F', vertexF.x + 5, vertexF.y + 5); - * + * * describe('On a gray background, a black spline passes through vertices A, B, C, D, E, and F, shown as white circles. A red line segment joining vertices A and C has the same slope as the red tangent segment at B. Similarly, the blue line segment joining vertices D and F has the same slope as the blue tangent at E.'); * } * *
- * + * */ /** diff --git a/src/webgl/3d_primitives.js b/src/webgl/3d_primitives.js index cc07f71f78..fe370cda86 100644 --- a/src/webgl/3d_primitives.js +++ b/src/webgl/3d_primitives.js @@ -7,7 +7,7 @@ */ import * as constants from '../core/constants'; -import { RendererGL } from './p5.RendererGL'; +import { Renderer3D } from '../core/p5.Renderer3D'; import { Vector } from '../math/p5.Vector'; import { Geometry } from './p5.Geometry'; import { Matrix } from '../math/p5.Matrix'; @@ -1637,7 +1637,7 @@ function primitives3D(p5, fn){ /////////////////////// // // Note: Documentation is not generated on the p5.js website for functions on - // the p5.RendererGL prototype. + // the p5.Renderer3D prototype. /** * Draws a point, a coordinate in space at the dimension of one pixel, @@ -1671,16 +1671,15 @@ function primitives3D(p5, fn){ * * */ - RendererGL.prototype.point = function(x, y, z = 0) { - - const _vertex = []; - _vertex.push(new Vector(x, y, z)); - this._drawPoints(_vertex, this.buffers.point); + Renderer3D.prototype.point = function(x, y, z = 0) { + this.beginShape(constants.POINTS); + this.vertex(x, y, z); + this.endShape(); return this; }; - RendererGL.prototype.triangle = function(args) { + Renderer3D.prototype.triangle = function(args) { const x1 = args[0], y1 = args[1]; const x2 = args[2], @@ -1734,7 +1733,7 @@ function primitives3D(p5, fn){ return this; }; - RendererGL.prototype.ellipse = function(args) { + Renderer3D.prototype.ellipse = function(args) { this.arc( args[0], args[1], @@ -1747,7 +1746,7 @@ function primitives3D(p5, fn){ ); }; - RendererGL.prototype.arc = function(...args) { + Renderer3D.prototype.arc = function(...args) { const x = args[0]; const y = args[1]; const width = args[2]; @@ -1866,7 +1865,7 @@ function primitives3D(p5, fn){ return this; }; - RendererGL.prototype.rect = function(args) { + Renderer3D.prototype.rect = function(args) { const x = args[0]; const y = args[1]; const width = args[2]; @@ -1875,7 +1874,7 @@ function 
primitives3D(p5, fn){ if (typeof args[4] === 'undefined') { // Use the retained mode for drawing rectangle, // if args for rounding rectangle is not provided by user. - const perPixelLighting = this._pInst._glAttributes.perPixelLighting; + const perPixelLighting = this._pInst._glAttributes?.perPixelLighting; const detailX = args[4] || (perPixelLighting ? 1 : 24); const detailY = args[5] || (perPixelLighting ? 1 : 16); const gid = `rect|${detailX}|${detailY}`; @@ -2004,8 +2003,7 @@ function primitives3D(p5, fn){ return this; }; - - RendererGL.prototype.quad = function( + Renderer3D.prototype.quad = function( x1, y1, z1, x2, y2, z2, x3, y3, z3, @@ -2014,7 +2012,6 @@ function primitives3D(p5, fn){ detailY=2 ) { - const gid = `quad|${x1}|${y1}|${z1}|${x2}|${y2}|${z2}|${x3}|${y3}|${z3}|${x4}|${y4}|${z4}|${detailX}|${detailY}`; @@ -2076,7 +2073,7 @@ function primitives3D(p5, fn){ //this implementation of bezier curve //is based on Bernstein polynomial // pretier-ignore - RendererGL.prototype.bezier = function( + Renderer3D.prototype.bezier = function( x1, y1, z1, // x2 @@ -2111,7 +2108,7 @@ function primitives3D(p5, fn){ }; // pretier-ignore - RendererGL.prototype.curve = function( + Renderer3D.prototype.curve = function( x1, y1, z1, // x2 @@ -2171,7 +2168,7 @@ function primitives3D(p5, fn){ * * */ - RendererGL.prototype.line = function(...args) { + Renderer3D.prototype.line = function(...args) { if (args.length === 6) { // TODO shapes refactor this.beginShape(constants.LINES); @@ -2187,7 +2184,7 @@ function primitives3D(p5, fn){ return this; }; - RendererGL.prototype.image = function( + Renderer3D.prototype.image = function( img, sx, sy, @@ -2367,7 +2364,7 @@ function primitives3D(p5, fn){ } }; - RendererGL.prototype.plane = function( + Renderer3D.prototype.plane = function( width = 50, height = width, detailX = 1, @@ -2409,7 +2406,7 @@ function primitives3D(p5, fn){ ); }; - RendererGL.prototype.box = function( + Renderer3D.prototype.box = function( width = 50, height = 
width, depth = height, @@ -2495,7 +2492,7 @@ function primitives3D(p5, fn){ ); }; - RendererGL.prototype.sphere = function( + Renderer3D.prototype.sphere = function( radius = 50, detailX = 24, detailY = 16 @@ -2503,7 +2500,7 @@ function primitives3D(p5, fn){ this.ellipsoid(radius, radius, radius, detailX, detailY); }; - RendererGL.prototype.ellipsoid = function( + Renderer3D.prototype.ellipsoid = function( radiusX = 50, radiusY = radiusX, radiusZ = radiusX, @@ -2556,7 +2553,7 @@ function primitives3D(p5, fn){ ); }; - RendererGL.prototype.cylinder = function( + Renderer3D.prototype.cylinder = function( radius = 50, height = radius, detailX = 24, @@ -2599,7 +2596,7 @@ function primitives3D(p5, fn){ ); }; - RendererGL.prototype.cone = function( + Renderer3D.prototype.cone = function( radius = 50, height = radius, detailX = 24, @@ -2640,7 +2637,7 @@ function primitives3D(p5, fn){ ); }; - RendererGL.prototype.torus = function( + Renderer3D.prototype.torus = function( radius = 50, tubeRadius = 10, detailX = 24, @@ -2767,7 +2764,7 @@ function primitives3D(p5, fn){ * */ fn.curveDetail = function(d) { - if (!(this._renderer instanceof RendererGL)) { + if (!(this._renderer instanceof Renderer3D)) { throw new Error( 'curveDetail() only works in WebGL mode. Did you mean to call createCanvas(width, height, WEBGL)?' 
); diff --git a/src/webgl/GeometryBufferCache.js b/src/webgl/GeometryBufferCache.js index 337383fd55..f6f69ec1bf 100644 --- a/src/webgl/GeometryBufferCache.js +++ b/src/webgl/GeometryBufferCache.js @@ -30,57 +30,32 @@ export class GeometryBufferCache { throw new Error('The p5.Geometry you passed in has no gid property!'); } - if (this.isCached(geometry.gid)) return this.getCached(geometry); + if (this.isCached(gid)) return this.getCached(geometry); - const gl = this.renderer.GL; - - //initialize the gl buffers for our geom groups + // Cache maintenance this.freeBuffers(gid); - if (Object.keys(this.cache).length > 1000) { const key = Object.keys(this.cache)[0]; this.freeBuffers(key); } - //create a new entry in our cache - const buffers = {}; + const buffers = { geometry }; this.cache[gid] = buffers; - buffers.geometry = geometry; - - let indexBuffer = buffers.indexBuffer; - - if (geometry.faces.length) { - // allocate space for faces - if (!indexBuffer) indexBuffer = buffers.indexBuffer = gl.createBuffer(); - const vals = geometry.faces.flat(); + const indices = geometry.faces.length ? geometry.faces.flat() : null; + // Determine index buffer type + let indexType = null; + if (indices) { // If any face references a vertex with an index greater than the maximum // un-singed 16 bit integer, then we need to use a Uint32Array instead of a // Uint16Array - const hasVertexIndicesOverMaxUInt16 = vals.some(v => v > 65535); - let type = hasVertexIndicesOverMaxUInt16 ? Uint32Array : Uint16Array; - this.renderer._bindBuffer( - indexBuffer, - gl.ELEMENT_ARRAY_BUFFER, - vals, - type - ); - - // If we're using a Uint32Array for our indexBuffer we will need to pass a - // different enum value to WebGL draw triangles. This happens in - // the _drawElements function. - buffers.indexBufferType = hasVertexIndicesOverMaxUInt16 - ? 
gl.UNSIGNED_INT - : gl.UNSIGNED_SHORT; - } else { - // the index buffer is unused, remove it - if (indexBuffer) { - gl.deleteBuffer(indexBuffer); - buffers.indexBuffer = null; - } + const hasVertexIndicesOverMaxUInt16 = indices.some(i => i > 65535); + indexType = hasVertexIndicesOverMaxUInt16 ? Uint32Array : Uint16Array; } + this.renderer._ensureGeometryBuffers(buffers, indices, indexType); + return buffers; } @@ -92,23 +67,6 @@ export class GeometryBufferCache { delete this.cache[gid]; - const gl = this.renderer.GL; - if (buffers.indexBuffer) { - gl.deleteBuffer(buffers.indexBuffer); - } - - function freeBuffers(defs) { - for (const def of defs) { - if (buffers[def.dst]) { - gl.deleteBuffer(buffers[def.dst]); - buffers[def.dst] = null; - } - } - } - - // free all the buffers - freeBuffers(this.renderer.buffers.stroke); - freeBuffers(this.renderer.buffers.fill); - freeBuffers(this.renderer.buffers.user); + this.renderer._freeBuffers(buffers); } } diff --git a/src/webgl/light.js b/src/webgl/light.js index 21f9b74972..d90c80246b 100644 --- a/src/webgl/light.js +++ b/src/webgl/light.js @@ -5,7 +5,7 @@ * @requires core */ -import { RendererGL } from './p5.RendererGL'; +import { Renderer3D } from '../core/p5.Renderer3D'; import { Vector } from '../math/p5.Vector'; import { Color } from '../color/p5.Color'; @@ -1450,7 +1450,7 @@ function light(p5, fn){ }; - RendererGL.prototype.ambientLight = function(v1, v2, v3, a) { + Renderer3D.prototype.ambientLight = function(v1, v2, v3, a) { const color = this._pInst.color(...arguments); this.states.setValue('ambientLightColors', [...this.states.ambientLightColors]); @@ -1463,7 +1463,7 @@ function light(p5, fn){ this.states.setValue('enableLighting', true); }; - RendererGL.prototype.specularColor = function(v1, v2, v3) { + Renderer3D.prototype.specularColor = function(v1, v2, v3) { const color = this._pInst.color(...arguments); this.states.setValue('specularColors', [ @@ -1473,7 +1473,7 @@ function light(p5, fn){ ]); }; - 
RendererGL.prototype.directionalLight = function(v1, v2, v3, x, y, z) { + Renderer3D.prototype.directionalLight = function(v1, v2, v3, x, y, z) { let color; if (v1 instanceof Color) { color = v1; @@ -1514,7 +1514,7 @@ function light(p5, fn){ this.states.setValue('enableLighting', true); }; - RendererGL.prototype.pointLight = function(v1, v2, v3, x, y, z) { + Renderer3D.prototype.pointLight = function(v1, v2, v3, x, y, z) { let color; if (v1 instanceof Color) { color = v1; @@ -1553,20 +1553,20 @@ function light(p5, fn){ this.states.setValue('enableLighting', true); }; - RendererGL.prototype.imageLight = function(img) { + Renderer3D.prototype.imageLight = function(img) { // activeImageLight property is checked by _setFillUniforms // for sending uniforms to the fillshader this.states.setValue('activeImageLight', img); this.states.setValue('enableLighting', true); }; - RendererGL.prototype.lights = function() { + Renderer3D.prototype.lights = function() { const grayColor = this._pInst.color('rgb(128,128,128)'); this.ambientLight(grayColor); this.directionalLight(grayColor, 0, 0, -1); }; - RendererGL.prototype.lightFalloff = function( + Renderer3D.prototype.lightFalloff = function( constantAttenuation, linearAttenuation, quadraticAttenuation @@ -1607,7 +1607,7 @@ function light(p5, fn){ this.states.setValue('quadraticAttenuation', quadraticAttenuation); }; - RendererGL.prototype.spotLight = function( + Renderer3D.prototype.spotLight = function( v1, v2, v3, @@ -1620,6 +1620,8 @@ function light(p5, fn){ angle, concentration ) { + if (this.states.spotLightDiffuseColors.length / 3 >= 4) return; + let color, position, direction; const length = arguments.length; @@ -1777,18 +1779,26 @@ function light(p5, fn){ return; } this.states.setValue('spotLightDiffuseColors', [ + ...this.states.spotLightDiffuseColors, color._array[0], color._array[1], color._array[2] ]); this.states.setValue('spotLightSpecularColors', [ + ...this.states.spotLightSpecularColors, 
...this.states.specularColors ]); - this.states.setValue('spotLightPositions', [position.x, position.y, position.z]); + this.states.setValue('spotLightPositions', [ + ...this.states.spotLightPositions, + position.x, + position.y, + position.z + ]); direction.normalize(); this.states.setValue('spotLightDirections', [ + ...this.states.spotLightDirections, direction.x, direction.y, direction.z @@ -1808,13 +1818,13 @@ function light(p5, fn){ } angle = this._pInst._toRadians(angle); - this.states.setValue('spotLightAngle', [Math.cos(angle)]); - this.states.setValue('spotLightConc', [concentration]); + this.states.setValue('spotLightAngle', [...this.states.spotLightAngle, Math.cos(angle)]); + this.states.setValue('spotLightConc', [...this.states.spotLightConc, concentration]); this.states.setValue('enableLighting', true); }; - RendererGL.prototype.noLights = function() { + Renderer3D.prototype.noLights = function() { this.states.setValue('activeImageLight', null); this.states.setValue('enableLighting', false); diff --git a/src/webgl/material.js b/src/webgl/material.js index b511257ae4..60f01a3969 100644 --- a/src/webgl/material.js +++ b/src/webgl/material.js @@ -6,7 +6,7 @@ */ import * as constants from '../core/constants'; -import { RendererGL } from './p5.RendererGL'; +import { Renderer3D } from '../core/p5.Renderer3D'; import { Shader } from './p5.Shader'; import { request } from '../io/files'; import { Color } from '../color/p5.Color'; @@ -3662,131 +3662,7 @@ function material(p5, fn){ return this; }; - - /** - * @private blends colors according to color components. - * If alpha value is less than 1, or non-standard blendMode - * we need to enable blending on our gl context. - * @param {Number[]} color The currently set color, with values in 0-1 range - * @param {Boolean} [hasTransparency] Whether the shape being drawn has other - * transparency internally, e.g. 
via vertex colors - * @return {Number[]} Normalized numbers array - */ - RendererGL.prototype._applyColorBlend = function (colors, hasTransparency) { - const gl = this.GL; - - const isTexture = this.states.drawMode === constants.TEXTURE; - const doBlend = - hasTransparency || - this.states.userFillShader || - this.states.userStrokeShader || - this.states.userPointShader || - isTexture || - this.states.curBlendMode !== constants.BLEND || - colors[colors.length - 1] < 1.0 || - this._isErasing; - - if (doBlend !== this._isBlending) { - if ( - doBlend || - (this.states.curBlendMode !== constants.BLEND && - this.states.curBlendMode !== constants.ADD) - ) { - gl.enable(gl.BLEND); - } else { - gl.disable(gl.BLEND); - } - gl.depthMask(true); - this._isBlending = doBlend; - } - this._applyBlendMode(); - return colors; - }; - - /** - * @private sets blending in gl context to curBlendMode - * @param {Number[]} color [description] - * @return {Number[]} Normalized numbers array - */ - RendererGL.prototype._applyBlendMode = function () { - if (this._cachedBlendMode === this.states.curBlendMode) { - return; - } - const gl = this.GL; - switch (this.states.curBlendMode) { - case constants.BLEND: - gl.blendEquation(gl.FUNC_ADD); - gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA); - break; - case constants.ADD: - gl.blendEquation(gl.FUNC_ADD); - gl.blendFunc(gl.ONE, gl.ONE); - break; - case constants.REMOVE: - gl.blendEquation(gl.FUNC_ADD); - gl.blendFunc(gl.ZERO, gl.ONE_MINUS_SRC_ALPHA); - break; - case constants.MULTIPLY: - gl.blendEquation(gl.FUNC_ADD); - gl.blendFunc(gl.DST_COLOR, gl.ONE_MINUS_SRC_ALPHA); - break; - case constants.SCREEN: - gl.blendEquation(gl.FUNC_ADD); - gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_COLOR); - break; - case constants.EXCLUSION: - gl.blendEquationSeparate(gl.FUNC_ADD, gl.FUNC_ADD); - gl.blendFuncSeparate( - gl.ONE_MINUS_DST_COLOR, - gl.ONE_MINUS_SRC_COLOR, - gl.ONE, - gl.ONE - ); - break; - case constants.REPLACE: - gl.blendEquation(gl.FUNC_ADD); - 
gl.blendFunc(gl.ONE, gl.ZERO); - break; - case constants.SUBTRACT: - gl.blendEquationSeparate(gl.FUNC_REVERSE_SUBTRACT, gl.FUNC_ADD); - gl.blendFuncSeparate(gl.ONE, gl.ONE, gl.ONE, gl.ONE_MINUS_SRC_ALPHA); - break; - case constants.DARKEST: - if (this.blendExt) { - gl.blendEquationSeparate( - this.blendExt.MIN || this.blendExt.MIN_EXT, - gl.FUNC_ADD - ); - gl.blendFuncSeparate(gl.ONE, gl.ONE, gl.ONE, gl.ONE); - } else { - console.warn( - 'blendMode(DARKEST) does not work in your browser in WEBGL mode.' - ); - } - break; - case constants.LIGHTEST: - if (this.blendExt) { - gl.blendEquationSeparate( - this.blendExt.MAX || this.blendExt.MAX_EXT, - gl.FUNC_ADD - ); - gl.blendFuncSeparate(gl.ONE, gl.ONE, gl.ONE, gl.ONE); - } else { - console.warn( - 'blendMode(LIGHTEST) does not work in your browser in WEBGL mode.' - ); - } - break; - default: - console.error( - 'Oops! Somehow RendererGL set curBlendMode to an unsupported mode.' - ); - break; - } - this._cachedBlendMode = this.states.curBlendMode; - }; - - RendererGL.prototype.shader = function(s) { + Renderer3D.prototype.shader = function(s) { // Always set the shader as a fill shader this.states.setValue('userFillShader', s); this.states.setValue('_useNormalMaterial', false); @@ -3794,32 +3670,32 @@ function material(p5, fn){ s.setDefaultUniforms(); }; - RendererGL.prototype.strokeShader = function(s) { + Renderer3D.prototype.strokeShader = function(s) { this.states.setValue('userStrokeShader', s); s.ensureCompiledOnContext(this); s.setDefaultUniforms(); }; - RendererGL.prototype.imageShader = function(s) { + Renderer3D.prototype.imageShader = function(s) { this.states.setValue('userImageShader', s); s.ensureCompiledOnContext(this); s.setDefaultUniforms(); }; - RendererGL.prototype.resetShader = function() { + Renderer3D.prototype.resetShader = function() { this.states.setValue('userFillShader', null); this.states.setValue('userStrokeShader', null); this.states.setValue('userImageShader', null); }; - 
RendererGL.prototype.texture = function(tex) { + Renderer3D.prototype.texture = function(tex) { this.states.setValue('drawMode', constants.TEXTURE); this.states.setValue('_useNormalMaterial', false); this.states.setValue('_tex', tex); this.states.setValue('fillColor', new Color([1, 1, 1])); }; - RendererGL.prototype.normalMaterial = function(...args) { + Renderer3D.prototype.normalMaterial = function(...args) { this.states.setValue('drawMode', constants.FILL); this.states.setValue('_useSpecularMaterial', false); this.states.setValue('_useEmissiveMaterial', false); @@ -3829,23 +3705,23 @@ function material(p5, fn){ this.states.setValue('strokeColor', null); }; - // RendererGL.prototype.ambientMaterial = function(v1, v2, v3) { + // Renderer3D.prototype.ambientMaterial = function(v1, v2, v3) { // } - // RendererGL.prototype.emissiveMaterial = function(v1, v2, v3, a) { + // Renderer3D.prototype.emissiveMaterial = function(v1, v2, v3, a) { // } - // RendererGL.prototype.specularMaterial = function(v1, v2, v3, alpha) { + // Renderer3D.prototype.specularMaterial = function(v1, v2, v3, alpha) { // } - RendererGL.prototype.shininess = function(shine) { + Renderer3D.prototype.shininess = function(shine) { if (shine < 1) { shine = 1; } this.states.setValue('_useShininess', shine); }; - RendererGL.prototype.metalness = function(metallic) { + Renderer3D.prototype.metalness = function(metallic) { const metalMix = 1 - Math.exp(-metallic / 100); this.states.setValue('_useMetalness', metalMix); }; diff --git a/src/webgl/p5.Camera.js b/src/webgl/p5.Camera.js index 9455d6a26e..300d2f1d47 100644 --- a/src/webgl/p5.Camera.js +++ b/src/webgl/p5.Camera.js @@ -7,7 +7,7 @@ import { Matrix } from '../math/p5.Matrix'; import { Vector } from '../math/p5.Vector'; import { Quat } from './p5.Quat'; -import { RendererGL } from './p5.RendererGL'; +import { Renderer3D } from '../core/p5.Renderer3D'; class Camera { constructor(renderer) { @@ -190,6 +190,7 @@ class Camera { * */ perspective(fovy, 
aspect, near, far) { + const range = this._renderer.zClipRange(); this.cameraType = arguments.length > 0 ? 'custom' : 'default'; if (typeof fovy === 'undefined') { fovy = this.defaultCameraFOV; @@ -234,10 +235,21 @@ class Camera { const f = 1.0 / Math.tan(this.cameraFOV / 2); const nf = 1.0 / (this.cameraNear - this.cameraFar); + let A, B; + if (range[0] === 0) { + // WebGPU clip space, z in [0, 1] + A = far * nf; + B = far * near * nf; + } else { + // WebGL clip space, z in [-1, 1] + A = (far + near) * nf; + B = (2 * far * near) * nf; + } + this.projMatrix.set(f / aspect, 0, 0, 0, 0, -f * this.yScale, 0, 0, - 0, 0, (far + near) * nf, -1, - 0, 0, (2 * far * near) * nf, 0); + 0, 0, A, -1, + 0, 0, B, 0); if (this._isActive()) { this._renderer.states.setValue('uPMatrix', this._renderer.states.uPMatrix.clone()); @@ -2451,7 +2463,7 @@ function camera(p5, fn){ */ fn.linePerspective = function (enable) { // p5._validateParameters('linePerspective', arguments); - if (!(this._renderer instanceof RendererGL)) { + if (!(this._renderer instanceof Renderer3D)) { throw new Error('linePerspective() must be called in WebGL mode.'); } return this._renderer.linePerspective(enable); @@ -2944,15 +2956,15 @@ function camera(p5, fn){ */ p5.Camera = Camera; - RendererGL.prototype.camera = function(...args) { + Renderer3D.prototype.camera = function(...args) { this.states.curCamera.camera(...args); }; - RendererGL.prototype.perspective = function(...args) { + Renderer3D.prototype.perspective = function(...args) { this.states.curCamera.perspective(...args); }; - RendererGL.prototype.linePerspective = function(enable) { + Renderer3D.prototype.linePerspective = function(enable) { if (enable !== undefined) { // Set the line perspective if enable is provided this.states.curCamera.useLinePerspective = enable; @@ -2962,15 +2974,15 @@ function camera(p5, fn){ } }; - RendererGL.prototype.ortho = function(...args) { + Renderer3D.prototype.ortho = function(...args) { 
this.states.curCamera.ortho(...args); }; - RendererGL.prototype.frustum = function(...args) { + Renderer3D.prototype.frustum = function(...args) { this.states.curCamera.frustum(...args); }; - RendererGL.prototype.createCamera = function() { + Renderer3D.prototype.createCamera = function() { // compute default camera settings, then set a default camera const _cam = new Camera(this); _cam._computeCameraDefaultSettings(); @@ -2979,7 +2991,7 @@ function camera(p5, fn){ return _cam; }; - RendererGL.prototype.setCamera = function(cam) { + Renderer3D.prototype.setCamera = function(cam) { this.states.setValue('curCamera', cam); // set the projection matrix (which is not normally updated each frame) diff --git a/src/webgl/p5.Framebuffer.js b/src/webgl/p5.Framebuffer.js index a3d6875068..0839fb329c 100644 --- a/src/webgl/p5.Framebuffer.js +++ b/src/webgl/p5.Framebuffer.js @@ -5,11 +5,9 @@ import * as constants from '../core/constants'; import { RGB, RGBA } from '../color/creating_reading'; -import { checkWebGLCapabilities } from './p5.Texture'; -import { readPixelsWebGL, readPixelWebGL } from './p5.RendererGL'; +import { checkWebGLCapabilities } from './utils'; import { Camera } from './p5.Camera'; import { Texture } from './p5.Texture'; -import { Image } from '../image/p5.Image'; const constrain = (n, low, high) => Math.max(Math.min(n, high), low); @@ -52,7 +50,7 @@ class FramebufferTexture { } rawTexture() { - return this.framebuffer[this.property]; + return { texture: this.framebuffer[this.property] }; } } @@ -69,7 +67,7 @@ class Framebuffer { this.format = settings.format || constants.UNSIGNED_BYTE; this.channels = settings.channels || ( - this.renderer._pInst._glAttributes.alpha + this.renderer.defaultFramebufferAlpha() ? 
RGBA : RGB ); @@ -77,7 +75,7 @@ class Framebuffer { this.depthFormat = settings.depthFormat || constants.FLOAT; this.textureFiltering = settings.textureFiltering || constants.LINEAR; if (settings.antialias === undefined) { - this.antialiasSamples = this.renderer._pInst._glAttributes.antialias + this.antialiasSamples = this.renderer.defaultFramebufferAntialias() ? 2 : 0; } else if (typeof settings.antialias === 'number') { @@ -86,13 +84,11 @@ class Framebuffer { this.antialiasSamples = settings.antialias ? 2 : 0; } this.antialias = this.antialiasSamples > 0; - if (this.antialias && this.renderer.webglVersion !== constants.WEBGL2) { - console.warn('Antialiasing is unsupported in a WebGL 1 context'); + if (this.antialias && !this.renderer.supportsFramebufferAntialias()) { + console.warn('Framebuffer antialiasing is unsupported in this context'); this.antialias = false; } this.density = settings.density || this.renderer._pixelDensity; - const gl = this.renderer.GL; - this.gl = gl; if (settings.width && settings.height) { const dimensions = this.renderer._adjustDimensions(settings.width, settings.height); @@ -111,7 +107,8 @@ class Framebuffer { this.height = this.renderer.height; this._autoSized = true; } - this._checkIfFormatsAvailable(); + // Let renderer validate and adjust formats for this context + this.renderer.validateFramebufferFormats(this); if (settings.stencil && !this.useDepth) { console.warn('A stencil buffer can only be used if also using depth. Since the framebuffer has no depth buffer, the stencil buffer will be ignored.'); @@ -119,16 +116,8 @@ class Framebuffer { this.useStencil = this.useDepth && (settings.stencil === undefined ? 
true : settings.stencil); - this.framebuffer = gl.createFramebuffer(); - if (!this.framebuffer) { - throw new Error('Unable to create a framebuffer'); - } - if (this.antialias) { - this.aaFramebuffer = gl.createFramebuffer(); - if (!this.aaFramebuffer) { - throw new Error('Unable to create a framebuffer for antialiasing'); - } - } + // Let renderer create framebuffer resources with antialiasing support + this.renderer.createFramebufferResources(this); this._recreateTextures(); @@ -465,6 +454,10 @@ class Framebuffer { } } + _deleteTextures() { + this.renderer.deleteFramebufferTextures(this); + } + /** * Creates new textures and renderbuffers given the current size of the * framebuffer. @@ -472,121 +465,14 @@ class Framebuffer { * @private */ _recreateTextures() { - const gl = this.gl; - this._updateSize(); - const prevBoundTexture = gl.getParameter(gl.TEXTURE_BINDING_2D); - const prevBoundFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING); - - const colorTexture = gl.createTexture(); - if (!colorTexture) { - throw new Error('Unable to create color texture'); - } - gl.bindTexture(gl.TEXTURE_2D, colorTexture); - const colorFormat = this._glColorFormat(); - gl.texImage2D( - gl.TEXTURE_2D, - 0, - colorFormat.internalFormat, - this.width * this.density, - this.height * this.density, - 0, - colorFormat.format, - colorFormat.type, - null - ); - this.colorTexture = colorTexture; - gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer); - gl.framebufferTexture2D( - gl.FRAMEBUFFER, - gl.COLOR_ATTACHMENT0, - gl.TEXTURE_2D, - colorTexture, - 0 - ); - - if (this.useDepth) { - // Create the depth texture - const depthTexture = gl.createTexture(); - if (!depthTexture) { - throw new Error('Unable to create depth texture'); - } - const depthFormat = this._glDepthFormat(); - gl.bindTexture(gl.TEXTURE_2D, depthTexture); - gl.texImage2D( - gl.TEXTURE_2D, - 0, - depthFormat.internalFormat, - this.width * this.density, - this.height * this.density, - 0, - depthFormat.format, - 
depthFormat.type, - null - ); - - gl.framebufferTexture2D( - gl.FRAMEBUFFER, - this.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, - gl.TEXTURE_2D, - depthTexture, - 0 - ); - this.depthTexture = depthTexture; - } - - // Create separate framebuffer for antialiasing - if (this.antialias) { - this.colorRenderbuffer = gl.createRenderbuffer(); - gl.bindRenderbuffer(gl.RENDERBUFFER, this.colorRenderbuffer); - gl.renderbufferStorageMultisample( - gl.RENDERBUFFER, - Math.max( - 0, - Math.min(this.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) - ), - colorFormat.internalFormat, - this.width * this.density, - this.height * this.density - ); - - if (this.useDepth) { - const depthFormat = this._glDepthFormat(); - this.depthRenderbuffer = gl.createRenderbuffer(); - gl.bindRenderbuffer(gl.RENDERBUFFER, this.depthRenderbuffer); - gl.renderbufferStorageMultisample( - gl.RENDERBUFFER, - Math.max( - 0, - Math.min(this.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) - ), - depthFormat.internalFormat, - this.width * this.density, - this.height * this.density - ); - } - - gl.bindFramebuffer(gl.FRAMEBUFFER, this.aaFramebuffer); - gl.framebufferRenderbuffer( - gl.FRAMEBUFFER, - gl.COLOR_ATTACHMENT0, - gl.RENDERBUFFER, - this.colorRenderbuffer - ); - if (this.useDepth) { - gl.framebufferRenderbuffer( - gl.FRAMEBUFFER, - this.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, - gl.RENDERBUFFER, - this.depthRenderbuffer - ); - } - } + // Let renderer handle texture creation and framebuffer setup + this.renderer.recreateFramebufferTextures(this); if (this.useDepth) { this.depth = new FramebufferTexture(this, 'depthTexture'); - const depthFilter = gl.NEAREST; + const depthFilter = constants.NEAREST; this.depthP5Texture = new Texture( this.renderer, this.depth, @@ -600,8 +486,8 @@ class Framebuffer { this.color = new FramebufferTexture(this, 'colorTexture'); const filter = this.textureFiltering === constants.LINEAR - ? gl.LINEAR - : gl.NEAREST; + ? 
constants.LINEAR + : constants.NEAREST; this.colorP5Texture = new Texture( this.renderer, this.color, @@ -611,131 +497,6 @@ class Framebuffer { } ); this.renderer.textures.set(this.color, this.colorP5Texture); - - gl.bindTexture(gl.TEXTURE_2D, prevBoundTexture); - gl.bindFramebuffer(gl.FRAMEBUFFER, prevBoundFramebuffer); - } - - /** - * To create a WebGL texture, one needs to supply three pieces of information: - * the type (the data type each channel will be stored as, e.g. int or float), - * the format (the color channels that will each be stored in the previously - * specified type, e.g. rgb or rgba), and the internal format (the specifics - * of how data for each channel, in the aforementioned type, will be packed - * together, such as how many bits to use, e.g. RGBA32F or RGB565.) - * - * The format and channels asked for by the user hint at what these values - * need to be, and the WebGL version affects what options are avaiable. - * This method returns the values for these three properties, given the - * framebuffer's settings. - * - * @private - */ - _glColorFormat() { - let type, format, internalFormat; - const gl = this.gl; - - if (this.format === constants.FLOAT) { - type = gl.FLOAT; - } else if (this.format === constants.HALF_FLOAT) { - type = this.renderer.webglVersion === constants.WEBGL2 - ? 
gl.HALF_FLOAT - : gl.getExtension('OES_texture_half_float').HALF_FLOAT_OES; - } else { - type = gl.UNSIGNED_BYTE; - } - - if (this.channels === RGBA) { - format = gl.RGBA; - } else { - format = gl.RGB; - } - - if (this.renderer.webglVersion === constants.WEBGL2) { - // https://webgl2fundamentals.org/webgl/lessons/webgl-data-textures.html - const table = { - [gl.FLOAT]: { - [gl.RGBA]: gl.RGBA32F - // gl.RGB32F is not available in Firefox without an alpha channel - }, - [gl.HALF_FLOAT]: { - [gl.RGBA]: gl.RGBA16F - // gl.RGB16F is not available in Firefox without an alpha channel - }, - [gl.UNSIGNED_BYTE]: { - [gl.RGBA]: gl.RGBA8, // gl.RGBA4 - [gl.RGB]: gl.RGB8 // gl.RGB565 - } - }; - internalFormat = table[type][format]; - } else if (this.format === constants.HALF_FLOAT) { - internalFormat = gl.RGBA; - } else { - internalFormat = format; - } - - return { internalFormat, format, type }; - } - - /** - * To create a WebGL texture, one needs to supply three pieces of information: - * the type (the data type each channel will be stored as, e.g. int or float), - * the format (the color channels that will each be stored in the previously - * specified type, e.g. rgb or rgba), and the internal format (the specifics - * of how data for each channel, in the aforementioned type, will be packed - * together, such as how many bits to use, e.g. RGBA32F or RGB565.) - * - * This method takes into account the settings asked for by the user and - * returns values for these three properties that can be used for the - * texture storing depth information. 
- * - * @private - */ - _glDepthFormat() { - let type, format, internalFormat; - const gl = this.gl; - - if (this.useStencil) { - if (this.depthFormat === constants.FLOAT) { - type = gl.FLOAT_32_UNSIGNED_INT_24_8_REV; - } else if (this.renderer.webglVersion === constants.WEBGL2) { - type = gl.UNSIGNED_INT_24_8; - } else { - type = gl.getExtension('WEBGL_depth_texture').UNSIGNED_INT_24_8_WEBGL; - } - } else { - if (this.depthFormat === constants.FLOAT) { - type = gl.FLOAT; - } else { - type = gl.UNSIGNED_INT; - } - } - - if (this.useStencil) { - format = gl.DEPTH_STENCIL; - } else { - format = gl.DEPTH_COMPONENT; - } - - if (this.useStencil) { - if (this.depthFormat === constants.FLOAT) { - internalFormat = gl.DEPTH32F_STENCIL8; - } else if (this.renderer.webglVersion === constants.WEBGL2) { - internalFormat = gl.DEPTH24_STENCIL8; - } else { - internalFormat = gl.DEPTH_STENCIL; - } - } else if (this.renderer.webglVersion === constants.WEBGL2) { - if (this.depthFormat === constants.FLOAT) { - internalFormat = gl.DEPTH_COMPONENT32F; - } else { - internalFormat = gl.DEPTH_COMPONENT24; - } - } else { - internalFormat = gl.DEPTH_COMPONENT; - } - - return { internalFormat, format, type }; } /** @@ -774,17 +535,7 @@ class Framebuffer { * @private */ _handleResize() { - const oldColor = this.color; - const oldDepth = this.depth; - const oldColorRenderbuffer = this.colorRenderbuffer; - const oldDepthRenderbuffer = this.depthRenderbuffer; - - this._deleteTexture(oldColor); - if (oldDepth) this._deleteTexture(oldDepth); - const gl = this.gl; - if (oldColorRenderbuffer) gl.deleteRenderbuffer(oldColorRenderbuffer); - if (oldDepthRenderbuffer) gl.deleteRenderbuffer(oldDepthRenderbuffer); - + this._deleteTextures(); this._recreateTextures(); this.defaultCamera._resize(); } @@ -912,20 +663,6 @@ class Framebuffer { return cam; } - /** - * Given a raw texture wrapper, delete its stored texture from WebGL memory, - * and remove it from p5's list of active textures. 
- * - * @param {p5.FramebufferTexture} texture - * @private - */ - _deleteTexture(texture) { - const gl = this.gl; - gl.deleteTexture(texture.rawTexture()); - - this.renderer.textures.delete(texture); - } - /** * Deletes the framebuffer from GPU memory. * @@ -995,19 +732,11 @@ class Framebuffer { * */ remove() { - const gl = this.gl; - this._deleteTexture(this.color); - if (this.depth) this._deleteTexture(this.depth); - gl.deleteFramebuffer(this.framebuffer); - if (this.aaFramebuffer) { - gl.deleteFramebuffer(this.aaFramebuffer); - } - if (this.depthRenderbuffer) { - gl.deleteRenderbuffer(this.depthRenderbuffer); - } - if (this.colorRenderbuffer) { - gl.deleteRenderbuffer(this.colorRenderbuffer); - } + this._deleteTextures(); + + // Let renderer clean up framebuffer resources + this.renderer.deleteFramebufferResources(this); + this.renderer.framebuffers.delete(this); } @@ -1094,14 +823,7 @@ class Framebuffer { * @private */ _framebufferToBind() { - if (this.antialias) { - // If antialiasing, draw to an antialiased renderbuffer rather - // than directly to the texture. In end() we will copy from the - // renderbuffer to the texture. 
- return this.aaFramebuffer; - } else { - return this.framebuffer; - } + return this.renderer.getFramebufferToBind(this); } /** @@ -1110,40 +832,9 @@ class Framebuffer { * @param {'colorTexutre'|'depthTexture'} property The property to update */ _update(property) { - if (this.dirty[property] && this.antialias) { - const gl = this.gl; - gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this.aaFramebuffer); - gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this.framebuffer); - const partsToCopy = { - colorTexture: [gl.COLOR_BUFFER_BIT, this.colorP5Texture.glMagFilter] - }; - if (this.useDepth) { - partsToCopy.depthTexture = [ - gl.DEPTH_BUFFER_BIT, - this.depthP5Texture.glMagFilter - ]; - } - const [flag, filter] = partsToCopy[property]; - gl.blitFramebuffer( - 0, - 0, - this.width * this.density, - this.height * this.density, - 0, - 0, - this.width * this.density, - this.height * this.density, - flag, - filter - ); + if (this.dirty[property]) { + this.renderer.updateFramebufferTexture(this, property); this.dirty[property] = false; - - const activeFbo = this.renderer.activeFramebuffer(); - if (activeFbo) { - gl.bindFramebuffer(gl.FRAMEBUFFER, activeFbo._framebufferToBind()); - } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); - } } } @@ -1153,8 +844,7 @@ class Framebuffer { * @private */ _beforeBegin() { - const gl = this.gl; - gl.bindFramebuffer(gl.FRAMEBUFFER, this._framebufferToBind()); + this.renderer.bindFramebuffer(this); this.renderer.viewport( this.width * this.density, this.height * this.density @@ -1230,7 +920,7 @@ class Framebuffer { if (this.prevFramebuffer) { this.prevFramebuffer._beforeBegin(); } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); + this.renderer.bindFramebuffer(null); this.renderer.viewport( this.renderer._origViewport.width, this.renderer._origViewport.height @@ -1347,25 +1037,19 @@ class Framebuffer { */ loadPixels() { this._update('colorTexture'); - const gl = this.gl; - const prevFramebuffer = this.renderer.activeFramebuffer(); - 
gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer); - const colorFormat = this._glColorFormat(); - this.pixels = readPixelsWebGL( - this.pixels, - gl, - this.framebuffer, - 0, - 0, - this.width * this.density, - this.height * this.density, - colorFormat.format, - colorFormat.type - ); - if (prevFramebuffer) { - gl.bindFramebuffer(gl.FRAMEBUFFER, prevFramebuffer._framebufferToBind()); + const result = this.renderer.readFramebufferPixels(this); + + // Check if renderer returned a Promise (WebGPU) or data directly (WebGL) + if (result && typeof result.then === 'function') { + // WebGPU async case - return Promise + return result.then(pixels => { + this.pixels = pixels; + return pixels; + }); } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); + // WebGL sync case - assign directly + this.pixels = result; + return result; } } @@ -1407,7 +1091,7 @@ class Framebuffer { get(x, y, w, h) { this._update('colorTexture'); // p5._validateParameters('p5.Framebuffer.get', arguments); - const colorFormat = this._glColorFormat(); + if (x === undefined && y === undefined) { x = 0; y = 0; @@ -1422,14 +1106,7 @@ class Framebuffer { y = constrain(y, 0, this.height - 1); } - return readPixelWebGL( - this.gl, - this.framebuffer, - x * this.density, - y * this.density, - colorFormat.format, - colorFormat.type - ); + return this.renderer.readFramebufferPixel(this, x * this.density, y * this.density); } x = constrain(x, 0, this.width - 1); @@ -1437,60 +1114,7 @@ class Framebuffer { w = constrain(w, 1, this.width - x); h = constrain(h, 1, this.height - y); - const rawData = readPixelsWebGL( - undefined, - this.gl, - this.framebuffer, - x * this.density, - y * this.density, - w * this.density, - h * this.density, - colorFormat.format, - colorFormat.type - ); - // Framebuffer data might be either a Uint8Array or Float32Array - // depending on its format, and it may or may not have an alpha channel. 
- // To turn it into an image, we have to normalize the data into a - // Uint8ClampedArray with alpha. - const fullData = new Uint8ClampedArray( - w * h * this.density * this.density * 4 - ); - - // Default channels that aren't in the framebuffer (e.g. alpha, if the - // framebuffer is in RGB mode instead of RGBA) to 255 - fullData.fill(255); - - const channels = colorFormat.type === this.gl.RGB ? 3 : 4; - for (let y = 0; y < h * this.density; y++) { - for (let x = 0; x < w * this.density; x++) { - for (let channel = 0; channel < 4; channel++) { - const idx = (y * w * this.density + x) * 4 + channel; - if (channel < channels) { - // Find the index of this pixel in `rawData`, which might have a - // different number of channels - const rawDataIdx = channels === 4 - ? idx - : (y * w * this.density + x) * channels + channel; - fullData[idx] = rawData[rawDataIdx]; - } - } - } - } - - // Create an image from the data - const region = new Image(w * this.density, h * this.density); - region.imageData = region.canvas.getContext('2d').createImageData( - region.width, - region.height - ); - region.imageData.data.set(fullData); - region.pixels = region.imageData.data; - region.updatePixels(); - if (this.density !== 1) { - // TODO: support get() at a pixel density > 1 - region.resize(w, h); - } - return region; + return this.renderer.readFramebufferRegion(this, x, y, w, h); } /** @@ -1542,85 +1166,8 @@ class Framebuffer { * */ updatePixels() { - const gl = this.gl; - this.colorP5Texture.bindTexture(); - const colorFormat = this._glColorFormat(); - - const channels = colorFormat.format === gl.RGBA ? 4 : 3; - const len = - this.width * this.height * this.density * this.density * channels; - const TypedArrayClass = colorFormat.type === gl.UNSIGNED_BYTE - ? Uint8Array - : Float32Array; - if ( - !(this.pixels instanceof TypedArrayClass) || this.pixels.length !== len - ) { - throw new Error( - 'The pixels array has not been set correctly. 
Please call loadPixels() before updatePixels().' - ); - } - - gl.texImage2D( - gl.TEXTURE_2D, - 0, - colorFormat.internalFormat, - this.width * this.density, - this.height * this.density, - 0, - colorFormat.format, - colorFormat.type, - this.pixels - ); - this.colorP5Texture.unbindTexture(); - this.dirty.colorTexture = false; - - const prevFramebuffer = this.renderer.activeFramebuffer(); - if (this.antialias) { - // We need to make sure the antialiased framebuffer also has the updated - // pixels so that if more is drawn to it, it goes on top of the updated - // pixels instead of replacing them. - // We can't blit the framebuffer to the multisampled antialias - // framebuffer to leave both in the same state, so instead we have - // to use image() to put the framebuffer texture onto the antialiased - // framebuffer. - this.begin(); - this.renderer.push(); - // this.renderer.imageMode(constants.CENTER); - this.renderer.states.setValue('imageMode', constants.CORNER); - this.renderer.setCamera(this.filterCamera); - this.renderer.resetMatrix(); - this.renderer.states.setValue('strokeColor', null); - this.renderer.clear(); - this.renderer._drawingFilter = true; - this.renderer.image( - this, - 0, 0, - this.width, this.height, - -this.renderer.width / 2, -this.renderer.height / 2, - this.renderer.width, this.renderer.height - ); - this.renderer._drawingFilter = false; - this.renderer.pop(); - if (this.useDepth) { - gl.clearDepth(1); - gl.clear(gl.DEPTH_BUFFER_BIT); - } - this.end(); - } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer); - if (this.useDepth) { - gl.clearDepth(1); - gl.clear(gl.DEPTH_BUFFER_BIT); - } - if (prevFramebuffer) { - gl.bindFramebuffer( - gl.FRAMEBUFFER, - prevFramebuffer._framebufferToBind() - ); - } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); - } - } + // Let renderer handle the pixel update process + this.renderer.updateFramebufferPixels(this); } } diff --git a/src/webgl/p5.Geometry.js b/src/webgl/p5.Geometry.js index 
a5691e7854..7dd8cb0522 100644 --- a/src/webgl/p5.Geometry.js +++ b/src/webgl/p5.Geometry.js @@ -1420,6 +1420,7 @@ class Geometry { for (let i = 0; i < this.edges.length; i++) { const prevEdge = this.edges[i - 1]; const currEdge = this.edges[i]; + const isPoint = currEdge[0] === currEdge[1]; const begin = this.vertices[currEdge[0]]; const end = this.vertices[currEdge[1]]; const prevColor = (this.vertexStrokeColors.length > 0 && prevEdge) @@ -1440,10 +1441,12 @@ class Geometry { (currEdge[1] + 1) * 4 ) : [0, 0, 0, 0]; - const dir = end - .copy() - .sub(begin) - .normalize(); + const dir = isPoint + ? new Vector(0, 1, 0) + : end + .copy() + .sub(begin) + .normalize(); const dirOK = dir.magSq() > 0; if (dirOK) { this._addSegment(begin, end, fromColor, toColor, dir); @@ -1463,6 +1466,9 @@ class Geometry { this._addJoin(begin, lastValidDir, dir, fromColor); } } + } else if (isPoint) { + this._addCap(begin, dir.copy().mult(-1), fromColor); + this._addCap(begin, dir, fromColor); } else { // Start a new line if (dirOK && !connected.has(currEdge[0])) { @@ -1484,7 +1490,7 @@ class Geometry { }); } } - if (lastValidDir && !connected.has(prevEdge[1])) { + if (!isPoint && lastValidDir && !connected.has(prevEdge[1])) { const existingCap = potentialCaps.get(prevEdge[1]); if (existingCap) { this._addJoin( diff --git a/src/webgl/p5.RenderBuffer.js b/src/webgl/p5.RenderBuffer.js index 6beaa690a5..1504e90900 100644 --- a/src/webgl/p5.RenderBuffer.js +++ b/src/webgl/p5.RenderBuffer.js @@ -8,6 +8,11 @@ class RenderBuffer { this.map = map; // optional, a transformation function to apply to src } + default(cb) { + this.default = cb; + return this; + } + /** * Enables and binds the buffers used by shader when the appropriate data exists in geometry. * Must always be done prior to drawing geometry in WebGL. 
@@ -16,50 +21,7 @@ class RenderBuffer { * @private */ _prepareBuffer(geometry, shader) { - const attributes = shader.attributes; - const gl = this._renderer.GL; - const glBuffers = this._renderer._getOrMakeCachedBuffers(geometry); - - // loop through each of the buffer definitions - const attr = attributes[this.attr]; - if (!attr) { - return; - } - // check if the geometry has the appropriate source array - let buffer = glBuffers[this.dst]; - const src = geometry[this.src]; - if (src && src.length > 0) { - // check if we need to create the GL buffer - const createBuffer = !buffer; - if (createBuffer) { - // create and remember the buffer - glBuffers[this.dst] = buffer = gl.createBuffer(); - } - // bind the buffer - gl.bindBuffer(gl.ARRAY_BUFFER, buffer); - - // check if we need to fill the buffer with data - if (createBuffer || geometry.dirtyFlags[this.src] !== false) { - const map = this.map; - // get the values from the geometry, possibly transformed - const values = map ? map(src) : src; - // fill the buffer with the values - this._renderer._bindBuffer(buffer, gl.ARRAY_BUFFER, values); - // mark the geometry's source array as clean - geometry.dirtyFlags[this.src] = false; - } - // enable the attribute - shader.enableAttrib(attr, this.size); - } else { - const loc = attr.location; - if (loc === -1 || !this._renderer.registerEnabled.has(loc)) { - return; - } - // Disable register corresponding to unused attribute - gl.disableVertexAttribArray(loc); - // Record register availability - this._renderer.registerEnabled.delete(loc); - } + this._renderer._prepareBuffer(this, geometry, shader); } } diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index 3705bed131..77c8aa57de 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -1,75 +1,54 @@ import * as constants from '../core/constants'; -import GeometryBuilder from './GeometryBuilder'; -import { Renderer } from '../core/p5.Renderer'; -import { Matrix } from '../math/p5.Matrix'; 
-import { Camera } from './p5.Camera'; -import { Vector } from '../math/p5.Vector'; -import { RenderBuffer } from './p5.RenderBuffer'; -import { DataArray } from './p5.DataArray'; +import { + getWebGLShaderAttributes, + getWebGLUniformMetadata, + populateGLSLHooks, + readPixelsWebGL, + readPixelWebGL, + setWebGLTextureParams, + setWebGLUniformValue, + checkWebGLCapabilities +} from './utils'; +import { Renderer3D, getStrokeDefs } from '../core/p5.Renderer3D'; import { Shader } from './p5.Shader'; -import { Image } from '../image/p5.Image'; -import { Texture, MipmapTexture } from './p5.Texture'; +import { MipmapTexture } from './p5.Texture'; import { Framebuffer } from './p5.Framebuffer'; -import { Graphics } from '../core/p5.Graphics'; -import { Element } from '../dom/p5.Element'; -import { ShapeBuilder } from './ShapeBuilder'; -import { GeometryBufferCache } from './GeometryBufferCache'; -import { filterParamDefaults } from '../image/const'; - -import filterBaseVert from './shaders/filters/base.vert'; -import lightingShader from './shaders/lighting.glsl'; -import webgl2CompatibilityShader from './shaders/webgl2Compatibility.glsl'; -import normalVert from './shaders/normal.vert'; -import normalFrag from './shaders/normal.frag'; -import basicFrag from './shaders/basic.frag'; -import sphereMappingFrag from './shaders/sphereMapping.frag'; -import lightVert from './shaders/light.vert'; -import lightTextureFrag from './shaders/light_texture.frag'; -import phongVert from './shaders/phong.vert'; -import phongFrag from './shaders/phong.frag'; -import fontVert from './shaders/font.vert'; -import fontFrag from './shaders/font.frag'; -import lineVert from './shaders/line.vert'; -import lineFrag from './shaders/line.frag'; -import pointVert from './shaders/point.vert'; -import pointFrag from './shaders/point.frag'; -import imageLightVert from './shaders/imageLight.vert'; -import imageLightDiffusedFrag from './shaders/imageLightDiffused.frag'; -import imageLightSpecularFrag 
from './shaders/imageLightSpecular.frag'; - -import filterBaseFrag from './shaders/filters/base.frag'; -import filterGrayFrag from './shaders/filters/gray.frag'; -import filterErodeFrag from './shaders/filters/erode.frag'; -import filterDilateFrag from './shaders/filters/dilate.frag'; -import filterBlurFrag from './shaders/filters/blur.frag'; -import filterPosterizeFrag from './shaders/filters/posterize.frag'; -import filterOpaqueFrag from './shaders/filters/opaque.frag'; -import filterInvertFrag from './shaders/filters/invert.frag'; -import filterThresholdFrag from './shaders/filters/threshold.frag'; -import filterShaderVert from './shaders/filters/default.vert'; -import { PrimitiveToVerticesConverter } from '../shape/custom_shapes'; -import { Color } from '../color/p5.Color'; - -const STROKE_CAP_ENUM = {}; -const STROKE_JOIN_ENUM = {}; -let lineDefs = ''; -const defineStrokeCapEnum = function (key, val) { - lineDefs += `#define STROKE_CAP_${key} ${val}\n`; - STROKE_CAP_ENUM[constants[key]] = val; -}; -const defineStrokeJoinEnum = function (key, val) { - lineDefs += `#define STROKE_JOIN_${key} ${val}\n`; - STROKE_JOIN_ENUM[constants[key]] = val; -}; +import { RGB, RGBA } from '../color/creating_reading'; +import { Image } from '../image/p5.Image'; -// Define constants in line shaders for each type of cap/join, and also record -// the values in JS objects -defineStrokeCapEnum('ROUND', 0); -defineStrokeCapEnum('PROJECT', 1); -defineStrokeCapEnum('SQUARE', 2); -defineStrokeJoinEnum('ROUND', 0); -defineStrokeJoinEnum('MITER', 1); -defineStrokeJoinEnum('BEVEL', 2); +import filterBaseVert from "./shaders/filters/base.vert"; +import lightingShader from "./shaders/lighting.glsl"; +import webgl2CompatibilityShader from "./shaders/webgl2Compatibility.glsl"; +import normalVert from "./shaders/normal.vert"; +import normalFrag from "./shaders/normal.frag"; +import basicFrag from "./shaders/basic.frag"; +import sphereMappingFrag from "./shaders/sphereMapping.frag"; +import 
lightVert from "./shaders/light.vert"; +import lightTextureFrag from "./shaders/light_texture.frag"; +import phongVert from "./shaders/phong.vert"; +import phongFrag from "./shaders/phong.frag"; +import fontVert from "./shaders/font.vert"; +import fontFrag from "./shaders/font.frag"; +import lineVert from "./shaders/line.vert"; +import lineFrag from "./shaders/line.frag"; +import pointVert from "./shaders/point.vert"; +import pointFrag from "./shaders/point.frag"; +import imageLightVert from "./shaders/imageLight.vert"; +import imageLightDiffusedFrag from "./shaders/imageLightDiffused.frag"; +import imageLightSpecularFrag from "./shaders/imageLightSpecular.frag"; + +import filterBaseFrag from "./shaders/filters/base.frag"; +import filterGrayFrag from "./shaders/filters/gray.frag"; +import filterErodeFrag from "./shaders/filters/erode.frag"; +import filterDilateFrag from "./shaders/filters/dilate.frag"; +import filterBlurFrag from "./shaders/filters/blur.frag"; +import filterPosterizeFrag from "./shaders/filters/posterize.frag"; +import filterOpaqueFrag from "./shaders/filters/opaque.frag"; +import filterInvertFrag from "./shaders/filters/invert.frag"; +import filterThresholdFrag from "./shaders/filters/threshold.frag"; +import filterShaderVert from "./shaders/filters/default.vert"; + +const { lineDefs } = getStrokeDefs((n, v) => `#define ${n} ${v}\n`); const defaultShaders = { normalVert, @@ -90,7 +69,7 @@ const defaultShaders = { imageLightDiffusedFrag, imageLightSpecularFrag, filterBaseVert, - filterBaseFrag + filterBaseFrag, }; let sphereMapping = defaultShaders.sphereMappingFrag; for (const key in defaultShaders) { @@ -105,7 +84,7 @@ const filterShaderFrags = { [constants.POSTERIZE]: filterPosterizeFrag, [constants.OPAQUE]: filterOpaqueFrag, [constants.INVERT]: filterInvertFrag, - [constants.THRESHOLD]: filterThresholdFrag + [constants.THRESHOLD]: filterThresholdFrag, }; /** @@ -116,322 +95,15 @@ const filterShaderFrags = { * @todo extend class to include 
public method for offscreen * rendering (FBO). */ -class RendererGL extends Renderer { - constructor(pInst, w, h, isMainCanvas, elt, attr) { - super(pInst, w, h, isMainCanvas); - - // Create new canvas - this.canvas = this.elt = elt || document.createElement('canvas'); - this._setAttributeDefaults(pInst); - this._initContext(); - // This redundant property is useful in reminding you that you are - // interacting with WebGLRenderingContext, still worth considering future removal - this.GL = this.drawingContext; - - if (this._isMainCanvas) { - // for pixel method sharing with pimage - this._pInst._curElement = this; - this._pInst.canvas = this.canvas; - } else { - // hide if offscreen buffer by default - this.canvas.style.display = 'none'; - } - this.elt.id = 'defaultCanvas0'; - this.elt.classList.add('p5Canvas'); - - // Set and return p5.Element - this.wrappedElt = new Element(this.elt, this._pInst); - - // Extend renderer with methods of p5.Element with getters - for (const p of Object.getOwnPropertyNames(Element.prototype)) { - if (p !== 'constructor' && p[0] !== '_') { - Object.defineProperty(this, p, { - get() { - return this.wrappedElt[p]; - } - }); - } - } - - const dimensions = this._adjustDimensions(w, h); - w = dimensions.adjustedWidth; - h = dimensions.adjustedHeight; - - this.width = w; - this.height = h; - - // Set canvas size - this.elt.width = w * this._pixelDensity; - this.elt.height = h * this._pixelDensity; - this.elt.style.width = `${w}px`; - this.elt.style.height = `${h}px`; - this._origViewport = { - width: this.GL.drawingBufferWidth, - height: this.GL.drawingBufferHeight - }; - this.viewport(this._origViewport.width, this._origViewport.height); +class RendererGL extends Renderer3D { + constructor(pInst, w, h, isMainCanvas, elt) { + super(pInst, w, h, isMainCanvas, elt); - // Attach canvas element to DOM - if (this._pInst._userNode) { - // user input node case - this._pInst._userNode.appendChild(this.elt); - } else { - //create main element - if 
(document.getElementsByTagName('main').length === 0) { - let m = document.createElement('main'); - document.body.appendChild(m); - } - //append canvas to main - document.getElementsByTagName('main')[0].appendChild(this.elt); - } - - this.isP3D = true; //lets us know we're in 3d mode - - // When constructing a new Geometry, this will represent the builder - this.geometryBuilder = undefined; - - // Push/pop state - this.states.uModelMatrix = new Matrix(4); - this.states.uViewMatrix = new Matrix(4); - this.states.uPMatrix = new Matrix(4); - - this.states.curCamera = new Camera(this); - this.states.uPMatrix.set(this.states.curCamera.projMatrix); - this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); - - this.states.enableLighting = false; - this.states.ambientLightColors = []; - this.states.specularColors = [1, 1, 1]; - this.states.directionalLightDirections = []; - this.states.directionalLightDiffuseColors = []; - this.states.directionalLightSpecularColors = []; - this.states.pointLightPositions = []; - this.states.pointLightDiffuseColors = []; - this.states.pointLightSpecularColors = []; - this.states.spotLightPositions = []; - this.states.spotLightDirections = []; - this.states.spotLightDiffuseColors = []; - this.states.spotLightSpecularColors = []; - this.states.spotLightAngle = []; - this.states.spotLightConc = []; - this.states.activeImageLight = null; - - this.states.curFillColor = [1, 1, 1, 1]; - this.states.curAmbientColor = [1, 1, 1, 1]; - this.states.curSpecularColor = [0, 0, 0, 0]; - this.states.curEmissiveColor = [0, 0, 0, 0]; - this.states.curStrokeColor = [0, 0, 0, 1]; - - this.states.curBlendMode = constants.BLEND; - - this.states._hasSetAmbient = false; - this.states._useSpecularMaterial = false; - this.states._useEmissiveMaterial = false; - this.states._useNormalMaterial = false; - this.states._useShininess = 1; - this.states._useMetalness = 0; - - this.states.tint = [255, 255, 255, 255]; - - this.states.constantAttenuation = 1; - 
this.states.linearAttenuation = 0; - this.states.quadraticAttenuation = 0; - - this.states._currentNormal = new Vector(0, 0, 1); - - this.states.drawMode = constants.FILL; - - this.states._tex = null; - this.states.textureMode = constants.IMAGE; - this.states.textureWrapX = constants.CLAMP; - this.states.textureWrapY = constants.CLAMP; - - // erasing - this._isErasing = false; - - // simple lines - this._simpleLines = false; - - // clipping - this._clipDepths = []; - this._isClipApplied = false; - this._stencilTestOn = false; - - this.mixedAmbientLight = []; - this.mixedSpecularColor = []; - - // p5.framebuffer for this are calculated in getDiffusedTexture function - this.diffusedTextures = new Map(); - // p5.framebuffer for this are calculated in getSpecularTexture function - this.specularTextures = new Map(); - - this.preEraseBlend = undefined; - this._cachedBlendMode = undefined; - this._cachedFillStyle = [1, 1, 1, 1]; - this._cachedStrokeStyle = [0, 0, 0, 1]; if (this.webglVersion === constants.WEBGL2) { this.blendExt = this.GL; } else { - this.blendExt = this.GL.getExtension('EXT_blend_minmax'); - } - this._isBlending = false; - - this._useLineColor = false; - this._useVertexColor = false; - - this.registerEnabled = new Set(); - - // Camera - this.states.curCamera._computeCameraDefaultSettings(); - this.states.curCamera._setDefaultCamera(); - - // FilterCamera - this.filterCamera = new Camera(this); - this.filterCamera._computeCameraDefaultSettings(); - this.filterCamera._setDefaultCamera(); - // Information about the previous frame's touch object - // for executing orbitControl() - this.prevTouches = []; - // Velocity variable for use with orbitControl() - this.zoomVelocity = 0; - this.rotateVelocity = new Vector(0, 0); - this.moveVelocity = new Vector(0, 0); - // Flags for recording the state of zooming, rotation and moving - this.executeZoom = false; - this.executeRotateAndMove = false; - - this._drawingFilter = false; - this._drawingImage = false; - - 
this.specularShader = undefined; - this.sphereMapping = undefined; - this.diffusedShader = undefined; - this._baseFilterShader = undefined; - this._defaultLightShader = undefined; - this._defaultImmediateModeShader = undefined; - this._defaultNormalShader = undefined; - this._defaultColorShader = undefined; - this._defaultPointShader = undefined; - - this.states.userFillShader = undefined; - this.states.userStrokeShader = undefined; - this.states.userPointShader = undefined; - this.states.userImageShader = undefined; - - this.states.curveDetail = 1 / 4; - - // Used by beginShape/endShape functions to construct a p5.Geometry - this.shapeBuilder = new ShapeBuilder(this); - - this.buffers = { - fill: [ - new RenderBuffer( - 3, - 'vertices', - 'vertexBuffer', - 'aPosition', - this, - this._vToNArray - ), - new RenderBuffer( - 3, - 'vertexNormals', - 'normalBuffer', - 'aNormal', - this, - this._vToNArray - ), - new RenderBuffer( - 4, - 'vertexColors', - 'colorBuffer', - 'aVertexColor', - this - ), - new RenderBuffer( - 3, - 'vertexAmbients', - 'ambientBuffer', - 'aAmbientColor', - this - ), - new RenderBuffer(2, 'uvs', 'uvBuffer', 'aTexCoord', this, arr => - arr.flat() - ) - ], - stroke: [ - new RenderBuffer( - 4, - 'lineVertexColors', - 'lineColorBuffer', - 'aVertexColor', - this - ), - new RenderBuffer( - 3, - 'lineVertices', - 'lineVerticesBuffer', - 'aPosition', - this - ), - new RenderBuffer( - 3, - 'lineTangentsIn', - 'lineTangentsInBuffer', - 'aTangentIn', - this - ), - new RenderBuffer( - 3, - 'lineTangentsOut', - 'lineTangentsOutBuffer', - 'aTangentOut', - this - ), - new RenderBuffer(1, 'lineSides', 'lineSidesBuffer', 'aSide', this) - ], - text: [ - new RenderBuffer( - 3, - 'vertices', - 'vertexBuffer', - 'aPosition', - this, - this._vToNArray - ), - new RenderBuffer(2, 'uvs', 'uvBuffer', 'aTexCoord', this, arr => - arr.flat() - ) - ], - point: this.GL.createBuffer(), - user: [] - }; - - this.geometryBufferCache = new GeometryBufferCache(this); - - 
this.curStrokeCap = constants.ROUND; - this.curStrokeJoin = constants.ROUND; - - // map of texture sources to textures created in this gl context via this.getTexture(src) - this.textures = new Map(); - - // set of framebuffers in use - this.framebuffers = new Set(); - // stack of active framebuffers - this.activeFramebuffers = []; - - // for post processing step - this.states.filterShader = undefined; - this.filterLayer = undefined; - this.filterLayerTemp = undefined; - this.defaultFilterShaders = {}; - - this.fontInfos = {}; - - this._curShader = undefined; - this.drawShapeCount = 1; - - this.scratchMat3 = new Matrix(3); + this.blendExt = this.GL.getExtension("EXT_blend_minmax"); + } this._userEnabledStencil = false; // Store original methods for internal use @@ -439,7 +111,7 @@ class RendererGL extends Renderer { this._internalDisable = this.drawingContext.disable; // Override WebGL enable function - this.drawingContext.enable = key => { + this.drawingContext.enable = (key) => { if (key === this.drawingContext.STENCIL_TEST) { if (!this._clipping) { this._userEnabledStencil = true; @@ -449,295 +121,29 @@ class RendererGL extends Renderer { }; // Override WebGL disable function - this.drawingContext.disable = key => { + this.drawingContext.disable = (key) => { if (key === this.drawingContext.STENCIL_TEST) { - this._userEnabledStencil = false; + this._userEnabledStencil = false; } return this._internalDisable.call(this.drawingContext, key); }; - // Whether or not to remove degenerate faces from geometry. This is usually - // set to false for performance. - this._validateFaces = false; - } - - remove() { - this.wrappedElt.remove(); - this.wrappedElt = null; - this.canvas = null; - this.elt = null; - } - - ////////////////////////////////////////////// - // Geometry Building - ////////////////////////////////////////////// - - /** - * Starts creating a new p5.Geometry. 
Subsequent shapes drawn will be added - * to the geometry and then returned when - * endGeometry() is called. One can also use - * buildGeometry() to pass a function that - * draws shapes. - * - * If you need to draw complex shapes every frame which don't change over time, - * combining them upfront with `beginGeometry()` and `endGeometry()` and then - * drawing that will run faster than repeatedly drawing the individual pieces. - * @private - */ - beginGeometry() { - if (this.geometryBuilder) { - throw new Error( - 'It looks like `beginGeometry()` is being called while another p5.Geometry is already being build.' - ); - } - this.geometryBuilder = new GeometryBuilder(this); - this.geometryBuilder.prevFillColor = this.states.fillColor; - this.fill(new Color([-1, -1, -1, -1])); - } - - /** - * Finishes creating a new p5.Geometry that was - * started using beginGeometry(). One can also - * use buildGeometry() to pass a function that - * draws shapes. - * @private - * - * @returns {p5.Geometry} The model that was built. - */ - endGeometry() { - if (!this.geometryBuilder) { - throw new Error( - 'Make sure you call beginGeometry() before endGeometry()!' - ); - } - const geometry = this.geometryBuilder.finish(); - this.fill(this.geometryBuilder.prevFillColor); - this.geometryBuilder = undefined; - return geometry; - } - - /** - * Creates a new p5.Geometry that contains all - * the shapes drawn in a provided callback function. The returned combined shape - * can then be drawn all at once using model(). - * - * If you need to draw complex shapes every frame which don't change over time, - * combining them with `buildGeometry()` once and then drawing that will run - * faster than repeatedly drawing the individual pieces. - * - * One can also draw shapes directly between - * beginGeometry() and - * endGeometry() instead of using a callback - * function. - * @param {Function} callback A function that draws shapes. 
- * @returns {p5.Geometry} The model that was built from the callback function. - */ - buildGeometry(callback) { - this.beginGeometry(); - callback(); - return this.endGeometry(); - } - - ////////////////////////////////////////////// - // Shape drawing - ////////////////////////////////////////////// - - beginShape(...args) { - super.beginShape(...args); - // TODO remove when shape refactor is complete - // this.shapeBuilder.beginShape(...args); - } - - curveDetail(d) { - if (d === undefined) { - return this.states.curveDetail; - } else { - this.states.setValue('curveDetail', d); - } - } - - drawShape(shape) { - const visitor = new PrimitiveToVerticesConverter({ - curveDetail: this.states.curveDetail - }); - shape.accept(visitor); - this.shapeBuilder.constructFromContours(shape, visitor.contours); - - if (this.geometryBuilder) { - this.geometryBuilder.addImmediate( - this.shapeBuilder.geometry, - this.shapeBuilder.shapeMode, - { validateFaces: this._validateFaces } - ); - } else if (this.states.fillColor || this.states.strokeColor) { - if (this.shapeBuilder.shapeMode === constants.POINTS) { - this._drawPoints( - this.shapeBuilder.geometry.vertices, - this.buffers.point - ); - } else { - this._drawGeometry(this.shapeBuilder.geometry, { - mode: this.shapeBuilder.shapeMode, - count: this.drawShapeCount - }); - } - } - this.drawShapeCount = 1; - } - - endShape(mode, count) { - this.drawShapeCount = count; - super.endShape(mode, count); - } - - vertexProperty(...args) { - this.currentShape.vertexProperty(...args); - } - - normal(xorv, y, z) { - if (xorv instanceof Vector) { - this.states.setValue('_currentNormal', xorv); - } else { - this.states.setValue('_currentNormal', new Vector(xorv, y, z)); - } - this.updateShapeVertexProperties(); + this._cachedBlendMode = undefined; } - model(model, count = 1) { - if (model.vertices.length > 0) { - if (this.geometryBuilder) { - this.geometryBuilder.addRetained(model); - } else { - if (!this.geometryInHash(model.gid)) { - 
model._edgesToVertices(); - this._getOrMakeCachedBuffers(model); - } - - this._drawGeometry(model, { count }); - } - } + setupContext() { + this._setAttributeDefaults(this._pInst); + this._initContext(); + // This redundant property is useful in reminding you that you are + // interacting with WebGLRenderingContext, still worth considering future removal + this.GL = this.drawingContext; } ////////////////////////////////////////////// // Rendering ////////////////////////////////////////////// - _drawGeometry(geometry, { mode = constants.TRIANGLES, count = 1 } = {}) { - for (const propName in geometry.userVertexProperties) { - const prop = geometry.userVertexProperties[propName]; - this.buffers.user.push( - new RenderBuffer( - prop.getDataSize(), - prop.getSrcName(), - prop.getDstName(), - prop.getName(), - this - ) - ); - } - - if ( - this.states.fillColor && - geometry.vertices.length >= 3 && - ![constants.LINES, constants.POINTS].includes(mode) - ) { - this._drawFills(geometry, { mode, count }); - } - - if (this.states.strokeColor && geometry.lineVertices.length >= 1) { - this._drawStrokes(geometry, { count }); - } - - this.buffers.user = []; - } - - _drawGeometryScaled(model, scaleX, scaleY, scaleZ) { - let originalModelMatrix = this.states.uModelMatrix; - this.states.setValue('uModelMatrix', this.states.uModelMatrix.clone()); - try { - this.states.uModelMatrix.scale(scaleX, scaleY, scaleZ); - - if (this.geometryBuilder) { - this.geometryBuilder.addRetained(model); - } else { - this._drawGeometry(model); - } - } finally { - this.states.setValue('uModelMatrix', originalModelMatrix); - } - } - - _drawFills(geometry, { count, mode } = {}) { - this._useVertexColor = geometry.vertexColors.length > 0; - - const shader = - !this._drawingFilter && this.states.userFillShader - ? 
this.states.userFillShader - : this._getFillShader(); - shader.bindShader(); - this._setGlobalUniforms(shader); - this._setFillUniforms(shader); - shader.bindTextures(); - - for (const buff of this.buffers.fill) { - buff._prepareBuffer(geometry, shader); - } - this._prepareUserAttributes(geometry, shader); - shader.disableRemainingAttributes(); - - this._applyColorBlend( - this.states.curFillColor, - geometry.hasFillTransparency() - ); - - this._drawBuffers(geometry, { mode, count }); - - shader.unbindShader(); - } - - _drawStrokes(geometry, { count } = {}) { - const gl = this.GL; - - this._useLineColor = geometry.vertexStrokeColors.length > 0; - - const shader = this._getStrokeShader(); - shader.bindShader(); - this._setGlobalUniforms(shader); - this._setStrokeUniforms(shader); - shader.bindTextures(); - - for (const buff of this.buffers.stroke) { - buff._prepareBuffer(geometry, shader); - } - this._prepareUserAttributes(geometry, shader); - shader.disableRemainingAttributes(); - - this._applyColorBlend( - this.states.curStrokeColor, - geometry.hasStrokeTransparency() - ); - - if (count === 1) { - gl.drawArrays(gl.TRIANGLES, 0, geometry.lineVertices.length / 3); - } else { - try { - gl.drawArraysInstanced( - gl.TRIANGLES, - 0, - geometry.lineVertices.length / 3, - count - ); - } catch (e) { - console.log( - '🌸 p5.js says: Instancing is only supported in WebGL2 mode' - ); - } - } - - shader.unbindShader(); - } - - _drawPoints(vertices, vertexBuffer) { + /*_drawPoints(vertices, vertexBuffer) { const gl = this.GL; const pointShader = this._getPointShader(); pointShader.bindShader(); @@ -760,39 +166,143 @@ class RendererGL extends Renderer { gl.drawArrays(gl.Points, 0, vertices.length); pointShader.unbindShader(); - } + }*/ - _prepareUserAttributes(geometry, shader) { - for (const buff of this.buffers.user) { - if (!this._pInst.constructor.disableFriendlyErrors) { - // Check for the right data size - const prop = geometry.userVertexProperties[buff.attr]; - if (prop) { 
- const adjustedLength = prop.getSrcArray().length / prop.getDataSize(); - if (adjustedLength > geometry.vertices.length) { - this._pInst.constructor._friendlyError( - `One of the geometries has a custom vertex property '${prop.getName()}' with more values than vertices. This is probably caused by directly using the Geometry.vertexProperty() method.`, - 'vertexProperty()' - ); - } else if (adjustedLength < geometry.vertices.length) { - this._pInst.constructor._friendlyError( - `One of the geometries has a custom vertex property '${prop.getName()}' with fewer values than vertices. This is probably caused by directly using the Geometry.vertexProperty() method.`, - 'vertexProperty()' - ); - } + /** + * @private sets blending in gl context to curBlendMode + * @param {Number[]} color [description] + * @return {Number[]} Normalized numbers array + */ + _applyBlendMode () { + if (this._cachedBlendMode === this.states.curBlendMode) { + return; + } + const gl = this.GL; + switch (this.states.curBlendMode) { + case constants.BLEND: + gl.blendEquation(gl.FUNC_ADD); + gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA); + break; + case constants.ADD: + gl.blendEquation(gl.FUNC_ADD); + gl.blendFunc(gl.ONE, gl.ONE); + break; + case constants.REMOVE: + gl.blendEquation(gl.FUNC_ADD); + gl.blendFunc(gl.ZERO, gl.ONE_MINUS_SRC_ALPHA); + break; + case constants.MULTIPLY: + gl.blendEquation(gl.FUNC_ADD); + gl.blendFunc(gl.DST_COLOR, gl.ONE_MINUS_SRC_ALPHA); + break; + case constants.SCREEN: + gl.blendEquation(gl.FUNC_ADD); + gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_COLOR); + break; + case constants.EXCLUSION: + gl.blendEquationSeparate(gl.FUNC_ADD, gl.FUNC_ADD); + gl.blendFuncSeparate( + gl.ONE_MINUS_DST_COLOR, + gl.ONE_MINUS_SRC_COLOR, + gl.ONE, + gl.ONE + ); + break; + case constants.REPLACE: + gl.blendEquation(gl.FUNC_ADD); + gl.blendFunc(gl.ONE, gl.ZERO); + break; + case constants.SUBTRACT: + gl.blendEquationSeparate(gl.FUNC_REVERSE_SUBTRACT, gl.FUNC_ADD); + gl.blendFuncSeparate(gl.ONE, 
gl.ONE, gl.ONE, gl.ONE_MINUS_SRC_ALPHA); + break; + case constants.DARKEST: + if (this.blendExt) { + gl.blendEquationSeparate( + this.blendExt.MIN || this.blendExt.MIN_EXT, + gl.FUNC_ADD + ); + gl.blendFuncSeparate(gl.ONE, gl.ONE, gl.ONE, gl.ONE); + } else { + console.warn( + 'blendMode(DARKEST) does not work in your browser in WEBGL mode.' + ); + } + break; + case constants.LIGHTEST: + if (this.blendExt) { + gl.blendEquationSeparate( + this.blendExt.MAX || this.blendExt.MAX_EXT, + gl.FUNC_ADD + ); + gl.blendFuncSeparate(gl.ONE, gl.ONE, gl.ONE, gl.ONE); + } else { + console.warn( + 'blendMode(LIGHTEST) does not work in your browser in WEBGL mode.' + ); } + break; + default: + console.error( + 'Oops! Somehow Renderer3D set curBlendMode to an unsupported mode.' + ); + break; + } + this._cachedBlendMode = this.states.curBlendMode; + } + + _shaderOptions() { + return undefined; + } + + _useShader(shader) { + const gl = this.GL; + gl.useProgram(shader._glProgram); + } + + /** + * Once all buffers have been bound, this checks to see if there are any + * remaining active attributes, likely left over from previous renders, + * and disables them so that they don't affect rendering. 
+ * @private + */ + _disableRemainingAttributes(shader) { + for (const location of this.registerEnabled.values()) { + if ( + !Object.keys(shader.attributes).some( + key => shader.attributes[key].location === location + ) + ) { + this.GL.disableVertexAttribArray(location); + this.registerEnabled.delete(location); } - buff._prepareBuffer(geometry, shader); } } - _drawBuffers(geometry, { mode = this.GL.TRIANGLES, count }) { + _drawBuffers(geometry, { mode = constants.TRIANGLES, count }) { const gl = this.GL; const glBuffers = this.geometryBufferCache.getCached(geometry); if (!glBuffers) return; - if (glBuffers.indexBuffer) { + if (this._curShader.shaderType === 'stroke'){ + if (count === 1) { + gl.drawArrays(gl.TRIANGLES, 0, geometry.lineVertices.length / 3); + } else { + try { + gl.drawArraysInstanced( + gl.TRIANGLES, + 0, + geometry.lineVertices.length / 3, + count + ); + } catch (e) { + console.log( + "🌸 p5.js says: Instancing is only supported in WebGL2 mode" + ); + } + } + } else if (glBuffers.indexBuffer) { this._bindBuffer(glBuffers.indexBuffer, gl.ELEMENT_ARRAY_BUFFER); // If this model is using a Uint32Array we need to ensure the @@ -801,9 +311,9 @@ class RendererGL extends Renderer { this._pInst.webglVersion !== constants.WEBGL2 && glBuffers.indexBufferType === gl.UNSIGNED_INT ) { - if (!gl.getExtension('OES_element_index_uint')) { + if (!gl.getExtension("OES_element_index_uint")) { throw new Error( - 'Unable to render a 3d model with > 65535 triangles. Your web browser does not support the WebGL Extension OES_element_index_uint.' + "Unable to render a 3d model with > 65535 triangles. Your web browser does not support the WebGL Extension OES_element_index_uint." ); } } @@ -826,36 +336,33 @@ class RendererGL extends Renderer { ); } catch (e) { console.log( - '🌸 p5.js says: Instancing is only supported in WebGL2 mode' + "🌸 p5.js says: Instancing is only supported in WebGL2 mode" ); } } } else { + const glMode = mode === constants.TRIANGLES ? 
gl.TRIANGLES : gl.TRIANGLE_STRIP; if (count === 1) { - gl.drawArrays(mode, 0, geometry.vertices.length); + gl.drawArrays(glMode, 0, geometry.vertices.length); } else { try { - gl.drawArraysInstanced(mode, 0, geometry.vertices.length, count); + gl.drawArraysInstanced(glMode, 0, geometry.vertices.length, count); } catch (e) { console.log( - '🌸 p5.js says: Instancing is only supported in WebGL2 mode' + "🌸 p5.js says: Instancing is only supported in WebGL2 mode" ); } } } } - _getOrMakeCachedBuffers(geometry) { - return this.geometryBufferCache.ensureCached(geometry); - } - ////////////////////////////////////////////// // Setting ////////////////////////////////////////////// _setAttributeDefaults(pInst) { // See issue #3850, safer to enable AA in Safari - const applyAA = navigator.userAgent.toLowerCase().includes('safari'); + const applyAA = navigator.userAgent.toLowerCase().includes("safari"); const defaults = { alpha: true, depth: true, @@ -864,7 +371,7 @@ class RendererGL extends Renderer { premultipliedAlpha: true, preserveDrawingBuffer: true, perPixelLighting: true, - version: 2 + version: 2, }; if (pInst._glAttributes === null) { pInst._glAttributes = defaults; @@ -874,11 +381,59 @@ class RendererGL extends Renderer { return; } + _setAttributes(key, value) { + if (typeof this._pInst._glAttributes === "undefined") { + console.log( + "You are trying to use setAttributes on a p5.Graphics object " + + "that does not use a WEBGL renderer." 
+ ); + return; + } + let unchanged = true; + if (typeof value !== "undefined") { + //first time modifying the attributes + if (this._pInst._glAttributes === null) { + this._pInst._glAttributes = {}; + } + if (this._pInst._glAttributes[key] !== value) { + //changing value of previously altered attribute + this._pInst._glAttributes[key] = value; + unchanged = false; + } + //setting all attributes with some change + } else if (key instanceof Object) { + if (this._pInst._glAttributes !== key) { + this._pInst._glAttributes = key; + unchanged = false; + } + } + //@todo_FES + if (!this.isP3D || unchanged) { + return; + } + + if (!this._pInst._setupDone) { + if (this.geometryBufferCache.numCached() > 0) { + p5._friendlyError( + "Sorry, Could not set the attributes, you need to call setAttributes() " + + "before calling the other drawing methods in setup()" + ); + return; + } + } + + this._resetContext(null, null, RendererGL); + + if (this.states.curCamera) { + this.states.curCamera._renderer = this._renderer; + } + } + _initContext() { if (this._pInst._glAttributes?.version !== 1) { // Unless WebGL1 is explicitly asked for, try to create a WebGL2 context this.drawingContext = this.canvas.getContext( - 'webgl2', + "webgl2", this._pInst._glAttributes ); } @@ -892,11 +447,11 @@ class RendererGL extends Renderer { // disabled via `setAttributes({ version: 1 })` or because the device // doesn't support it), fall back to a WebGL1 context this.drawingContext = - this.canvas.getContext('webgl', this._pInst._glAttributes) || - this.canvas.getContext('experimental-webgl', this._pInst._glAttributes); + this.canvas.getContext("webgl", this._pInst._glAttributes) || + this.canvas.getContext("experimental-webgl", this._pInst._glAttributes); } if (this.drawingContext === null) { - throw new Error('Error creating webgl context'); + throw new Error("Error creating webgl context"); } else { const gl = this.drawingContext; gl.enable(gl.DEPTH_TEST); @@ -912,6 +467,8 @@ class RendererGL extends 
Renderer { } } + _updateSize() {} + _getMaxTextureSize() { const gl = this.drawingContext; return gl.getParameter(gl.MAX_TEXTURE_SIZE); @@ -931,7 +488,7 @@ class RendererGL extends Renderer { if (adjustedWidth !== width || adjustedHeight !== height) { console.warn( - 'Warning: The requested width/height exceeds hardware limits. ' + + "Warning: The requested width/height exceeds hardware limits. " + `Adjusting dimensions to width: ${adjustedWidth}, height: ${adjustedHeight}.` ); } @@ -939,504 +496,15 @@ class RendererGL extends Renderer { return { adjustedWidth, adjustedHeight }; } - //This is helper function to reset the context anytime the attributes - //are changed with setAttributes() - - _resetContext(options, callback) { - const w = this.width; - const h = this.height; - const defaultId = this.canvas.id; - const isPGraphics = this._pInst instanceof Graphics; - - // Preserve existing position and styles before recreation - const prevStyle = { - position: this.canvas.style.position, - top: this.canvas.style.top, - left: this.canvas.style.left - }; - - if (isPGraphics) { - // Handle PGraphics: remove and recreate the canvas - const pg = this._pInst; - pg.canvas.parentNode.removeChild(pg.canvas); - pg.canvas = document.createElement('canvas'); - const node = pg._pInst._userNode || document.body; - node.appendChild(pg.canvas); - Element.call(pg, pg.canvas, pg._pInst); - // Restore previous width and height - pg.width = w; - pg.height = h; - } else { - // Handle main canvas: remove and recreate it - let c = this.canvas; - if (c) { - c.parentNode.removeChild(c); - } - c = document.createElement('canvas'); - c.id = defaultId; - // Attach the new canvas to the correct parent node - if (this._pInst._userNode) { - this._pInst._userNode.appendChild(c); - } else { - document.body.appendChild(c); - } - this._pInst.canvas = c; - this.canvas = c; - - // Restore the saved position - this.canvas.style.position = prevStyle.position; - this.canvas.style.top = prevStyle.top; - 
this.canvas.style.left = prevStyle.left; - } - - const renderer = new RendererGL( - this._pInst, - w, - h, - !isPGraphics, - this._pInst.canvas - ); - this._pInst._renderer = renderer; - - renderer._applyDefaults(); - - if (typeof callback === 'function') { - //setTimeout with 0 forces the task to the back of the queue, this ensures that - //we finish switching out the renderer - setTimeout(() => { - callback.apply(window._renderer, options); - }, 0); - } - } - - _update() { - // reset model view and apply initial camera transform - // (containing only look at info; no projection). - this.states.setValue('uModelMatrix', this.states.uModelMatrix.clone()); - this.states.uModelMatrix.reset(); - this.states.setValue('uViewMatrix', this.states.uViewMatrix.clone()); - this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); - - // reset light data for new frame. - - this.states.setValue('ambientLightColors', []); - this.states.setValue('specularColors', [1, 1, 1]); - - this.states.setValue('directionalLightDirections', []); - this.states.setValue('directionalLightDiffuseColors', []); - this.states.setValue('directionalLightSpecularColors', []); - - this.states.setValue('pointLightPositions', []); - this.states.setValue('pointLightDiffuseColors', []); - this.states.setValue('pointLightSpecularColors', []); - - this.states.setValue('spotLightPositions', []); - this.states.setValue('spotLightDirections', []); - this.states.setValue('spotLightDiffuseColors', []); - this.states.setValue('spotLightSpecularColors', []); - this.states.setValue('spotLightAngle', []); - this.states.setValue('spotLightConc', []); - - this.states.setValue('enableLighting', false); - - //reset tint value for new frame - this.states.setValue('tint', [255, 255, 255, 255]); - - //Clear depth every frame + _resetBuffersBeforeDraw() { this.GL.clearStencil(0); this.GL.clear(this.GL.DEPTH_BUFFER_BIT | this.GL.STENCIL_BUFFER_BIT); if (!this._userEnabledStencil) { this._internalDisable.call(this.GL, 
this.GL.STENCIL_TEST); } - } - /** - * [background description] - */ - background(...args) { - const _col = this._pInst.color(...args); - this.clear(..._col._getRGBA()); - } - - ////////////////////////////////////////////// - // Positioning - ////////////////////////////////////////////// - - get uModelMatrix() { - return this.states.uModelMatrix; - } - - get uViewMatrix() { - return this.states.uViewMatrix; - } - - get uPMatrix() { - return this.states.uPMatrix; - } - - get uMVMatrix() { - const m = this.uModelMatrix.copy(); - m.mult(this.uViewMatrix); - return m; - } - - /** - * Get a matrix from world-space to screen-space - */ - getWorldToScreenMatrix() { - const modelMatrix = this.states.uModelMatrix; - const viewMatrix = this.states.uViewMatrix; - const projectionMatrix = this.states.uPMatrix; - const projectedToScreenMatrix = new Matrix(4); - projectedToScreenMatrix.scale(this.width, this.height, 1); - projectedToScreenMatrix.translate([0.5, 0.5, 0.5]); - projectedToScreenMatrix.scale(0.5, -0.5, 0.5); - - const modelViewMatrix = modelMatrix.copy().mult(viewMatrix); - const modelViewProjectionMatrix = modelViewMatrix.mult(projectionMatrix); - const worldToScreenMatrix = modelViewProjectionMatrix - .mult(projectedToScreenMatrix); - return worldToScreenMatrix; - } - - ////////////////////////////////////////////// - // COLOR - ////////////////////////////////////////////// - /** - * Basic fill material for geometry with a given color - * @param {Number|Number[]|String|p5.Color} v1 gray value, - * red or hue value (depending on the current color mode), - * or color Array, or CSS color string - * @param {Number} [v2] green or saturation value - * @param {Number} [v3] blue or brightness value - * @param {Number} [a] opacity - * @chainable - * @example - *
- * - * function setup() { - * createCanvas(200, 200, WEBGL); - * } - * - * function draw() { - * background(0); - * noStroke(); - * fill(100, 100, 240); - * rotateX(frameCount * 0.01); - * rotateY(frameCount * 0.01); - * box(75, 75, 75); - * } - * - *
- * - * @alt - * black canvas with purple cube spinning - */ - fill(...args) { - super.fill(...args); - //see material.js for more info on color blending in webgl - // const color = fn.color.apply(this._pInst, arguments); - const color = this.states.fillColor; - this.states.setValue('curFillColor', color._array); - this.states.setValue('drawMode', constants.FILL); - this.states.setValue('_useNormalMaterial', false); - this.states.setValue('_tex', null); - } - - /** - * Basic stroke material for geometry with a given color - * @param {Number|Number[]|String|p5.Color} v1 gray value, - * red or hue value (depending on the current color mode), - * or color Array, or CSS color string - * @param {Number} [v2] green or saturation value - * @param {Number} [v3] blue or brightness value - * @param {Number} [a] opacity - * @example - *
- * - * function setup() { - * createCanvas(200, 200, WEBGL); - * } - * - * function draw() { - * background(0); - * stroke(240, 150, 150); - * fill(100, 100, 240); - * rotateX(frameCount * 0.01); - * rotateY(frameCount * 0.01); - * box(75, 75, 75); - * } - * - *
- * - * @alt - * black canvas with purple cube with pink outline spinning - */ - stroke(...args) { - super.stroke(...args); - // const color = fn.color.apply(this._pInst, arguments); - this.states.setValue('curStrokeColor', this.states.strokeColor._array); - } - - getCommonVertexProperties() { - return { - ...super.getCommonVertexProperties(), - stroke: this.states.strokeColor, - fill: this.states.fillColor, - normal: this.states._currentNormal - }; - } - - getSupportedIndividualVertexProperties() { - return { - textureCoordinates: true - }; - } - - strokeCap(cap) { - this.curStrokeCap = cap; - } - - strokeJoin(join) { - this.curStrokeJoin = join; - } - getFilterLayer() { - if (!this.filterLayer) { - this.filterLayer = new Framebuffer(this); - } - return this.filterLayer; - } - getFilterLayerTemp() { - if (!this.filterLayerTemp) { - this.filterLayerTemp = new Framebuffer(this); - } - return this.filterLayerTemp; - } - matchSize(fboToMatch, target) { - if ( - fboToMatch.width !== target.width || - fboToMatch.height !== target.height - ) { - fboToMatch.resize(target.width, target.height); - } - - if (fboToMatch.pixelDensity() !== target.pixelDensity()) { - fboToMatch.pixelDensity(target.pixelDensity()); - } - } - filter(...args) { - let fbo = this.getFilterLayer(); - - // use internal shader for filter constants BLUR, INVERT, etc - let filterParameter = undefined; - let operation = undefined; - if (typeof args[0] === 'string') { - operation = args[0]; - let useDefaultParam = - operation in filterParamDefaults && args[1] === undefined; - filterParameter = useDefaultParam - ? filterParamDefaults[operation] - : args[1]; - - // Create and store shader for constants once on initial filter call. - // Need to store multiple in case user calls different filters, - // eg. 
filter(BLUR) then filter(GRAY) - if (!(operation in this.defaultFilterShaders)) { - this.defaultFilterShaders[operation] = new Shader( - fbo.renderer, - filterShaderVert, - filterShaderFrags[operation] - ); - } - this.states.setValue( - 'filterShader', - this.defaultFilterShaders[operation] - ); - } - // use custom user-supplied shader - else { - this.states.setValue('filterShader', args[0]); - } - - // Setting the target to the framebuffer when applying a filter to a framebuffer. - - const target = this.activeFramebuffer() || this; - - // Resize the framebuffer 'fbo' and adjust its pixel density if it doesn't match the target. - this.matchSize(fbo, target); - - fbo.draw(() => this.clear()); // prevent undesirable feedback effects accumulating secretly. - - let texelSize = [ - 1 / (target.width * target.pixelDensity()), - 1 / (target.height * target.pixelDensity()) - ]; - - // apply blur shader with multiple passes. - if (operation === constants.BLUR) { - // Treating 'tmp' as a framebuffer. - const tmp = this.getFilterLayerTemp(); - // Resize the framebuffer 'tmp' and adjust its pixel density if it doesn't match the target. 
- this.matchSize(tmp, target); - // setup - this.push(); - this.states.setValue('strokeColor', null); - this.blendMode(constants.BLEND); - - // draw main to temp buffer - this.shader(this.states.filterShader); - this.states.filterShader.setUniform('texelSize', texelSize); - this.states.filterShader.setUniform('canvasSize', [ - target.width, - target.height - ]); - this.states.filterShader.setUniform( - 'radius', - Math.max(1, filterParameter) - ); - - // Horiz pass: draw `target` to `tmp` - tmp.draw(() => { - this.states.filterShader.setUniform('direction', [1, 0]); - this.states.filterShader.setUniform('tex0', target); - this.clear(); - this.shader(this.states.filterShader); - this.noLights(); - this.plane(target.width, target.height); - }); - - // Vert pass: draw `tmp` to `fbo` - fbo.draw(() => { - this.states.filterShader.setUniform('direction', [0, 1]); - this.states.filterShader.setUniform('tex0', tmp); - this.clear(); - this.shader(this.states.filterShader); - this.noLights(); - this.plane(target.width, target.height); - }); - - this.pop(); - } - // every other non-blur shader uses single pass - else { - fbo.draw(() => { - this.states.setValue('strokeColor', null); - this.blendMode(constants.BLEND); - this.shader(this.states.filterShader); - this.states.filterShader.setUniform('tex0', target); - this.states.filterShader.setUniform('texelSize', texelSize); - this.states.filterShader.setUniform('canvasSize', [ - target.width, - target.height - ]); - // filterParameter uniform only used for POSTERIZE, and THRESHOLD - // but shouldn't hurt to always set - this.states.filterShader.setUniform('filterParameter', filterParameter); - this.noLights(); - this.plane(target.width, target.height); - }); - } - // draw fbo contents onto main renderer. 
- this.push(); - this.states.setValue('strokeColor', null); - this.clear(); - this.push(); - this.states.setValue('imageMode', constants.CORNER); - this.blendMode(constants.BLEND); - target.filterCamera._resize(); - this.setCamera(target.filterCamera); - this.resetMatrix(); - this._drawingFilter = true; - this.image( - fbo, - 0, - 0, - this.width, - this.height, - -target.width / 2, - -target.height / 2, - target.width, - target.height - ); - this._drawingFilter = false; - this.clearDepth(); - this.pop(); - this.pop(); - } - - // Pass this off to the host instance so that we can treat a renderer and a - // framebuffer the same in filter() - - pixelDensity(newDensity) { - if (newDensity) { - return this._pInst.pixelDensity(newDensity); - } - return this._pInst.pixelDensity(); - } - - blendMode(mode) { - if ( - mode === constants.DARKEST || - mode === constants.LIGHTEST || - mode === constants.ADD || - mode === constants.BLEND || - mode === constants.SUBTRACT || - mode === constants.SCREEN || - mode === constants.EXCLUSION || - mode === constants.REPLACE || - mode === constants.MULTIPLY || - mode === constants.REMOVE - ) - this.states.setValue('curBlendMode', mode); - else if ( - mode === constants.BURN || - mode === constants.OVERLAY || - mode === constants.HARD_LIGHT || - mode === constants.SOFT_LIGHT || - mode === constants.DODGE - ) { - console.warn( - 'BURN, OVERLAY, HARD_LIGHT, SOFT_LIGHT, and DODGE only work for blendMode in 2D mode.' 
- ); - } - } - - erase(opacityFill, opacityStroke) { - if (!this._isErasing) { - this.preEraseBlend = this.states.curBlendMode; - this._isErasing = true; - this.blendMode(constants.REMOVE); - this._cachedFillStyle = this.states.curFillColor.slice(); - this.states.setValue('curFillColor', [1, 1, 1, opacityFill / 255]); - this._cachedStrokeStyle = this.states.curStrokeColor.slice(); - this.states.setValue('curStrokeColor', [1, 1, 1, opacityStroke / 255]); - } - } - - noErase() { - if (this._isErasing) { - // Restore colors - this.states.setValue('curFillColor', this._cachedFillStyle.slice()); - this.states.setValue('curStrokeColor', this._cachedStrokeStyle.slice()); - // Restore blend mode - this.states.setValue('curBlendMode', this.preEraseBlend); - this.blendMode(this.preEraseBlend); - // Ensure that _applyBlendMode() sets preEraseBlend back to the original blend mode - this._isErasing = false; - this._applyBlendMode(); - } - } - - drawTarget() { - return this.activeFramebuffers[this.activeFramebuffers.length - 1] || this; - } - - beginClip(options = {}) { - super.beginClip(options); - - this.drawTarget()._isClipApplied = true; - + _applyClip() { const gl = this.GL; gl.clearStencil(0); gl.clear(gl.STENCIL_BUFFER_BIT); @@ -1453,16 +521,9 @@ class RendererGL extends Renderer { gl.REPLACE // what to do if both tests pass ); gl.disable(gl.DEPTH_TEST); - - this.push(); - this.resetShader(); - if (this.states.fillColor) this.fill(0, 0); - if (this.states.strokeColor) this.stroke(0, 0); } - endClip() { - this.pop(); - + _unapplyClip() { const gl = this.GL; gl.stencilOp( gl.KEEP, // what to do if the stencil test fails @@ -1475,21 +536,11 @@ class RendererGL extends Renderer { 0xff // mask ); gl.enable(gl.DEPTH_TEST); - - // Mark the depth at which the clip has been applied so that we can clear it - // when we pop past this depth - this._clipDepths.push(this._pushPopDepth); - - super.endClip(); } - _clearClip() { + _clearClipBuffer() { this.GL.clearStencil(1); 
this.GL.clear(this.GL.STENCIL_BUFFER_BIT); - if (this._clipDepths.length > 0) { - this._clipDepths.pop(); - } - this.drawTarget()._isClipApplied = false; } // x,y are canvas-relative (pre-scaled by _pixelDensity) @@ -1517,8 +568,8 @@ class RendererGL extends Renderer { //@todo_FES if (this._pInst._glAttributes.preserveDrawingBuffer !== true) { console.log( - 'loadPixels only works in WebGL when preserveDrawingBuffer ' + - 'is true.' + "loadPixels only works in WebGL when preserveDrawingBuffer " + + "is true." ); return; } @@ -1547,7 +598,7 @@ class RendererGL extends Renderer { this.push(); this.resetMatrix(); this.clear(); - this.states.setValue('imageMode', constants.CORNER); + this.states.setValue("imageMode", constants.CORNER); this.image( fbo, 0, @@ -1564,30 +615,8 @@ class RendererGL extends Renderer { this.GL.clear(this.GL.DEPTH_BUFFER_BIT); } - /** - * @private - * @returns {p5.Framebuffer} A p5.Framebuffer set to match the size and settings - * of the renderer's canvas. It will be created if it does not yet exist, and - * reused if it does. 
- */ - _getTempFramebuffer() { - if (!this._tempFramebuffer) { - this._tempFramebuffer = new Framebuffer(this, { - format: constants.UNSIGNED_BYTE, - useDepth: this._pInst._glAttributes.depth, - depthFormat: constants.UNSIGNED_INT, - antialias: this._pInst._glAttributes.antialias - }); - } - return this._tempFramebuffer; - } - - ////////////////////////////////////////////// - // HASH | for geometry - ////////////////////////////////////////////// - - geometryInHash(gid) { - return this.geometryBufferCache.isCached(gid); + zClipRange() { + return [-1, 1]; } viewport(w, h) { @@ -1595,64 +624,18 @@ class RendererGL extends Renderer { this.GL.viewport(0, 0, w, h); } - /** - * [resize description] - * @private - * @param {Number} w [description] - * @param {Number} h [description] - */ - resize(w, h) { - super.resize(w, h); - - // save canvas properties - const props = {}; - for (const key in this.drawingContext) { - const val = this.drawingContext[key]; - if (typeof val !== 'object' && typeof val !== 'function') { - props[key] = val; - } - } - - const dimensions = this._adjustDimensions(w, h); - w = dimensions.adjustedWidth; - h = dimensions.adjustedHeight; - - this.width = w; - this.height = h; - - this.canvas.width = w * this._pixelDensity; - this.canvas.height = h * this._pixelDensity; - this.canvas.style.width = `${w}px`; - this.canvas.style.height = `${h}px`; + _updateViewport() { this._origViewport = { width: this.GL.drawingBufferWidth, - height: this.GL.drawingBufferHeight + height: this.GL.drawingBufferHeight, }; this.viewport(this._origViewport.width, this._origViewport.height); + } - this.states.curCamera._resize(); - - //resize pixels buffer - if (typeof this.pixels !== 'undefined') { - this.pixels = new Uint8Array( - this.GL.drawingBufferWidth * this.GL.drawingBufferHeight * 4 - ); - } - - for (const framebuffer of this.framebuffers) { - // Notify framebuffers of the resize so that any auto-sized framebuffers - // can also update their size - 
framebuffer._canvasSizeChanged(); - } - - // reset canvas properties - for (const savedKey in props) { - try { - this.drawingContext[savedKey] = props[savedKey]; - } catch (err) { - // ignore read-only property errors - } - } + _createPixelsArray() { + this.pixels = new Uint8Array( + this.GL.drawingBufferWidth * this.GL.drawingBufferHeight * 4 + ); } /** @@ -1699,107 +682,6 @@ class RendererGL extends Renderer { this.GL.clear(this.GL.DEPTH_BUFFER_BIT); } - applyMatrix(a, b, c, d, e, f) { - this.states.setValue('uModelMatrix', this.states.uModelMatrix.clone()); - if (arguments.length === 16) { - // this.states.uModelMatrix.apply(arguments); - Matrix.prototype.apply.apply(this.states.uModelMatrix, arguments); - } else { - this.states.uModelMatrix.apply([ - a, - b, - 0, - 0, - c, - d, - 0, - 0, - 0, - 0, - 1, - 0, - e, - f, - 0, - 1 - ]); - } - } - - /** - * [translate description] - * @private - * @param {Number} x [description] - * @param {Number} y [description] - * @param {Number} z [description] - * @chainable - * @todo implement handle for components or vector as args - */ - translate(x, y, z) { - if (x instanceof Vector) { - z = x.z; - y = x.y; - x = x.x; - } - this.states.setValue('uModelMatrix', this.states.uModelMatrix.clone()); - this.states.uModelMatrix.translate([x, y, z]); - return this; - } - - /** - * Scales the Model View Matrix by a vector - * @private - * @param {Number | p5.Vector | Array} x [description] - * @param {Number} [y] y-axis scalar - * @param {Number} [z] z-axis scalar - * @chainable - */ - scale(x, y, z) { - this.states.setValue('uModelMatrix', this.states.uModelMatrix.clone()); - this.states.uModelMatrix.scale(x, y, z); - return this; - } - - rotate(rad, axis) { - if (typeof axis === 'undefined') { - return this.rotateZ(rad); - } - this.states.setValue('uModelMatrix', this.states.uModelMatrix.clone()); - Matrix.prototype.rotate4x4.apply(this.states.uModelMatrix, arguments); - return this; - } - - rotateX(rad) { - this.rotate(rad, 1, 0, 
0); - return this; - } - - rotateY(rad) { - this.rotate(rad, 0, 1, 0); - return this; - } - - rotateZ(rad) { - this.rotate(rad, 0, 0, 1); - return this; - } - - pop(...args) { - if ( - this._clipDepths.length > 0 && - this._pushPopDepth === this._clipDepths[this._clipDepths.length - 1] - ) { - this._clearClip(); - if (!this._userEnabledStencil) { - this._internalDisable.call(this.GL, this.GL.STENCIL_TEST); - } - - // Reset saved state - // this._userEnabledStencil = this._savedStencilTestState; - } - super.pop(...args); - this._applyStencilTestIfClipping(); - } _applyStencilTestIfClipping() { const drawTarget = this.drawTarget(); if (drawTarget._isClipApplied !== this._stencilTestOn) { @@ -1814,13 +696,7 @@ class RendererGL extends Renderer { } } } - resetMatrix() { - this.states.setValue('uModelMatrix', this.states.uModelMatrix.clone()); - this.states.uModelMatrix.reset(); - this.states.setValue('uViewMatrix', this.states.uViewMatrix.clone()); - this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); - return this; - } + ////////////////////////////////////////////// // SHADER @@ -1832,75 +708,27 @@ class RendererGL extends Renderer { * and the shader must be valid in that context. 
*/ - _getStrokeShader() { - // select the stroke shader to use - const stroke = this.states.userStrokeShader; - if (stroke) { - return stroke; - } - return this._getLineShader(); - } - + // TODO move to super class _getSphereMapping(img) { if (!this.sphereMapping) { this.sphereMapping = this._pInst.createFilterShader(sphereMapping); } this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); this.scratchMat3.invert(this.scratchMat3); // uNMMatrix is 3x3 - this.sphereMapping.setUniform('uFovY', this.states.curCamera.cameraFOV); - this.sphereMapping.setUniform('uAspect', this.states.curCamera.aspectRatio); - this.sphereMapping.setUniform('uNewNormalMatrix', this.scratchMat3.mat3); - this.sphereMapping.setUniform('uEnvMap', img); + this.sphereMapping.setUniform("uFovY", this.states.curCamera.cameraFOV); + this.sphereMapping.setUniform("uAspect", this.states.curCamera.aspectRatio); + this.sphereMapping.setUniform("uNewNormalMatrix", this.scratchMat3.mat3); + this.sphereMapping.setUniform("uEnvMap", img); return this.sphereMapping; } - /* - * This method will handle both image shaders and - * fill shaders, returning the appropriate shader - * depending on the current context (image or shape). 
- */ - _getFillShader() { - // If drawing an image, check for user-defined image shader and filters - if (this._drawingImage) { - // Use user-defined image shader if available and no filter is applied - if (this.states.userImageShader && !this._drawingFilter) { - return this.states.userImageShader; - } else { - return this._getLightShader(); // Fallback to light shader - } - } - // If user has defined a fill shader, return that - else if (this.states.userFillShader) { - return this.states.userFillShader; - } - // Use normal shader if normal material is active - else if (this.states._useNormalMaterial) { - return this._getNormalShader(); - } - // Use light shader if lighting or textures are enabled - else if (this.states.enableLighting || this.states._tex) { - return this._getLightShader(); - } - // Default to color shader if no other conditions are met - return this._getColorShader(); - } - - _getPointShader() { - // select the point shader to use - const point = this.states.userPointShader; - if (!point || !point.isPointShader()) { - return this._getPointShader(); - } - return point; - } - baseMaterialShader() { if (!this._pInst._glAttributes.perPixelLighting) { throw new Error( - 'The material shader does not support hooks without perPixelLighting. Try turning it back on.' + "The material shader does not support hooks without perPixelLighting. Try turning it back on." 
); } - return this._getLightShader(); + return super.baseMaterialShader(); } _getLightShader() { @@ -1908,22 +736,22 @@ class RendererGL extends Renderer { if (this._pInst._glAttributes.perPixelLighting) { this._defaultLightShader = new Shader( this, - this._webGL2CompatibilityPrefix('vert', 'highp') + + this._webGL2CompatibilityPrefix("vert", "highp") + defaultShaders.phongVert, - this._webGL2CompatibilityPrefix('frag', 'highp') + + this._webGL2CompatibilityPrefix("frag", "highp") + defaultShaders.phongFrag, { vertex: { - 'void beforeVertex': '() {}', - 'Vertex getObjectInputs': '(Vertex inputs) { return inputs; }', - 'Vertex getWorldInputs': '(Vertex inputs) { return inputs; }', - 'Vertex getCameraInputs': '(Vertex inputs) { return inputs; }', - 'void afterVertex': '() {}' + "void beforeVertex": "() {}", + "Vertex getObjectInputs": "(Vertex inputs) { return inputs; }", + "Vertex getWorldInputs": "(Vertex inputs) { return inputs; }", + "Vertex getCameraInputs": "(Vertex inputs) { return inputs; }", + "void afterVertex": "() {}", }, fragment: { - 'void beforeFragment': '() {}', - 'Inputs getPixelInputs': '(Inputs inputs) { return inputs; }', - 'vec4 combineColors': `(ColorComponents components) { + "void beforeFragment": "() {}", + "Inputs getPixelInputs": "(Inputs inputs) { return inputs; }", + "vec4 combineColors": `(ColorComponents components) { vec4 color = vec4(0.); color.rgb += components.diffuse * components.baseColor; color.rgb += components.ambient * components.ambientColor; @@ -1932,17 +760,17 @@ class RendererGL extends Renderer { color.a = components.opacity; return color; }`, - 'vec4 getFinalColor': '(vec4 color) { return color; }', - 'void afterFragment': '() {}' - } + "vec4 getFinalColor": "(vec4 color) { return color; }", + "void afterFragment": "() {}", + }, } ); } else { this._defaultLightShader = new Shader( this, - this._webGL2CompatibilityPrefix('vert', 'highp') + + this._webGL2CompatibilityPrefix("vert", "highp") + defaultShaders.lightVert, - 
this._webGL2CompatibilityPrefix('frag', 'highp') + + this._webGL2CompatibilityPrefix("frag", "highp") + defaultShaders.lightTextureFrag ); } @@ -1951,31 +779,27 @@ class RendererGL extends Renderer { return this._defaultLightShader; } - baseNormalShader() { - return this._getNormalShader(); - } - _getNormalShader() { if (!this._defaultNormalShader) { this._defaultNormalShader = new Shader( this, - this._webGL2CompatibilityPrefix('vert', 'mediump') + + this._webGL2CompatibilityPrefix("vert", "mediump") + defaultShaders.normalVert, - this._webGL2CompatibilityPrefix('frag', 'mediump') + + this._webGL2CompatibilityPrefix("frag", "mediump") + defaultShaders.normalFrag, { vertex: { - 'void beforeVertex': '() {}', - 'Vertex getObjectInputs': '(Vertex inputs) { return inputs; }', - 'Vertex getWorldInputs': '(Vertex inputs) { return inputs; }', - 'Vertex getCameraInputs': '(Vertex inputs) { return inputs; }', - 'void afterVertex': '() {}' + "void beforeVertex": "() {}", + "Vertex getObjectInputs": "(Vertex inputs) { return inputs; }", + "Vertex getWorldInputs": "(Vertex inputs) { return inputs; }", + "Vertex getCameraInputs": "(Vertex inputs) { return inputs; }", + "void afterVertex": "() {}", }, fragment: { - 'void beforeFragment': '() {}', - 'vec4 getFinalColor': '(vec4 color) { return color; }', - 'void afterFragment': '() {}' - } + "void beforeFragment": "() {}", + "vec4 getFinalColor": "(vec4 color) { return color; }", + "void afterFragment": "() {}", + }, } ); } @@ -1983,31 +807,27 @@ class RendererGL extends Renderer { return this._defaultNormalShader; } - baseColorShader() { - return this._getColorShader(); - } - _getColorShader() { if (!this._defaultColorShader) { this._defaultColorShader = new Shader( this, - this._webGL2CompatibilityPrefix('vert', 'mediump') + + this._webGL2CompatibilityPrefix("vert", "mediump") + defaultShaders.normalVert, - this._webGL2CompatibilityPrefix('frag', 'mediump') + + this._webGL2CompatibilityPrefix("frag", "mediump") + 
defaultShaders.basicFrag, { vertex: { - 'void beforeVertex': '() {}', - 'Vertex getObjectInputs': '(Vertex inputs) { return inputs; }', - 'Vertex getWorldInputs': '(Vertex inputs) { return inputs; }', - 'Vertex getCameraInputs': '(Vertex inputs) { return inputs; }', - 'void afterVertex': '() {}' + "void beforeVertex": "() {}", + "Vertex getObjectInputs": "(Vertex inputs) { return inputs; }", + "Vertex getWorldInputs": "(Vertex inputs) { return inputs; }", + "Vertex getCameraInputs": "(Vertex inputs) { return inputs; }", + "void afterVertex": "() {}", }, fragment: { - 'void beforeFragment': '() {}', - 'vec4 getFinalColor': '(vec4 color) { return color; }', - 'void afterFragment': '() {}' - } + "void beforeFragment": "() {}", + "vec4 getFinalColor": "(vec4 color) { return color; }", + "void afterFragment": "() {}", + }, } ); } @@ -2015,92 +835,60 @@ class RendererGL extends Renderer { return this._defaultColorShader; } - /** - * TODO(dave): un-private this when there is a way to actually override the - * shader used for points - * - * Get the shader used when drawing points with `point()`. - * - * You can call `pointShader().modify()` - * and change any of the following hooks: - * - `void beforeVertex`: Called at the start of the vertex shader. - * - `vec3 getLocalPosition`: Update the position of vertices before transforms are applied. It takes in `vec3 position` and must return a modified version. - * - `vec3 getWorldPosition`: Update the position of vertices after transforms are applied. It takes in `vec3 position` and pust return a modified version. - * - `float getPointSize`: Update the size of the point. It takes in `float size` and must return a modified version. - * - `void afterVertex`: Called at the end of the vertex shader. - * - `void beforeFragment`: Called at the start of the fragment shader. - * - `bool shouldDiscard`: Points are drawn inside a square, with the corners discarded in the fragment shader to create a circle. Use this to change this logic. 
It takes in a `bool willDiscard` and must return a modified version. - * - `vec4 getFinalColor`: Update the final color after mixing. It takes in a `vec4 color` and must return a modified version. - * - `void afterFragment`: Called at the end of the fragment shader. - * - * Call `pointShader().inspectHooks()` to see all the possible hooks and - * their default implementations. - * - * @returns {p5.Shader} The `point()` shader - * @private() - */ - pointShader() { - return this._getPointShader(); - } - _getPointShader() { if (!this._defaultPointShader) { this._defaultPointShader = new Shader( this, - this._webGL2CompatibilityPrefix('vert', 'mediump') + + this._webGL2CompatibilityPrefix("vert", "mediump") + defaultShaders.pointVert, - this._webGL2CompatibilityPrefix('frag', 'mediump') + + this._webGL2CompatibilityPrefix("frag", "mediump") + defaultShaders.pointFrag, { vertex: { - 'void beforeVertex': '() {}', - 'vec3 getLocalPosition': '(vec3 position) { return position; }', - 'vec3 getWorldPosition': '(vec3 position) { return position; }', - 'float getPointSize': '(float size) { return size; }', - 'void afterVertex': '() {}' + "void beforeVertex": "() {}", + "vec3 getLocalPosition": "(vec3 position) { return position; }", + "vec3 getWorldPosition": "(vec3 position) { return position; }", + "float getPointSize": "(float size) { return size; }", + "void afterVertex": "() {}", }, fragment: { - 'void beforeFragment': '() {}', - 'vec4 getFinalColor': '(vec4 color) { return color; }', - 'bool shouldDiscard': '(bool outside) { return outside; }', - 'void afterFragment': '() {}' - } + "void beforeFragment": "() {}", + "vec4 getFinalColor": "(vec4 color) { return color; }", + "bool shouldDiscard": "(bool outside) { return outside; }", + "void afterFragment": "() {}", + }, } ); } return this._defaultPointShader; } - baseStrokeShader() { - return this._getLineShader(); - } - _getLineShader() { if (!this._defaultLineShader) { this._defaultLineShader = new Shader( this, - 
this._webGL2CompatibilityPrefix('vert', 'mediump') + + this._webGL2CompatibilityPrefix("vert", "mediump") + defaultShaders.lineVert, - this._webGL2CompatibilityPrefix('frag', 'mediump') + + this._webGL2CompatibilityPrefix("frag", "mediump") + defaultShaders.lineFrag, { vertex: { - 'void beforeVertex': '() {}', - 'StrokeVertex getObjectInputs': - '(StrokeVertex inputs) { return inputs; }', - 'StrokeVertex getWorldInputs': - '(StrokeVertex inputs) { return inputs; }', - 'StrokeVertex getCameraInputs': - '(StrokeVertex inputs) { return inputs; }', - 'void afterVertex': '() {}' + "void beforeVertex": "() {}", + "StrokeVertex getObjectInputs": + "(StrokeVertex inputs) { return inputs; }", + "StrokeVertex getWorldInputs": + "(StrokeVertex inputs) { return inputs; }", + "StrokeVertex getCameraInputs": + "(StrokeVertex inputs) { return inputs; }", + "void afterVertex": "() {}", }, fragment: { - 'void beforeFragment': '() {}', - 'Inputs getPixelInputs': '(Inputs inputs) { return inputs; }', - 'vec4 getFinalColor': '(vec4 color) { return color; }', - 'bool shouldDiscard': '(bool outside) { return outside; }', - 'void afterFragment': '() {}' - } + "void beforeFragment": "() {}", + "Inputs getPixelInputs": "(Inputs inputs) { return inputs; }", + "vec4 getFinalColor": "(vec4 color) { return color; }", + "bool shouldDiscard": "(bool outside) { return outside; }", + "void afterFragment": "() {}", + }, } ); } @@ -2111,13 +899,13 @@ class RendererGL extends Renderer { _getFontShader() { if (!this._defaultFontShader) { if (this.webglVersion === constants.WEBGL) { - this.GL.getExtension('OES_standard_derivatives'); + this.GL.getExtension("OES_standard_derivatives"); } this._defaultFontShader = new Shader( this, - this._webGL2CompatibilityPrefix('vert', 'highp') + + this._webGL2CompatibilityPrefix("vert", "highp") + defaultShaders.fontVert, - this._webGL2CompatibilityPrefix('frag', 'highp') + + this._webGL2CompatibilityPrefix("frag", "highp") + defaultShaders.fontFrag ); } @@ -2128,32 
+916,32 @@ class RendererGL extends Renderer { if (!this._baseFilterShader) { this._baseFilterShader = new Shader( this, - this._webGL2CompatibilityPrefix('vert', 'highp') + + this._webGL2CompatibilityPrefix("vert", "highp") + defaultShaders.filterBaseVert, - this._webGL2CompatibilityPrefix('frag', 'highp') + + this._webGL2CompatibilityPrefix("frag", "highp") + defaultShaders.filterBaseFrag, { - vertex: {}, - fragment: { - 'vec4 getColor': `(FilterInputs inputs, in sampler2D canvasContent) { + vertex: {}, + fragment: { + "vec4 getColor": `(FilterInputs inputs, in sampler2D canvasContent) { return getTexture(canvasContent, inputs.texCoord); - }` + }`, + }, } - } ); } return this._baseFilterShader; } _webGL2CompatibilityPrefix(shaderType, floatPrecision) { - let code = ''; + let code = ""; if (this.webglVersion === constants.WEBGL2) { - code += '#version 300 es\n#define WEBGL2\n'; + code += "#version 300 es\n#define WEBGL2\n"; } - if (shaderType === 'vert') { - code += '#define VERTEX_SHADER\n'; - } else if (shaderType === 'frag') { - code += '#define FRAGMENT_SHADER\n'; + if (shaderType === "vert") { + code += "#define VERTEX_SHADER\n"; + } else if (shaderType === "frag") { + code += "#define FRAGMENT_SHADER\n"; } if (floatPrecision) { code += `precision ${floatPrecision} float;\n`; @@ -2161,43 +949,13 @@ class RendererGL extends Renderer { return code; } - /** - * @private - * Note: DO NOT CALL THIS while in the middle of binding another texture, - * since it will change the texture binding in order to allocate the empty - * texture! Grab its value beforehand! - */ - _getEmptyTexture() { - if (!this._emptyTexture) { - // a plain white texture RGBA, full alpha, single pixel. 
- const im = new Image(1, 1); - im.set(0, 0, 255); - this._emptyTexture = new Texture(this, im); - } - return this._emptyTexture; - } - - getTexture(input) { - let src = input; - if (src instanceof Framebuffer) { - src = src.color; - } - - const texture = this.textures.get(src); - if (texture) { - return texture; - } - - const tex = new Texture(this, src); - this.textures.set(src, tex); - return tex; - } - /* - * used in imageLight, - * To create a blurry image from the input non blurry img, if it doesn't already exist - * Add it to the diffusedTexture map, - * Returns the blurry image - * maps a Image used by imageLight() to a p5.Framebuffer + // TODO move to super class + /* + * used in imageLight, + * To create a blurry image from the input non blurry img, if it doesn't already exist + * Add it to the diffusedTexture map, + * Returns the blurry image + * maps a Image used by imageLight() to a p5.Framebuffer */ getDiffusedTexture(input) { // if one already exists for a given input image @@ -2213,7 +971,7 @@ class RendererGL extends Renderer { newFramebuffer = new Framebuffer(this, { width, height, - density: 1 + density: 1, }); // create framebuffer is like making a new sketch, all functions on main // sketch it would be available on framebuffer @@ -2225,8 +983,8 @@ class RendererGL extends Renderer { } newFramebuffer.draw(() => { this.shader(this.diffusedShader); - this.diffusedShader.setUniform('environmentMap', input); - this.states.setValue('strokeColor', null); + this.diffusedShader.setUniform("environmentMap", input); + this.states.setValue("strokeColor", null); this.noLights(); this.plane(width, height); }); @@ -2234,6 +992,7 @@ class RendererGL extends Renderer { return newFramebuffer; } + // TODO move to super class /* * used in imageLight, * To create a texture from the input non blurry image, if it doesn't already exist @@ -2256,7 +1015,7 @@ class RendererGL extends Renderer { const framebuffer = new Framebuffer(this, { width: size, height: size, - 
density: 1 + density: 1, }); let count = Math.log(size) / Math.log(2); if (!this.specularShader) { @@ -2277,9 +1036,9 @@ class RendererGL extends Renderer { framebuffer.draw(() => { this.shader(this.specularShader); this.clear(); - this.specularShader.setUniform('environmentMap', input); - this.specularShader.setUniform('roughness', roughness); - this.states.setValue('strokeColor', null); + this.specularShader.setUniform("environmentMap", input); + this.specularShader.setUniform("roughness", roughness); + this.states.setValue("strokeColor", null); this.noLights(); this.plane(w, w); }); @@ -2292,257 +1051,927 @@ class RendererGL extends Renderer { return tex; } - /** - * @private - * @returns {p5.Framebuffer|null} The currently active framebuffer, or null if - * the main canvas is the current draw target. + /* Binds a buffer to the drawing context + * when passed more than two arguments it also updates or initializes + * the data associated with the buffer */ - activeFramebuffer() { - return this.activeFramebuffers[this.activeFramebuffers.length - 1] || null; + _bindBuffer(buffer, target, values, type, usage) { + const gl = this.GL; + if (!target) target = gl.ARRAY_BUFFER; + gl.bindBuffer(target, buffer); + + if (values !== undefined) { + const data = this._normalizeBufferData(values, type); + gl.bufferData(target, data, usage || gl.STATIC_DRAW); + } } - createFramebuffer(options) { - return new Framebuffer(this, options); + _makeFilterShader(renderer, operation) { + return new Shader( + renderer, + filterShaderVert, + filterShaderFrags[operation] + ); } - _setGlobalUniforms(shader) { - const modelMatrix = this.states.uModelMatrix; - const viewMatrix = this.states.uViewMatrix; - const projectionMatrix = this.states.uPMatrix; - const modelViewMatrix = modelMatrix.copy().mult(viewMatrix); + _prepareBuffer(renderBuffer, geometry, shader) { + const attributes = shader.attributes; + const gl = this.GL; + const glBuffers = this._getOrMakeCachedBuffers(geometry); - 
shader.setUniform( - 'uPerspective', - this.states.curCamera.useLinePerspective ? 1 : 0 + // loop through each of the buffer definitions + const attr = attributes[renderBuffer.attr]; + if (!attr) { + return; + } + // check if the geometry has the appropriate source array + let buffer = glBuffers[renderBuffer.dst]; + const src = geometry[renderBuffer.src]; + if (src && src.length > 0) { + // check if we need to create the GL buffer + const createBuffer = !buffer; + if (createBuffer) { + // create and remember the buffer + glBuffers[renderBuffer.dst] = buffer = gl.createBuffer(); + } + // bind the buffer + gl.bindBuffer(gl.ARRAY_BUFFER, buffer); + + // check if we need to fill the buffer with data + if (createBuffer || geometry.dirtyFlags[renderBuffer.src] !== false) { + const map = renderBuffer.map; + // get the values from the geometry, possibly transformed + const values = map ? map(src) : src; + // fill the buffer with the values + this._bindBuffer(buffer, gl.ARRAY_BUFFER, values); + // mark the geometry's source array as clean + geometry.dirtyFlags[renderBuffer.src] = false; + } + // enable the attribute + shader.enableAttrib(attr, renderBuffer.size); + } else { + const loc = attr.location; + if (loc === -1 || !this.registerEnabled.has(loc)) { + return; + } + // Disable register corresponding to unused attribute + gl.disableVertexAttribArray(loc); + // Record register availability + this.registerEnabled.delete(loc); + } + } + + _enableAttrib(_shader, attr, size, type, normalized, stride, offset) { + const loc = attr.location; + const gl = this.GL; + // Enable register even if it is disabled + if (!this.registerEnabled.has(loc)) { + gl.enableVertexAttribArray(loc); + // Record register availability + this.registerEnabled.add(loc); + } + gl.vertexAttribPointer( + loc, + size, + type || gl.FLOAT, + normalized || false, + stride || 0, + offset || 0 ); - shader.setUniform('uViewMatrix', viewMatrix.mat4); - shader.setUniform('uProjectionMatrix', 
projectionMatrix.mat4); - shader.setUniform('uModelMatrix', modelMatrix.mat4); - shader.setUniform('uModelViewMatrix', modelViewMatrix.mat4); - if (shader.uniforms.uModelViewProjectionMatrix) { - const modelViewProjectionMatrix = modelViewMatrix.copy(); - modelViewProjectionMatrix.mult(projectionMatrix); - shader.setUniform( - 'uModelViewProjectionMatrix', - modelViewProjectionMatrix.mat4 - ); + } + + _ensureGeometryBuffers(buffers, indices, indexType) { + const gl = this.GL; + + if (indices) { + const buffer = gl.createBuffer(); + this._bindBuffer(buffer, gl.ELEMENT_ARRAY_BUFFER, indices, indexType); + + buffers.indexBuffer = buffer; + + // If we're using a Uint32Array for our indexBuffer we will need to pass a + // different enum value to WebGL draw triangles. This happens in + // the _drawElements function. + buffers.indexBufferType = indexType === Uint32Array ? gl.UNSIGNED_INT : gl.UNSIGNED_SHORT; + } else if (buffers.indexBuffer) { + // the index buffer is unused, remove it + gl.deleteBuffer(buffers.indexBuffer); + buffers.indexBuffer = null; } - if (shader.uniforms.uNormalMatrix) { - this.scratchMat3.inverseTranspose4x4(modelViewMatrix); - shader.setUniform('uNormalMatrix', this.scratchMat3.mat3); + } + + _freeBuffers(buffers) { + const gl = this.GL; + if (buffers.indexBuffer) { + gl.deleteBuffer(buffers.indexBuffer); } - if (shader.uniforms.uModelNormalMatrix) { - this.scratchMat3.inverseTranspose4x4(this.states.uModelMatrix); - shader.setUniform('uModelNormalMatrix', this.scratchMat3.mat3); + + function freeBuffers(defs) { + for (const def of defs) { + if (buffers[def.dst]) { + gl.deleteBuffer(buffers[def.dst]); + buffers[def.dst] = null; + } + } } - if (shader.uniforms.uCameraNormalMatrix) { - this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); - shader.setUniform('uCameraNormalMatrix', this.scratchMat3.mat3); + + // free all the buffers + freeBuffers(this.buffers.stroke); + freeBuffers(this.buffers.fill); + freeBuffers(this.buffers.user); + } + 
+ _initShader(shader) { + const gl = this.GL; + + const vertShader = gl.createShader(gl.VERTEX_SHADER); + gl.shaderSource(vertShader, shader.vertSrc()); + gl.compileShader(vertShader); + if (!gl.getShaderParameter(vertShader, gl.COMPILE_STATUS)) { + throw new Error(`Yikes! An error occurred compiling the vertex shader: ${ + gl.getShaderInfoLog(vertShader) + } in:\n\n${shader.vertSrc()}`); } - if (shader.uniforms.uCameraRotation) { - this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); - shader.setUniform('uCameraRotation', this.scratchMat3.mat3); + + const fragShader = gl.createShader(gl.FRAGMENT_SHADER); + gl.shaderSource(fragShader, shader.fragSrc()); + gl.compileShader(fragShader); + if (!gl.getShaderParameter(fragShader, gl.COMPILE_STATUS)) { + throw new Error(`Darn! An error occurred compiling the fragment shader: ${ + gl.getShaderInfoLog(fragShader) + }`); + } + + const program = gl.createProgram(); + gl.attachShader(program, vertShader); + gl.attachShader(program, fragShader); + gl.linkProgram(program); + + if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { + throw new Error( + `Snap! 
Error linking shader program: ${gl.getProgramInfoLog(program)}` + ); } - shader.setUniform('uViewport', this._viewport); + + shader._glProgram = program; + shader._vertShader = vertShader; + shader._fragShader = fragShader; } - _setStrokeUniforms(strokeShader) { - // set the uniform values - strokeShader.setUniform('uSimpleLines', this._simpleLines); - strokeShader.setUniform('uUseLineColor', this._useLineColor); - strokeShader.setUniform('uMaterialColor', this.states.curStrokeColor); - strokeShader.setUniform('uStrokeWeight', this.states.strokeWeight); - strokeShader.setUniform('uStrokeCap', STROKE_CAP_ENUM[this.curStrokeCap]); - strokeShader.setUniform( - 'uStrokeJoin', - STROKE_JOIN_ENUM[this.curStrokeJoin] + _finalizeShader() {} + + _getShaderAttributes(shader) { + return getWebGLShaderAttributes(shader, this.GL); + } + + getUniformMetadata(shader) { + return getWebGLUniformMetadata(shader, this.GL); + } + + updateUniformValue(shader, uniform, data) { + return setWebGLUniformValue( + shader, + uniform, + data, + (tex) => this.getTexture(tex), + this.GL ); } - _setFillUniforms(fillShader) { - this.mixedSpecularColor = [...this.states.curSpecularColor]; + _updateTexture(uniform, tex) { + const gl = this.GL; + gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); + tex.bindTexture(); + tex.update(); + gl.uniform1i(uniform.location, uniform.samplerIndex); + } + + bindTexture(tex) { + // bind texture using gl context + glTarget and + // generated gl texture object + this.GL.bindTexture(this.GL.TEXTURE_2D, tex.getTexture().texture); + } + + unbindTexture() { + // unbind per above, disable texturing on glTarget + this.GL.bindTexture(this.GL.TEXTURE_2D, null); + } + + _unbindFramebufferTexture(uniform) { + // Make sure an empty texture is bound to the slot so that we don't + // accidentally leave a framebuffer bound, causing a feedback loop + // when something else tries to write to it + const gl = this.GL; const empty = this._getEmptyTexture(); + 
gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); + empty.bindTexture(); + gl.uniform1i(uniform.location, uniform.samplerIndex); + } + + createTexture({ width, height, format, dataType }) { + const gl = this.GL; + const tex = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, tex); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, + gl.RGBA, gl.UNSIGNED_BYTE, null); + // TODO use format and data type + return { texture: tex, glFormat: gl.RGBA, glDataType: gl.UNSIGNED_BYTE }; + } + + createFramebufferTextureHandle(framebufferTexture) { + // For WebGL, framebuffer texture handles are designed to be null + return null; + } + + uploadTextureFromSource({ texture, glFormat, glDataType }, source) { + const gl = this.GL; + gl.texImage2D(gl.TEXTURE_2D, 0, glFormat, glFormat, glDataType, source); + } + + uploadTextureFromData({ texture, glFormat, glDataType }, data, width, height) { + const gl = this.GL; + gl.texImage2D( + gl.TEXTURE_2D, + 0, + glFormat, + width, + height, + 0, + glFormat, + glDataType, + data + ); + } + + getSampler(_texture) { + return undefined; + } + + bindTextureToShader({ texture }, sampler, uniformName, unit) { + const gl = this.GL; + gl.activeTexture(gl.TEXTURE0 + unit); + gl.bindTexture(gl.TEXTURE_2D, texture); + const location = gl.getUniformLocation(glProgram, uniformName); + gl.uniform1i(location, unit); + } + + setTextureParams(texture) { + return setWebGLTextureParams(texture, this.GL, this.webglVersion); + } + + deleteTexture({ texture }) { + this.GL.deleteTexture(texture); + } + + + /** + * @private blends colors according to color components. + * If alpha value is less than 1, or non-standard blendMode + * we need to enable blending on our gl context. + * @param {Number[]} color The currently set color, with values in 0-1 range + * @param {Boolean} [hasTransparency] Whether the shape being drawn has other + * transparency internally, e.g. 
via vertex colors + * @return {Number[]} Normalized numbers array + */ + _applyColorBlend(colors, hasTransparency) { + const gl = this.GL; + + const isTexture = this.states.drawMode === constants.TEXTURE; + const doBlend = + hasTransparency || + this.states.userFillShader || + this.states.userStrokeShader || + this.states.userPointShader || + isTexture || + this.states.curBlendMode !== constants.BLEND || + colors[colors.length - 1] < 1.0 || + this._isErasing; + + if (doBlend !== this._isBlending) { + if ( + doBlend || + (this.states.curBlendMode !== constants.BLEND && + this.states.curBlendMode !== constants.ADD) + ) { + gl.enable(gl.BLEND); + } else { + gl.disable(gl.BLEND); + } + gl.depthMask(true); + this._isBlending = doBlend; + } + this._applyBlendMode(); + return colors; + } + + ////////////////////////////////////////////// + // Shader hooks + ////////////////////////////////////////////// + populateHooks(shader, src, shaderType) { + return populateGLSLHooks(shader, src, shaderType); + } + + ////////////////////////////////////////////// + // Framebuffer methods + ////////////////////////////////////////////// + + defaultFramebufferAlpha() { + return this._pInst._glAttributes.alpha; + } + + defaultFramebufferAntialias() { + return this.supportsFramebufferAntialias() + ? 
this._pInst._glAttributes.antialias + : false; + } + + supportsFramebufferAntialias() { + return this.webglVersion === constants.WEBGL2; + } + + createFramebufferResources(framebuffer) { + const gl = this.GL; + + framebuffer.framebuffer = gl.createFramebuffer(); + if (!framebuffer.framebuffer) { + throw new Error('Unable to create a framebuffer'); + } + + if (framebuffer.antialias) { + framebuffer.aaFramebuffer = gl.createFramebuffer(); + if (!framebuffer.aaFramebuffer) { + throw new Error('Unable to create a framebuffer for antialiasing'); + } + } + } - if (this.states._useMetalness > 0) { - this.mixedSpecularColor = this.mixedSpecularColor.map( - (mixedSpecularColor, index) => - this.states.curFillColor[index] * this.states._useMetalness + - mixedSpecularColor * (1 - this.states._useMetalness) + validateFramebufferFormats(framebuffer) { + const gl = this.GL; + + if ( + framebuffer.useDepth && + this.webglVersion === constants.WEBGL && + !gl.getExtension('WEBGL_depth_texture') + ) { + console.warn( + 'Unable to create depth textures in this environment. Falling back ' + + 'to a framebuffer without depth.' ); + framebuffer.useDepth = false; } - // TODO: optimize - fillShader.setUniform('uUseVertexColor', this._useVertexColor); - fillShader.setUniform('uMaterialColor', this.states.curFillColor); - fillShader.setUniform('isTexture', !!this.states._tex); - // We need to explicitly set uSampler back to an empty texture here. - // In general, we record the last set texture so we can re-apply it - // the next time a shader is used. However, the texture() function - // works differently and is global p5 state. If the p5 state has - // been cleared, we also need to clear the value in uSampler to match. 
- fillShader.setUniform('uSampler', this.states._tex || empty); - fillShader.setUniform('uTint', this.states.tint); - - fillShader.setUniform('uHasSetAmbient', this.states._hasSetAmbient); - fillShader.setUniform('uAmbientMatColor', this.states.curAmbientColor); - fillShader.setUniform('uSpecularMatColor', this.mixedSpecularColor); - fillShader.setUniform('uEmissiveMatColor', this.states.curEmissiveColor); - fillShader.setUniform('uSpecular', this.states._useSpecularMaterial); - fillShader.setUniform('uEmissive', this.states._useEmissiveMaterial); - fillShader.setUniform('uShininess', this.states._useShininess); - fillShader.setUniform('uMetallic', this.states._useMetalness); - - this._setImageLightUniforms(fillShader); - - fillShader.setUniform('uUseLighting', this.states.enableLighting); - - const pointLightCount = this.states.pointLightDiffuseColors.length / 3; - fillShader.setUniform('uPointLightCount', pointLightCount); - fillShader.setUniform( - 'uPointLightLocation', - this.states.pointLightPositions - ); - fillShader.setUniform( - 'uPointLightDiffuseColors', - this.states.pointLightDiffuseColors - ); - fillShader.setUniform( - 'uPointLightSpecularColors', - this.states.pointLightSpecularColors - ); + if ( + framebuffer.useDepth && + this.webglVersion === constants.WEBGL && + framebuffer.depthFormat === constants.FLOAT + ) { + console.warn( + 'FLOAT depth format is unavailable in WebGL 1. ' + + 'Defaulting to UNSIGNED_INT.' 
+ ); + framebuffer.depthFormat = constants.UNSIGNED_INT; + } - const directionalLightCount = - this.states.directionalLightDiffuseColors.length / 3; - fillShader.setUniform('uDirectionalLightCount', directionalLightCount); - fillShader.setUniform( - 'uLightingDirection', - this.states.directionalLightDirections - ); - fillShader.setUniform( - 'uDirectionalDiffuseColors', - this.states.directionalLightDiffuseColors - ); - fillShader.setUniform( - 'uDirectionalSpecularColors', - this.states.directionalLightSpecularColors - ); + if (![ + constants.UNSIGNED_BYTE, + constants.FLOAT, + constants.HALF_FLOAT + ].includes(framebuffer.format)) { + console.warn( + 'Unknown Framebuffer format. ' + + 'Please use UNSIGNED_BYTE, FLOAT, or HALF_FLOAT. ' + + 'Defaulting to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + if (framebuffer.useDepth && ![ + constants.UNSIGNED_INT, + constants.FLOAT + ].includes(framebuffer.depthFormat)) { + console.warn( + 'Unknown Framebuffer depth format. ' + + 'Please use UNSIGNED_INT or FLOAT. Defaulting to FLOAT.' + ); + framebuffer.depthFormat = constants.FLOAT; + } - // TODO: sum these here... - const ambientLightCount = this.states.ambientLightColors.length / 3; - this.mixedAmbientLight = [...this.states.ambientLightColors]; + const support = checkWebGLCapabilities(this); + if (!support.float && framebuffer.format === constants.FLOAT) { + console.warn( + 'This environment does not support FLOAT textures. ' + + 'Falling back to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + if ( + framebuffer.useDepth && + !support.float && + framebuffer.depthFormat === constants.FLOAT + ) { + console.warn( + 'This environment does not support FLOAT depth textures. ' + + 'Falling back to UNSIGNED_INT.' + ); + framebuffer.depthFormat = constants.UNSIGNED_INT; + } + if (!support.halfFloat && framebuffer.format === constants.HALF_FLOAT) { + console.warn( + 'This environment does not support HALF_FLOAT textures. 
' + + 'Falling back to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } - if (this.states._useMetalness > 0) { - this.mixedAmbientLight = this.mixedAmbientLight.map(ambientColors => { - let mixing = ambientColors - this.states._useMetalness; - return Math.max(0, mixing); - }); + if ( + framebuffer.channels === RGB && + [constants.FLOAT, constants.HALF_FLOAT].includes(framebuffer.format) + ) { + console.warn( + 'FLOAT and HALF_FLOAT formats do not work cross-platform with only ' + + 'RGB channels. Falling back to RGBA.' + ); + framebuffer.channels = RGBA; } - fillShader.setUniform('uAmbientLightCount', ambientLightCount); - fillShader.setUniform('uAmbientColor', this.mixedAmbientLight); + } - const spotLightCount = this.states.spotLightDiffuseColors.length / 3; - fillShader.setUniform('uSpotLightCount', spotLightCount); - fillShader.setUniform('uSpotLightAngle', this.states.spotLightAngle); - fillShader.setUniform('uSpotLightConc', this.states.spotLightConc); - fillShader.setUniform( - 'uSpotLightDiffuseColors', - this.states.spotLightDiffuseColors - ); - fillShader.setUniform( - 'uSpotLightSpecularColors', - this.states.spotLightSpecularColors - ); - fillShader.setUniform('uSpotLightLocation', this.states.spotLightPositions); - fillShader.setUniform( - 'uSpotLightDirection', - this.states.spotLightDirections - ); + recreateFramebufferTextures(framebuffer) { + const gl = this.GL; - fillShader.setUniform( - 'uConstantAttenuation', - this.states.constantAttenuation + const prevBoundTexture = gl.getParameter(gl.TEXTURE_BINDING_2D); + const prevBoundFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING); + + const colorTexture = gl.createTexture(); + if (!colorTexture) { + throw new Error('Unable to create color texture'); + } + gl.bindTexture(gl.TEXTURE_2D, colorTexture); + const colorFormat = this._getFramebufferColorFormat(framebuffer); + gl.texImage2D( + gl.TEXTURE_2D, + 0, + colorFormat.internalFormat, + framebuffer.width * 
framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + colorFormat.format, + colorFormat.type, + null ); - fillShader.setUniform('uLinearAttenuation', this.states.linearAttenuation); - fillShader.setUniform( - 'uQuadraticAttenuation', - this.states.quadraticAttenuation + framebuffer.colorTexture = colorTexture; + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.framebuffer); + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + gl.COLOR_ATTACHMENT0, + gl.TEXTURE_2D, + colorTexture, + 0 ); - } - // getting called from _setFillUniforms - _setImageLightUniforms(shader) { - //set uniform values - shader.setUniform('uUseImageLight', this.states.activeImageLight != null); - // true - if (this.states.activeImageLight) { - // this.states.activeImageLight has image as a key - // look up the texture from the diffusedTexture map - let diffusedLight = this.getDiffusedTexture(this.states.activeImageLight); - shader.setUniform('environmentMapDiffused', diffusedLight); - let specularLight = this.getSpecularTexture(this.states.activeImageLight); + if (framebuffer.useDepth) { + // Create the depth texture + const depthTexture = gl.createTexture(); + if (!depthTexture) { + throw new Error('Unable to create depth texture'); + } + const depthFormat = this._getFramebufferDepthFormat(framebuffer); + gl.bindTexture(gl.TEXTURE_2D, depthTexture); + gl.texImage2D( + gl.TEXTURE_2D, + 0, + depthFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + depthFormat.format, + depthFormat.type, + null + ); - shader.setUniform('environmentMapSpecular', specularLight); + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + framebuffer.useStencil ? 
gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, + gl.TEXTURE_2D, + depthTexture, + 0 + ); + framebuffer.depthTexture = depthTexture; } + + // Create separate framebuffer for antialiasing + if (framebuffer.antialias) { + framebuffer.colorRenderbuffer = gl.createRenderbuffer(); + gl.bindRenderbuffer(gl.RENDERBUFFER, framebuffer.colorRenderbuffer); + gl.renderbufferStorageMultisample( + gl.RENDERBUFFER, + Math.max( + 0, + Math.min(framebuffer.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) + ), + colorFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density + ); + + if (framebuffer.useDepth) { + const depthFormat = this._getFramebufferDepthFormat(framebuffer); + framebuffer.depthRenderbuffer = gl.createRenderbuffer(); + gl.bindRenderbuffer(gl.RENDERBUFFER, framebuffer.depthRenderbuffer); + gl.renderbufferStorageMultisample( + gl.RENDERBUFFER, + Math.max( + 0, + Math.min(framebuffer.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) + ), + depthFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density + ); + } + + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.aaFramebuffer); + gl.framebufferRenderbuffer( + gl.FRAMEBUFFER, + gl.COLOR_ATTACHMENT0, + gl.RENDERBUFFER, + framebuffer.colorRenderbuffer + ); + if (framebuffer.useDepth) { + gl.framebufferRenderbuffer( + gl.FRAMEBUFFER, + framebuffer.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, + gl.RENDERBUFFER, + framebuffer.depthRenderbuffer + ); + } + } + + gl.bindTexture(gl.TEXTURE_2D, prevBoundTexture); + gl.bindFramebuffer(gl.FRAMEBUFFER, prevBoundFramebuffer); } - _setPointUniforms(pointShader) { - // set the uniform values - pointShader.setUniform('uMaterialColor', this.states.curStrokeColor); - // @todo is there an instance where this isn't stroke weight? - // should be they be same var? 
- pointShader.setUniform( - 'uPointSize', - this.states.strokeWeight * this._pixelDensity - ); + /** + * To create a WebGL texture, one needs to supply three pieces of information: + * the type (the data type each channel will be stored as, e.g. int or float), + * the format (the color channels that will each be stored in the previously + * specified type, e.g. rgb or rgba), and the internal format (the specifics + * of how data for each channel, in the aforementioned type, will be packed + * together, such as how many bits to use, e.g. RGBA32F or RGB565.) + * + * The format and channels asked for by the user hint at what these values + * need to be, and the WebGL version affects what options are avaiable. + * This method returns the values for these three properties, given the + * framebuffer's settings. + * + * @private + */ + _getFramebufferColorFormat(framebuffer) { + let type, format, internalFormat; + const gl = this.GL; + + if (framebuffer.format === constants.FLOAT) { + type = gl.FLOAT; + } else if (framebuffer.format === constants.HALF_FLOAT) { + type = this.webglVersion === constants.WEBGL2 + ? 
gl.HALF_FLOAT + : gl.getExtension('OES_texture_half_float').HALF_FLOAT_OES; + } else { + type = gl.UNSIGNED_BYTE; + } + + if (framebuffer.channels === RGBA) { + format = gl.RGBA; + } else { + format = gl.RGB; + } + + if (this.webglVersion === constants.WEBGL2) { + // https://webgl2fundamentals.org/webgl/lessons/webgl-data-textures.html + const table = { + [gl.FLOAT]: { + [gl.RGBA]: gl.RGBA32F + // gl.RGB32F is not available in Firefox without an alpha channel + }, + [gl.HALF_FLOAT]: { + [gl.RGBA]: gl.RGBA16F + // gl.RGB16F is not available in Firefox without an alpha channel + }, + [gl.UNSIGNED_BYTE]: { + [gl.RGBA]: gl.RGBA8, // gl.RGBA4 + [gl.RGB]: gl.RGB8 // gl.RGB565 + } + }; + internalFormat = table[type][format]; + } else if (framebuffer.format === constants.HALF_FLOAT) { + internalFormat = gl.RGBA; + } else { + internalFormat = format; + } + + return { internalFormat, format, type }; } - /* Binds a buffer to the drawing context - * when passed more than two arguments it also updates or initializes - * the data associated with the buffer + /** + * To create a WebGL texture, one needs to supply three pieces of information: + * the type (the data type each channel will be stored as, e.g. int or float), + * the format (the color channels that will each be stored in the previously + * specified type, e.g. rgb or rgba), and the internal format (the specifics + * of how data for each channel, in the aforementioned type, will be packed + * together, such as how many bits to use, e.g. RGBA32F or RGB565.) + * + * This method takes into account the settings asked for by the user and + * returns values for these three properties that can be used for the + * texture storing depth information. 
+ * + * @private */ - _bindBuffer(buffer, target, values, type, usage) { - if (!target) target = this.GL.ARRAY_BUFFER; - this.GL.bindBuffer(target, buffer); - if (values !== undefined) { - let data = values; - if (values instanceof DataArray) { - data = values.dataArray(); - } else if (!(data instanceof (type || Float32Array))) { - data = new (type || Float32Array)(data); + _getFramebufferDepthFormat(framebuffer) { + let type, format, internalFormat; + const gl = this.GL; + + if (framebuffer.useStencil) { + if (framebuffer.depthFormat === constants.FLOAT) { + type = gl.FLOAT_32_UNSIGNED_INT_24_8_REV; + } else if (this.webglVersion === constants.WEBGL2) { + type = gl.UNSIGNED_INT_24_8; + } else { + type = gl.getExtension('WEBGL_depth_texture').UNSIGNED_INT_24_8_WEBGL; + } + } else { + if (framebuffer.depthFormat === constants.FLOAT) { + type = gl.FLOAT; + } else { + type = gl.UNSIGNED_INT; } - this.GL.bufferData(target, data, usage || this.GL.STATIC_DRAW); } + + if (framebuffer.useStencil) { + format = gl.DEPTH_STENCIL; + } else { + format = gl.DEPTH_COMPONENT; + } + + if (framebuffer.useStencil) { + if (framebuffer.depthFormat === constants.FLOAT) { + internalFormat = gl.DEPTH32F_STENCIL8; + } else if (this.webglVersion === constants.WEBGL2) { + internalFormat = gl.DEPTH24_STENCIL8; + } else { + internalFormat = gl.DEPTH_STENCIL; + } + } else if (this.webglVersion === constants.WEBGL2) { + if (framebuffer.depthFormat === constants.FLOAT) { + internalFormat = gl.DEPTH_COMPONENT32F; + } else { + internalFormat = gl.DEPTH_COMPONENT24; + } + } else { + internalFormat = gl.DEPTH_COMPONENT; + } + + return { internalFormat, format, type }; } - /////////////////////////////// - //// UTILITY FUNCTIONS - ////////////////////////////// - _arraysEqual(a, b) { - const aLength = a.length; - if (aLength !== b.length) return false; - return a.every((ai, i) => ai === b[i]); + _deleteFramebufferTexture(texture) { + const gl = this.GL; + 
gl.deleteTexture(texture.rawTexture().texture); + this.textures.delete(texture); } - _isTypedArray(arr) { - return [ - Float32Array, - Float64Array, - Int16Array, - Uint16Array, - Uint32Array - ].some(x => arr instanceof x); + deleteFramebufferTextures(framebuffer) { + this._deleteFramebufferTexture(framebuffer.color) + if (framebuffer.depth) this._deleteFramebufferTexture(framebuffer.depth); + const gl = this.GL; + if (framebuffer.colorRenderbuffer) gl.deleteRenderbuffer(framebuffer.colorRenderbuffer); + if (framebuffer.depthRenderbuffer) gl.deleteRenderbuffer(framebuffer.depthRenderbuffer); } - /** - * turn a p5.Vector Array into a one dimensional number array - * @private - * @param {p5.Vector[]} arr an array of p5.Vector - * @return {Number[]} a one dimensional array of numbers - * [p5.Vector(1, 2, 3), p5.Vector(4, 5, 6)] -> - * [1, 2, 3, 4, 5, 6] - */ - _vToNArray(arr) { - return arr.flatMap(item => [item.x, item.y, item.z]); + deleteFramebufferResources(framebuffer) { + const gl = this.GL; + gl.deleteFramebuffer(framebuffer.framebuffer); + if (framebuffer.aaFramebuffer) { + gl.deleteFramebuffer(framebuffer.aaFramebuffer); + } + if (framebuffer.depthRenderbuffer) { + gl.deleteRenderbuffer(framebuffer.depthRenderbuffer); + } + if (framebuffer.colorRenderbuffer) { + gl.deleteRenderbuffer(framebuffer.colorRenderbuffer); + } + } + + getFramebufferToBind(framebuffer) { + if (framebuffer.antialias) { + return framebuffer.aaFramebuffer; + } else { + return framebuffer.framebuffer; + } + } + + updateFramebufferTexture(framebuffer, property) { + if (framebuffer.antialias) { + const gl = this.GL; + gl.bindFramebuffer(gl.READ_FRAMEBUFFER, framebuffer.aaFramebuffer); + gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, framebuffer.framebuffer); + const partsToCopy = { + colorTexture: [ + gl.COLOR_BUFFER_BIT, + framebuffer.colorP5Texture.magFilter === constants.LINEAR ? 
gl.LINEAR : gl.NEAREST + ], + }; + if (framebuffer.useDepth) { + partsToCopy.depthTexture = [ + gl.DEPTH_BUFFER_BIT, + framebuffer.depthP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST + ]; + } + const [flag, filter] = partsToCopy[property]; + gl.blitFramebuffer( + 0, + 0, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + 0, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + flag, + filter + ); + + const activeFbo = this.activeFramebuffer(); + this.bindFramebuffer(activeFbo); + } + } + + bindFramebuffer(framebuffer) { + const gl = this.GL; + gl.bindFramebuffer( + gl.FRAMEBUFFER, + framebuffer + ? this.getFramebufferToBind(framebuffer) + : null + ); + } + + readFramebufferPixels(framebuffer) { + const gl = this.GL; + const prevFramebuffer = this.activeFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.framebuffer); + const colorFormat = this._getFramebufferColorFormat(framebuffer); + const pixels = readPixelsWebGL( + framebuffer.pixels, + gl, + framebuffer.framebuffer, + 0, + 0, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + colorFormat.format, + colorFormat.type + ); + this.bindFramebuffer(prevFramebuffer); + return pixels; + } + + readFramebufferPixel(framebuffer, x, y) { + const colorFormat = this._getFramebufferColorFormat(framebuffer); + return readPixelWebGL( + this.GL, + framebuffer.framebuffer, + x, + y, + colorFormat.format, + colorFormat.type + ); + } + + readFramebufferRegion(framebuffer, x, y, w, h) { + const gl = this.GL; + const colorFormat = this._getFramebufferColorFormat(framebuffer); + + const rawData = readPixelsWebGL( + undefined, + gl, + framebuffer.framebuffer, + x * framebuffer.density, + y * framebuffer.density, + w * framebuffer.density, + h * framebuffer.density, + colorFormat.format, + colorFormat.type + ); + + // Framebuffer data might be either a Uint8Array or Float32Array + // 
depending on its format, and it may or may not have an alpha channel. + // To turn it into an image, we have to normalize the data into a + // Uint8ClampedArray with alpha. + const fullData = new Uint8ClampedArray( + w * h * framebuffer.density * framebuffer.density * 4 + ); + // Default channels that aren't in the framebuffer (e.g. alpha, if the + // framebuffer is in RGB mode instead of RGBA) to 255 + fullData.fill(255); + + const channels = colorFormat.format === gl.RGB ? 3 : 4; + for (let yPos = 0; yPos < h * framebuffer.density; yPos++) { + for (let xPos = 0; xPos < w * framebuffer.density; xPos++) { + for (let channel = 0; channel < 4; channel++) { + const idx = (yPos * w * framebuffer.density + xPos) * 4 + channel; + if (channel < channels) { + // Find the index of this pixel in `rawData`, which might have a + // different number of channels + const rawDataIdx = channels === 4 + ? idx + : (yPos * w * framebuffer.density + xPos) * channels + channel; + fullData[idx] = rawData[rawDataIdx]; + } + } + } + } + + // Create image from data + const region = new Image(w * framebuffer.density, h * framebuffer.density); + region.imageData = region.canvas.getContext('2d').createImageData( + region.width, + region.height + ); + region.imageData.data.set(fullData); + region.pixels = region.imageData.data; + region.updatePixels(); + if (framebuffer.density !== 1) { + region.pixelDensity(framebuffer.density); + } + return region; + } + + updateFramebufferPixels(framebuffer) { + const gl = this.GL; + framebuffer.colorP5Texture.bindTexture(); + const colorFormat = this._getFramebufferColorFormat(framebuffer); + + const channels = colorFormat.format === gl.RGBA ? 4 : 3; + const len = framebuffer.width * framebuffer.height * framebuffer.density * framebuffer.density * channels; + const TypedArrayClass = colorFormat.type === gl.UNSIGNED_BYTE ? 
Uint8Array : Float32Array; + + if (!(framebuffer.pixels instanceof TypedArrayClass) || framebuffer.pixels.length !== len) { + throw new Error( + 'The pixels array has not been set correctly. Please call loadPixels() before updatePixels().' + ); + } + + gl.texImage2D( + gl.TEXTURE_2D, + 0, + colorFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + colorFormat.format, + colorFormat.type, + framebuffer.pixels + ); + framebuffer.colorP5Texture.unbindTexture(); + framebuffer.dirty.colorTexture = false; + + const prevFramebuffer = this.activeFramebuffer(); + if (framebuffer.antialias) { + // We need to make sure the antialiased framebuffer also has the updated + // pixels so that if more is drawn to it, it goes on top of the updated + // pixels instead of replacing them. + // We can't blit the framebuffer to the multisampled antialias + // framebuffer to leave both in the same state, so instead we have + // to use image() to put the framebuffer texture onto the antialiased + // framebuffer. 
+ framebuffer.begin(); + this.push(); + this.states.setValue('imageMode', constants.CORNER); + this.setCamera(framebuffer.filterCamera); + this.resetMatrix(); + this.states.setValue('strokeColor', null); + this.clear(); + this._drawingFilter = true; + this.image( + framebuffer, + 0, 0, + framebuffer.width, framebuffer.height, + -this.width / 2, -this.height / 2, + this.width, this.height + ); + this._drawingFilter = false; + this.pop(); + if (framebuffer.useDepth) { + gl.clearDepth(1); + gl.clear(gl.DEPTH_BUFFER_BIT); + } + framebuffer.end(); + } else { + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.framebuffer); + if (framebuffer.useDepth) { + gl.clearDepth(1); + gl.clear(gl.DEPTH_BUFFER_BIT); + } + this.bindFramebuffer(prevFramebuffer); + } } } @@ -2703,51 +2132,7 @@ function rendererGL(p5, fn) { * @param {Object} obj object with key-value pairs */ fn.setAttributes = function (key, value) { - if (typeof this._glAttributes === 'undefined') { - console.log( - 'You are trying to use setAttributes on a p5.Graphics object ' + - 'that does not use a WEBGL renderer.' 
- ); - return; - } - let unchanged = true; - if (typeof value !== 'undefined') { - //first time modifying the attributes - if (this._glAttributes === null) { - this._glAttributes = {}; - } - if (this._glAttributes[key] !== value) { - //changing value of previously altered attribute - this._glAttributes[key] = value; - unchanged = false; - } - //setting all attributes with some change - } else if (key instanceof Object) { - if (this._glAttributes !== key) { - this._glAttributes = key; - unchanged = false; - } - } - //@todo_FES - if (!this._renderer.isP3D || unchanged) { - return; - } - - if (!this._setupDone) { - if (this._renderer.geometryBufferCache.numCached() > 0) { - p5._friendlyError( - 'Sorry, Could not set the attributes, you need to call setAttributes() ' + - 'before calling the other drawing methods in setup()' - ); - return; - } - } - - this._renderer._resetContext(); - - if (this._renderer.states.curCamera) { - this._renderer.states.curCamera._renderer = this._renderer; - } + return this._renderer._setAttributes(key, value); }; /** @@ -2764,109 +2149,9 @@ function rendererGL(p5, fn) { p5.renderers[constants.WEBGL2] = p5.RendererGL; } -/** - * @private - * @param {Uint8Array|Float32Array|undefined} pixels An existing pixels array to reuse if the size is the same - * @param {WebGLRenderingContext} gl The WebGL context - * @param {WebGLFramebuffer|null} framebuffer The Framebuffer to read - * @param {Number} x The x coordiante to read, premultiplied by pixel density - * @param {Number} y The y coordiante to read, premultiplied by pixel density - * @param {Number} width The width in pixels to be read (factoring in pixel density) - * @param {Number} height The height in pixels to be read (factoring in pixel density) - * @param {GLEnum} format Either RGB or RGBA depending on how many channels to read - * @param {GLEnum} type The datatype of each channel, e.g. 
UNSIGNED_BYTE or FLOAT - * @param {Number|undefined} flipY If provided, the total height with which to flip the y axis about - * @returns {Uint8Array|Float32Array} pixels A pixels array with the current state of the - * WebGL context read into it - */ -export function readPixelsWebGL( - pixels, - gl, - framebuffer, - x, - y, - width, - height, - format, - type, - flipY -) { - // Record the currently bound framebuffer so we can go back to it after, and - // bind the framebuffer we want to read from - const prevFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING); - gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer); - - const channels = format === gl.RGBA ? 4 : 3; - - // Make a pixels buffer if it doesn't already exist - const len = width * height * channels; - const TypedArrayClass = type === gl.UNSIGNED_BYTE ? Uint8Array : Float32Array; - if (!(pixels instanceof TypedArrayClass) || pixels.length !== len) { - pixels = new TypedArrayClass(len); - } - - gl.readPixels( - x, - flipY ? flipY - y - height : y, - width, - height, - format, - type, - pixels - ); - - // Re-bind whatever was previously bound - gl.bindFramebuffer(gl.FRAMEBUFFER, prevFramebuffer); - - if (flipY) { - // WebGL pixels are inverted compared to 2D pixels, so we have to flip - // the resulting rows. 
Adapted from https://stackoverflow.com/a/41973289 - const halfHeight = Math.floor(height / 2); - const tmpRow = new TypedArrayClass(width * channels); - for (let y = 0; y < halfHeight; y++) { - const topOffset = y * width * 4; - const bottomOffset = (height - y - 1) * width * 4; - tmpRow.set(pixels.subarray(topOffset, topOffset + width * 4)); - pixels.copyWithin(topOffset, bottomOffset, bottomOffset + width * 4); - pixels.set(tmpRow, bottomOffset); - } - } - - return pixels; -} - -/** - * @private - * @param {WebGLRenderingContext} gl The WebGL context - * @param {WebGLFramebuffer|null} framebuffer The Framebuffer to read - * @param {Number} x The x coordinate to read, premultiplied by pixel density - * @param {Number} y The y coordinate to read, premultiplied by pixel density - * @param {GLEnum} format Either RGB or RGBA depending on how many channels to read - * @param {GLEnum} type The datatype of each channel, e.g. UNSIGNED_BYTE or FLOAT - * @param {Number|undefined} flipY If provided, the total height with which to flip the y axis about - * @returns {Number[]} pixels The channel data for the pixel at that location - */ -export function readPixelWebGL(gl, framebuffer, x, y, format, type, flipY) { - // Record the currently bound framebuffer so we can go back to it after, and - // bind the framebuffer we want to read from - const prevFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING); - gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer); - - const channels = format === gl.RGBA ? 4 : 3; - const TypedArrayClass = type === gl.UNSIGNED_BYTE ? Uint8Array : Float32Array; - const pixels = new TypedArrayClass(channels); - - gl.readPixels(x, flipY ? 
flipY - y - 1 : y, 1, 1, format, type, pixels); - - // Re-bind whatever was previously bound - gl.bindFramebuffer(gl.FRAMEBUFFER, prevFramebuffer); - - return Array.from(pixels); -} - export default rendererGL; export { RendererGL }; -if (typeof p5 !== 'undefined') { +if (typeof p5 !== "undefined") { rendererGL(p5, p5.prototype); } diff --git a/src/webgl/p5.Shader.js b/src/webgl/p5.Shader.js index cecd7ae992..186b23ddd6 100644 --- a/src/webgl/p5.Shader.js +++ b/src/webgl/p5.Shader.js @@ -6,13 +6,8 @@ * @requires core */ -import { Texture } from './p5.Texture'; - class Shader { constructor(renderer, vertSrc, fragSrc, options = {}) { - // TODO: adapt this to not take ids, but rather, - // to take the source for a vertex and fragment shader - // to enable custom shaders at some later date this._renderer = renderer; this._vertSrc = vertSrc; this._fragSrc = fragSrc; @@ -126,46 +121,7 @@ class Shader { } shaderSrc(src, shaderType) { - const main = 'void main'; - let [preMain, postMain] = src.split(main); - - let hooks = ''; - let defines = ''; - for (const key in this.hooks.uniforms) { - hooks += `uniform ${key};\n`; - } - if (this.hooks.declarations) { - hooks += this.hooks.declarations + '\n'; - } - if (this.hooks[shaderType].declarations) { - hooks += this.hooks[shaderType].declarations + '\n'; - } - for (const hookDef in this.hooks.helpers) { - hooks += `${hookDef}${this.hooks.helpers[hookDef]}\n`; - } - for (const hookDef in this.hooks[shaderType]) { - if (hookDef === 'declarations') continue; - const [hookType, hookName] = hookDef.split(' '); - - // Add a #define so that if the shader wants to use preprocessor directives to - // optimize away the extra function calls in main, it can do so - if (this.hooks.modified[shaderType][hookDef]) { - defines += '#define AUGMENTED_HOOK_' + hookName + '\n'; - } - - hooks += - hookType + ' HOOK_' + hookName + this.hooks[shaderType][hookDef] + '\n'; - } - - // Allow shaders to specify the location of hook #define statements. 
Normally these - // go after function definitions, but one might want to have them defined earlier - // in order to only conditionally make uniforms. - if (preMain.indexOf('#define HOOK_DEFINES') !== -1) { - preMain = preMain.replace('#define HOOK_DEFINES', '\n' + defines + '\n'); - defines = ''; - } - - return preMain + '\n' + defines + hooks + main + postMain; + return this._renderer.populateHooks(this, src, shaderType); } /** @@ -515,68 +471,23 @@ class Shader { * @private */ init() { - if (this._glProgram === 0 /* or context is stale? */) { - const gl = this._renderer.GL; - - // @todo: once custom shading is allowed, - // friendly error messages should be used here to share - // compiler and linker errors. - - //set up the shader by - // 1. creating and getting a gl id for the shader program, - // 2. compliling its vertex & fragment sources, - // 3. linking the vertex and fragment shaders - this._vertShader = gl.createShader(gl.VERTEX_SHADER); - //load in our default vertex shader - gl.shaderSource(this._vertShader, this.vertSrc()); - gl.compileShader(this._vertShader); - // if our vertex shader failed compilation? - if (!gl.getShaderParameter(this._vertShader, gl.COMPILE_STATUS)) { - const glError = gl.getShaderInfoLog(this._vertShader); - if (typeof IS_MINIFIED !== 'undefined') { - console.error(glError); - } else { - p5._friendlyError( - `Yikes! An error occurred compiling the vertex shader:${glError}` - ); - throw glError; - } - return null; - } - - this._fragShader = gl.createShader(gl.FRAGMENT_SHADER); - //load in our material frag shader - gl.shaderSource(this._fragShader, this.fragSrc()); - gl.compileShader(this._fragShader); - // if our frag shader failed compilation? - if (!gl.getShaderParameter(this._fragShader, gl.COMPILE_STATUS)) { - const glError = gl.getShaderInfoLog(this._fragShader); - if (typeof IS_MINIFIED !== 'undefined') { - console.error(glError); - } else { - p5._friendlyError( - `Darn! 
An error occurred compiling the fragment shader:${glError}` - ); - throw glError; - } - return null; - } - - this._glProgram = gl.createProgram(); - gl.attachShader(this._glProgram, this._vertShader); - gl.attachShader(this._glProgram, this._fragShader); - gl.linkProgram(this._glProgram); - if (!gl.getProgramParameter(this._glProgram, gl.LINK_STATUS)) { - p5._friendlyError( - `Snap! Error linking shader program: ${gl.getProgramInfoLog( - this._glProgram - )}` + // If the shader is uninitialized or context was lost + if (!this._initialized) { + try { + this._renderer._initShader(this); // Backend-specific shader init + } catch (err) { + throw new Error( + `Whoops! Something went wrong initializing the shader:\n${err.message || err}` ); } this._loadAttributes(); this._loadUniforms(); + this._renderer._finalizeShader(this); + + this._initialized = true; } + return this; } @@ -813,28 +724,7 @@ class Shader { if (this._loadedAttributes) { return; } - - this.attributes = {}; - - const gl = this._renderer.GL; - - const numAttributes = gl.getProgramParameter( - this._glProgram, - gl.ACTIVE_ATTRIBUTES - ); - for (let i = 0; i < numAttributes; ++i) { - const attributeInfo = gl.getActiveAttrib(this._glProgram, i); - const name = attributeInfo.name; - const location = gl.getAttribLocation(this._glProgram, name); - const attribute = {}; - attribute.name = name; - attribute.location = location; - attribute.index = i; - attribute.type = attributeInfo.type; - attribute.size = attributeInfo.size; - this.attributes[name] = attribute; - } - + this.attributes = this._renderer._getShaderAttributes(this); this._loadedAttributes = true; } @@ -848,68 +738,41 @@ class Shader { return; } - const gl = this._renderer.GL; + this.uniforms = {}; + this.samplers = []; + + const uniformMetadata = this._renderer.getUniformMetadata(this); - // Inspect shader and cache uniform info - const numUniforms = gl.getProgramParameter( - this._glProgram, - gl.ACTIVE_UNIFORMS - ); + for (const meta of 
uniformMetadata) { + const uniform = { + ...meta, + _cachedData: undefined, + }; - let samplerIndex = 0; - for (let i = 0; i < numUniforms; ++i) { - const uniformInfo = gl.getActiveUniform(this._glProgram, i); - const uniform = {}; - uniform.location = gl.getUniformLocation( - this._glProgram, - uniformInfo.name - ); - uniform.size = uniformInfo.size; - let uniformName = uniformInfo.name; - //uniforms that are arrays have their name returned as - //someUniform[0] which is a bit silly so we trim it - //off here. The size property tells us that its an array - //so we dont lose any information by doing this - if (uniformInfo.size > 1) { - uniformName = uniformName.substring(0, uniformName.indexOf('[0]')); - } - uniform.name = uniformName; - uniform.type = uniformInfo.type; - uniform._cachedData = undefined; - if (uniform.type === gl.SAMPLER_2D) { - uniform.samplerIndex = samplerIndex; - samplerIndex++; + if (uniform.isSampler) { this.samplers.push(uniform); } - uniform.isArray = - uniformInfo.size > 1 || - uniform.type === gl.FLOAT_MAT3 || - uniform.type === gl.FLOAT_MAT4 || - uniform.type === gl.FLOAT_VEC2 || - uniform.type === gl.FLOAT_VEC3 || - uniform.type === gl.FLOAT_VEC4 || - uniform.type === gl.INT_VEC2 || - uniform.type === gl.INT_VEC4 || - uniform.type === gl.INT_VEC3; - - this.uniforms[uniformName] = uniform; + this.uniforms[uniform.name] = uniform; } - this._loadedUniforms = true; - } - compile() { - // TODO + this._loadedUniforms = true; } /** * initializes (if needed) and binds the shader program. 
* @private */ - bindShader() { + bindShader(shaderType, options) { + if (this.shaderType && this.shaderType !== shaderType) { + throw new Error( + `You've already used this shader as a ${this.shaderType} shader, but are now using it as a ${shaderType}.` + ); + } + this.shaderType = shaderType; this.init(); if (!this._bound) { - this.useProgram(); + this.useProgram(options); this._bound = true; } } @@ -926,17 +789,21 @@ class Shader { return this; } + /** + * @private + */ bindTextures() { - const gl = this._renderer.GL; - const empty = this._renderer._getEmptyTexture(); for (const uniform of this.samplers) { + if (uniform.noData) continue; let tex = uniform.texture; if ( tex === undefined || ( - false && + // Make sure we unbind a framebuffer uniform if it's the same + // framebuffer that is actively being drawn to in order to + // prevent a feedback cycle tex.isFramebufferTexture && !tex.src.framebuffer.antialias && tex.src.framebuffer === this._renderer.activeFramebuffer() @@ -947,30 +814,17 @@ class Shader { // so we supply a default texture instead.
uniform.texture = tex = empty; } - gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); - tex.bindTexture(); - tex.update(); - gl.uniform1i(uniform.location, uniform.samplerIndex); - } - } - - updateTextures() { - for (const uniform of this.samplers) { - const tex = uniform.texture; - if (tex) { - tex.update(); - } + this._renderer._updateTexture(uniform, tex); } } + /** + * @private + */ unbindTextures() { - const gl = this._renderer.GL; - const empty = this._renderer._getEmptyTexture(); for (const uniform of this.samplers) { if (uniform.texture?.isFramebufferTexture) { - gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); - empty.bindTexture(); - gl.uniform1i(uniform.location, uniform.samplerIndex); + this._renderer._unbindFramebufferTexture(uniform); } } } @@ -979,10 +833,9 @@ class Shader { * @chainable * @private */ - useProgram() { - const gl = this._renderer.GL; + useProgram(options) { if (this._renderer._curShader !== this) { - gl.useProgram(this._glProgram); + this._renderer._useShader(this); this._renderer._curShader = this; } return this; @@ -1222,14 +1075,17 @@ class Shader { * * */ - setUniform(uniformName, data) { + setUniform(uniformName, rawData) { this.init(); const uniform = this.uniforms[uniformName]; if (!uniform) { return; } - const gl = this._renderer.GL; + + const data = this._renderer._mapUniformData + ? 
this._renderer._mapUniformData(uniform, rawData) + : rawData; if (uniform.isArray) { if ( @@ -1250,141 +1106,7 @@ class Shader { } } - const location = uniform.location; - - this.useProgram(); - - switch (uniform.type) { - case gl.BOOL: - if (data === true) { - gl.uniform1i(location, 1); - } else { - gl.uniform1i(location, 0); - } - break; - case gl.INT: - if (uniform.size > 1) { - data.length && gl.uniform1iv(location, data); - } else { - gl.uniform1i(location, data); - } - break; - case gl.FLOAT: - if (uniform.size > 1) { - data.length && gl.uniform1fv(location, data); - } else { - gl.uniform1f(location, data); - } - break; - case gl.FLOAT_MAT3: - gl.uniformMatrix3fv(location, false, data); - break; - case gl.FLOAT_MAT4: - gl.uniformMatrix4fv(location, false, data); - break; - case gl.FLOAT_VEC2: - if (uniform.size > 1) { - data.length && gl.uniform2fv(location, data); - } else { - gl.uniform2f(location, data[0], data[1]); - } - break; - case gl.FLOAT_VEC3: - if (uniform.size > 1) { - data.length && gl.uniform3fv(location, data); - } else { - gl.uniform3f(location, data[0], data[1], data[2]); - } - break; - case gl.FLOAT_VEC4: - if (uniform.size > 1) { - data.length && gl.uniform4fv(location, data); - } else { - gl.uniform4f(location, data[0], data[1], data[2], data[3]); - } - break; - case gl.INT_VEC2: - if (uniform.size > 1) { - data.length && gl.uniform2iv(location, data); - } else { - gl.uniform2i(location, data[0], data[1]); - } - break; - case gl.INT_VEC3: - if (uniform.size > 1) { - data.length && gl.uniform3iv(location, data); - } else { - gl.uniform3i(location, data[0], data[1], data[2]); - } - break; - case gl.INT_VEC4: - if (uniform.size > 1) { - data.length && gl.uniform4iv(location, data); - } else { - gl.uniform4i(location, data[0], data[1], data[2], data[3]); - } - break; - case gl.SAMPLER_2D: - if (typeof data == 'number') { - if ( - data < gl.TEXTURE0 || - data > gl.TEXTURE31 || - data !== Math.ceil(data) - ) { - console.log( - '🌸 p5.js says: ' + 
- "You're trying to use a number as the data for a texture." + - 'Please use a texture.' - ); - return this; - } - gl.activeTexture(data); - gl.uniform1i(location, data); - } else { - gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); - uniform.texture = - data instanceof Texture ? data : this._renderer.getTexture(data); - gl.uniform1i(location, uniform.samplerIndex); - if (uniform.texture.src.gifProperties) { - uniform.texture.src._animateGif(this._renderer._pInst); - } - } - break; - case gl.SAMPLER_CUBE: - case gl.SAMPLER_3D: - case gl.SAMPLER_2D_SHADOW: - case gl.SAMPLER_2D_ARRAY: - case gl.SAMPLER_2D_ARRAY_SHADOW: - case gl.SAMPLER_CUBE_SHADOW: - case gl.INT_SAMPLER_2D: - case gl.INT_SAMPLER_3D: - case gl.INT_SAMPLER_CUBE: - case gl.INT_SAMPLER_2D_ARRAY: - case gl.UNSIGNED_INT_SAMPLER_2D: - case gl.UNSIGNED_INT_SAMPLER_3D: - case gl.UNSIGNED_INT_SAMPLER_CUBE: - case gl.UNSIGNED_INT_SAMPLER_2D_ARRAY: - if (typeof data !== 'number') { - break; - } - if ( - data < gl.TEXTURE0 || - data > gl.TEXTURE31 || - data !== Math.ceil(data) - ) { - console.log( - '🌸 p5.js says: ' + - "You're trying to use a number as the data for a texture." + - 'Please use a texture.' 
- ); - break; - } - gl.activeTexture(data); - gl.uniform1i(location, data); - break; - //@todo complete all types - } - return this; + this._renderer.updateUniformValue(this, uniform, data); } /** @@ -1401,45 +1123,12 @@ class Shader { `The attribute "${attr.name}"passed to enableAttrib does not belong to this shader.` ); } - const loc = attr.location; - if (loc !== -1) { - const gl = this._renderer.GL; - // Enable register even if it is disabled - if (!this._renderer.registerEnabled.has(loc)) { - gl.enableVertexAttribArray(loc); - // Record register availability - this._renderer.registerEnabled.add(loc); - } - this._renderer.GL.vertexAttribPointer( - loc, - size, - type || gl.FLOAT, - normalized || false, - stride || 0, - offset || 0 - ); - } - } - return this; - } - /** - * Once all buffers have been bound, this checks to see if there are any - * remaining active attributes, likely left over from previous renders, - * and disables them so that they don't affect rendering. - * @private - */ - disableRemainingAttributes() { - for (const location of this._renderer.registerEnabled.values()) { - if ( - !Object.keys(this.attributes).some( - key => this.attributes[key].location === location - ) - ) { - this._renderer.GL.disableVertexAttribArray(location); - this._renderer.registerEnabled.delete(location); + if (attr.location !== -1) { + this._renderer._enableAttrib(this, attr, size, type, normalized, stride, offset); } } + return this; } }; diff --git a/src/webgl/p5.Texture.js b/src/webgl/p5.Texture.js index 17056f8106..d1c45b84f1 100644 --- a/src/webgl/p5.Texture.js +++ b/src/webgl/p5.Texture.js @@ -15,24 +15,29 @@ import { Graphics } from '../core/p5.Graphics'; import { FramebufferTexture } from './p5.Framebuffer'; class Texture { - constructor (renderer, obj, settings) { + constructor (renderer, obj, settings = {}) { this._renderer = renderer; - const gl = this._renderer.GL; + this.src = obj; - settings = settings || {}; + this.format = settings.format || 
'rgba8unorm'; + this.minFilter = settings.minFilter || constants.LINEAR; + this.magFilter = settings.magFilter || constants.LINEAR; + this.wrapS = settings.wrapS || renderer.states.textureWrapX; + this.wrapT = settings.wrapT || renderer.states.textureWrapY; + this.dataType = settings.dataType || 'uint8'; - this.src = obj; - this.glTex = undefined; - this.glTarget = gl.TEXTURE_2D; - this.glFormat = settings.format || gl.RGBA; - this.mipmaps = false; - this.glMinFilter = settings.minFilter || gl.LINEAR; - this.glMagFilter = settings.magFilter || gl.LINEAR; - this.glWrapS = settings.wrapS || gl.CLAMP_TO_EDGE; - this.glWrapT = settings.wrapT || gl.CLAMP_TO_EDGE; - this.glDataType = settings.dataType || gl.UNSIGNED_BYTE; + this.textureHandle = null; + + this._detectSourceType(); + + const textureData = this._getTextureDataFromSource(); + this.width = textureData.width; + this.height = textureData.height; + this.init(textureData); + } + /* const support = checkWebGLCapabilities(renderer); if (this.glFormat === gl.HALF_FLOAT && !support.halfFloat) { console.log('This device does not support dataType HALF_FLOAT. Falling back to FLOAT.'); @@ -60,37 +65,29 @@ class Texture { if (this.glMinFilter === gl.LINEAR) this.glMinFilter = gl.NEAREST; if (this.glMagFilter === gl.LINEAR) this.glMagFilter = gl.NEAREST; } + }*/ - // used to determine if this texture might need constant updating - // because it is a video or gif. 
- this.isSrcMediaElement = false; - typeof MediaElement !== 'undefined' && obj instanceof MediaElement; - this._videoPrevUpdateTime = 0; - this.isSrcHTMLElement = - typeof Element !== 'undefined' && - obj instanceof Element && - !(obj instanceof Graphics) && - !(obj instanceof Renderer); + _detectSourceType() { + const obj = this.src; + this.isFramebufferTexture = obj instanceof FramebufferTexture; this.isSrcP5Image = obj instanceof Image; this.isSrcP5Graphics = obj instanceof Graphics; this.isSrcP5Renderer = obj instanceof Renderer; - this.isImageData = - typeof ImageData !== 'undefined' && obj instanceof ImageData; - this.isFramebufferTexture = obj instanceof FramebufferTexture; - - const textureData = this._getTextureDataFromSource(); - this.width = textureData.width; - this.height = textureData.height; - - this.init(textureData); - return this; + this.isImageData = typeof ImageData !== 'undefined' && obj instanceof ImageData; + this.isSrcMediaElement = + typeof MediaElement !== 'undefined' && obj instanceof MediaElement; + this.isSrcHTMLElement = + typeof Element !== 'undefined' && + obj instanceof Element && + !this.isSrcMediaElement && + !this.isSrcP5Graphics && + !this.isSrcP5Renderer; } remove() { - if (this.glTex) { - const gl = this._renderer.GL; - gl.deleteTexture(this.glTex); - this.glTex = undefined; + if (this.textureHandle) { + this._renderer.deleteTexture(this.textureHandle); + this.textureHandle = null; } } @@ -123,54 +120,50 @@ class Texture { * tries to upload the texture for the first time if data is * already available. 
*/ - init (data) { - const gl = this._renderer.GL; + init(textureData) { if (!this.isFramebufferTexture) { - this.glTex = gl.createTexture(); + this.textureHandle = this._renderer.createTexture({ + format: this.format, + dataType: this.dataType, + width: textureData.width, + height: textureData.height, + }); + } else { + this.textureHandle = this._renderer.createFramebufferTextureHandle(this.src); } - this.glWrapS = this._renderer.states.textureWrapX; - this.glWrapT = this._renderer.states.textureWrapY; + this._renderer.setTextureParams(this, { + minFilter: this.minFilter, + magFilter: this.magFilter, + wrapS: this.wrapS, + wrapT: this.wrapT + }); - this.setWrapMode(this.glWrapS, this.glWrapT); this.bindTexture(); - //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, this.glMagFilter); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this.glMinFilter); - - if (this.isFramebufferTexture) { - // Do nothing, the framebuffer manages its own content - } else if ( - this.width === 0 || - this.height === 0 || - (this.isSrcMediaElement && !this.src.loadedmetadata) - ) { - // assign a 1×1 empty texture initially, because data is not yet ready, - // so that no errors occur in gl console! - const tmpdata = new Uint8Array([1, 1, 1, 1]); - gl.texImage2D( - this.glTarget, - 0, - gl.RGBA, - 1, + if (this._shouldDeferUpload()) { + this._renderer.uploadTextureFromData( + this.textureHandle, + new Uint8Array([1, 1, 1, 1]), 1, - 0, - this.glFormat, - this.glDataType, - tmpdata + 1 ); - } else { - // data is ready: just push the texture!
- gl.texImage2D( - this.glTarget, - 0, - this.glFormat, - this.glFormat, - this.glDataType, - data + } else if (!this.isFramebufferTexture) { + this._renderer.uploadTextureFromSource( + this.textureHandle, + textureData ); } + + this.unbindTexture(); + } + + _shouldDeferUpload() { + return ( + this.width === 0 || + this.height === 0 || + (this.isSrcMediaElement && !this.src.loadedmetadata) + ); } /** @@ -179,7 +172,22 @@ class Texture { * possible or to expensive to do a calculation to determine wheter or * not the data has occurred, this method simply re-uploads the texture. */ - update () { + update() { + const textureData = this._getTextureDataFromSource(); + if (!textureData) return false; + + let updated = false; + + if (this._shouldUpdate(textureData)) { + this.bindTexture(); + this._renderer.uploadTextureFromSource(this.textureHandle, textureData); + updated = true; + } + + return updated; + } + + _shouldUpdate(textureData) { const data = this.src; if (data.width === 0 || data.height === 0) { return false; // nothing to do! @@ -192,10 +200,7 @@ class Texture { return false; } - const textureData = this._getTextureDataFromSource(); let updated = false; - - const gl = this._renderer.GL; // pull texture from data, make sure width & height are appropriate if ( textureData.width !== this.width || @@ -260,174 +265,40 @@ class Texture { updated = true; } - if (updated) { - this.bindTexture(); - gl.texImage2D( - this.glTarget, - 0, - this.glFormat, - this.glFormat, - this.glDataType, - textureData - ); - } - return updated; } - /** - * Binds the texture to the appropriate GL target. - */ - bindTexture () { - // bind texture using gl context + glTarget and - // generated gl texture object - const gl = this._renderer.GL; - gl.bindTexture(this.glTarget, this.getTexture()); - + bindTexture() { + this._renderer.bindTexture(this); return this; } - /** - * Unbinds the texture from the appropriate GL target. 
- */ unbindTexture () { - // unbind per above, disable texturing on glTarget - const gl = this._renderer.GL; - gl.bindTexture(this.glTarget, null); + this._renderer.unbindTexture(); } getTexture() { if (this.isFramebufferTexture) { return this.src.rawTexture(); } else { - return this.glTex; + return this.textureHandle; } } - /** - * Sets how a texture is be interpolated when upscaled or downscaled. - * Nearest filtering uses nearest neighbor scaling when interpolating - * Linear filtering uses WebGL's linear scaling when interpolating - * @param {String} downScale Specifies the texture filtering when - * textures are shrunk. Options are LINEAR or NEAREST - * @param {String} upScale Specifies the texture filtering when - * textures are magnified. Options are LINEAR or NEAREST - * @todo implement mipmapping filters - */ - setInterpolation (downScale, upScale) { - const gl = this._renderer.GL; - - this.glMinFilter = this.glFilter(downScale); - this.glMagFilter = this.glFilter(upScale); - - this.bindTexture(); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this.glMinFilter); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, this.glMagFilter); - this.unbindTexture(); + getSampler() { + return this._renderer.getSampler(this); } - glFilter(filter) { - const gl = this._renderer.GL; - if (filter === constants.NEAREST) { - return gl.NEAREST; - } else { - return gl.LINEAR; - } + setInterpolation(minFilter, magFilter) { + this.minFilter = minFilter; + this.magFilter = magFilter; + this._renderer.setTextureParams(this); } - /** - * Sets the texture wrapping mode. This controls how textures behave - * when their uv's go outside of the 0 - 1 range. There are three options: - * CLAMP, REPEAT, and MIRROR. REPEAT & MIRROR are only available if the texture - * is a power of two size (128, 256, 512, 1024, etc.). 
- * @param {String} wrapX Controls the horizontal texture wrapping behavior - * @param {String} wrapY Controls the vertical texture wrapping behavior - */ - setWrapMode (wrapX, wrapY) { - const gl = this._renderer.GL; - - // for webgl 1 we need to check if the texture is power of two - // if it isn't we will set the wrap mode to CLAMP - // webgl2 will support npot REPEAT and MIRROR but we don't check for it yet - const isPowerOfTwo = x => (x & (x - 1)) === 0; - const textureData = this._getTextureDataFromSource(); - - let wrapWidth; - let wrapHeight; - - if (textureData.naturalWidth && textureData.naturalHeight) { - wrapWidth = textureData.naturalWidth; - wrapHeight = textureData.naturalHeight; - } else { - wrapWidth = this.width; - wrapHeight = this.height; - } - - const widthPowerOfTwo = isPowerOfTwo(wrapWidth); - const heightPowerOfTwo = isPowerOfTwo(wrapHeight); - - if (wrapX === constants.REPEAT) { - if ( - this._renderer.webglVersion === constants.WEBGL2 || - (widthPowerOfTwo && heightPowerOfTwo) - ) { - this.glWrapS = gl.REPEAT; - } else { - console.warn( - 'You tried to set the wrap mode to REPEAT but the texture size is not a power of two. Setting to CLAMP instead' - ); - this.glWrapS = gl.CLAMP_TO_EDGE; - } - } else if (wrapX === constants.MIRROR) { - if ( - this._renderer.webglVersion === constants.WEBGL2 || - (widthPowerOfTwo && heightPowerOfTwo) - ) { - this.glWrapS = gl.MIRRORED_REPEAT; - } else { - console.warn( - 'You tried to set the wrap mode to MIRROR but the texture size is not a power of two. 
Setting to CLAMP instead' - ); - this.glWrapS = gl.CLAMP_TO_EDGE; - } - } else { - // falling back to default if didn't get a proper mode - this.glWrapS = gl.CLAMP_TO_EDGE; - } - - if (wrapY === constants.REPEAT) { - if ( - this._renderer.webglVersion === constants.WEBGL2 || - (widthPowerOfTwo && heightPowerOfTwo) - ) { - this.glWrapT = gl.REPEAT; - } else { - console.warn( - 'You tried to set the wrap mode to REPEAT but the texture size is not a power of two. Setting to CLAMP instead' - ); - this.glWrapT = gl.CLAMP_TO_EDGE; - } - } else if (wrapY === constants.MIRROR) { - if ( - this._renderer.webglVersion === constants.WEBGL2 || - (widthPowerOfTwo && heightPowerOfTwo) - ) { - this.glWrapT = gl.MIRRORED_REPEAT; - } else { - console.warn( - 'You tried to set the wrap mode to MIRROR but the texture size is not a power of two. Setting to CLAMP instead' - ); - this.glWrapT = gl.CLAMP_TO_EDGE; - } - } else { - // falling back to default if didn't get a proper mode - this.glWrapT = gl.CLAMP_TO_EDGE; - } - - this.bindTexture(); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, this.glWrapS); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, this.glWrapT); - this.unbindTexture(); + setWrapMode(wrapX, wrapY) { + this.wrapS = wrapX; + this.wrapT = wrapY; + this._renderer.setTextureParams(this); } } @@ -513,27 +384,6 @@ function texture(p5, fn){ p5.MipmapTexture = MipmapTexture; } -export function checkWebGLCapabilities({ GL, webglVersion }) { - const gl = GL; - const supportsFloat = webglVersion === constants.WEBGL2 - ? (gl.getExtension('EXT_color_buffer_float') && - gl.getExtension('EXT_float_blend')) - : gl.getExtension('OES_texture_float'); - const supportsFloatLinear = supportsFloat && - gl.getExtension('OES_texture_float_linear'); - const supportsHalfFloat = webglVersion === constants.WEBGL2 - ? 
gl.getExtension('EXT_color_buffer_float') - : gl.getExtension('OES_texture_half_float'); - const supportsHalfFloatLinear = supportsHalfFloat && - gl.getExtension('OES_texture_half_float_linear'); - return { - float: supportsFloat, - floatLinear: supportsFloatLinear, - halfFloat: supportsHalfFloat, - halfFloatLinear: supportsHalfFloatLinear - }; -} - export default texture; export { Texture, MipmapTexture }; diff --git a/src/webgl/shaders/basic.frag b/src/webgl/shaders/basic.frag index e583955d36..1406964ca9 100644 --- a/src/webgl/shaders/basic.frag +++ b/src/webgl/shaders/basic.frag @@ -1,6 +1,7 @@ IN vec4 vColor; void main(void) { HOOK_beforeFragment(); - OUT_COLOR = HOOK_getFinalColor(vec4(vColor.rgb, 1.) * vColor.a); + OUT_COLOR = HOOK_getFinalColor(vColor); + OUT_COLOR.rgb *= OUT_COLOR.a; // Premultiply alpha before rendering HOOK_afterFragment(); } diff --git a/src/webgl/shaders/lighting.glsl b/src/webgl/shaders/lighting.glsl index b66ac083d1..85a4c79684 100644 --- a/src/webgl/shaders/lighting.glsl +++ b/src/webgl/shaders/lighting.glsl @@ -7,8 +7,6 @@ uniform mat4 uViewMatrix; uniform bool uUseLighting; -uniform int uAmbientLightCount; -uniform vec3 uAmbientColor[5]; uniform mat3 uCameraRotation; uniform int uDirectionalLightCount; uniform vec3 uLightingDirection[5]; diff --git a/src/webgl/shaders/line.vert b/src/webgl/shaders/line.vert index de422ad6b6..65cd9502c6 100644 --- a/src/webgl/shaders/line.vert +++ b/src/webgl/shaders/line.vert @@ -127,7 +127,7 @@ void main() { inputs.tangentOut = (uModelViewMatrix * vec4(aTangentOut, 0.)).xyz; #endif #ifdef AUGMENTED_HOOK_getCameraInputs - inputs = hook_getCameraInputs(inputs); + inputs = HOOK_getCameraInputs(inputs); #endif vec4 posp = vec4(inputs.position, 1.); @@ -271,6 +271,7 @@ void main() { } } else { vec2 tangent = aTangentIn == vec3(0.) ? 
tangentOut : tangentIn; + vTangent = tangent; vec2 normal = vec2(-tangent.y, tangent.x); diff --git a/src/webgl/shaders/phong.frag b/src/webgl/shaders/phong.frag index a424c6220c..78cfb76163 100644 --- a/src/webgl/shaders/phong.frag +++ b/src/webgl/shaders/phong.frag @@ -2,6 +2,7 @@ precision highp int; uniform bool uHasSetAmbient; +uniform vec3 uAmbientColor; uniform vec4 uSpecularMatColor; uniform vec4 uAmbientMatColor; uniform vec4 uEmissiveMatColor; @@ -13,7 +14,6 @@ uniform bool isTexture; IN vec3 vNormal; IN vec2 vTexCoord; IN vec3 vViewPosition; -IN vec3 vAmbientColor; IN vec4 vColor; struct ColorComponents { @@ -45,7 +45,7 @@ void main(void) { Inputs inputs; inputs.normal = normalize(vNormal); inputs.texCoord = vTexCoord; - inputs.ambientLight = vAmbientColor; + inputs.ambientLight = uAmbientColor; inputs.color = isTexture ? TEXTURE(uSampler, vTexCoord) * (vec4(uTint.rgb/255., 1.) * uTint.a/255.) : vColor; @@ -67,7 +67,6 @@ void main(void) { // Calculating final color as result of all lights (plus emissive term). 
- vec2 texCoord = inputs.texCoord; vec4 baseColor = inputs.color; ColorComponents c; c.opacity = baseColor.a; diff --git a/src/webgl/shaders/phong.vert b/src/webgl/shaders/phong.vert index 670da028c1..49a10933fc 100644 --- a/src/webgl/shaders/phong.vert +++ b/src/webgl/shaders/phong.vert @@ -7,8 +7,6 @@ IN vec3 aNormal; IN vec2 aTexCoord; IN vec4 aVertexColor; -uniform vec3 uAmbientColor[5]; - #ifdef AUGMENTED_HOOK_getWorldInputs uniform mat4 uModelMatrix; uniform mat4 uViewMatrix; @@ -19,7 +17,6 @@ uniform mat4 uModelViewMatrix; uniform mat3 uNormalMatrix; #endif uniform mat4 uProjectionMatrix; -uniform int uAmbientLightCount; uniform bool uUseVertexColor; uniform vec4 uMaterialColor; @@ -74,14 +71,6 @@ void main(void) { vNormal = inputs.normal; vColor = inputs.color; - // TODO: this should be a uniform - vAmbientColor = vec3(0.0); - for (int i = 0; i < 5; i++) { - if (i < uAmbientLightCount) { - vAmbientColor += uAmbientColor[i]; - } - } - gl_Position = uProjectionMatrix * vec4(inputs.position, 1.); HOOK_afterVertex(); } diff --git a/src/webgl/shaders/webgl2Compatibility.glsl b/src/webgl/shaders/webgl2Compatibility.glsl index 9664e05a52..8c9dbddec6 100644 --- a/src/webgl/shaders/webgl2Compatibility.glsl +++ b/src/webgl/shaders/webgl2Compatibility.glsl @@ -28,7 +28,7 @@ out vec4 outColor; #ifdef FRAGMENT_SHADER vec4 getTexture(in sampler2D content, vec2 coord) { vec4 color = TEXTURE(content, coord); - color.rgb /= color.a; + if (color.a > 0.) 
color.rgb /= color.a; return color; } #endif diff --git a/src/webgl/text.js b/src/webgl/text.js index a69ef3736f..b5a9842345 100644 --- a/src/webgl/text.js +++ b/src/webgl/text.js @@ -1,11 +1,11 @@ import * as constants from '../core/constants'; -import { RendererGL } from './p5.RendererGL'; +import { Renderer3D } from '../core/p5.Renderer3D'; import { Vector } from '../math/p5.Vector'; import { Geometry } from './p5.Geometry'; import { Font, arrayCommandsToObjects } from '../type/p5.Font'; function text(p5, fn) { - RendererGL.prototype.maxCachedGlyphs = function() { + Renderer3D.prototype.maxCachedGlyphs = function() { // TODO: use more than vibes to find a good value for this return 200; }; @@ -27,7 +27,7 @@ function text(p5, fn) { // Text/Typography (see src/type/textCore.js) /* - RendererGL.prototype.textWidth = function(s) { + Renderer3D.prototype.textWidth = function(s) { if (this._isOpenType()) { return this.states.textFont.font._textWidth(s, this.states.textSize); } @@ -683,7 +683,7 @@ function text(p5, fn) { } } - RendererGL.prototype._renderText = function (line, x, y, maxY, minY) { + Renderer3D.prototype._renderText = function (line, x, y, maxY, minY) { if (!this.states.textFont || typeof this.states.textFont === 'string') { console.log( 'WEBGL: you must load and set a font before drawing text. See `loadFont` and `textFont` for more details.' @@ -733,7 +733,7 @@ function text(p5, fn) { const initializeShader = !this._defaultFontShader; const sh = this._getFontShader(); sh.init(); - sh.bindShader(); // first time around, bind the shader fully + sh.bindShader('text'); // first time around, bind the shader fully if (initializeShader) { // these are constants, really. just initialize them one-time. 
diff --git a/src/webgl/utils.js b/src/webgl/utils.js new file mode 100644 index 0000000000..0727e91e1f --- /dev/null +++ b/src/webgl/utils.js @@ -0,0 +1,471 @@ +import * as constants from '../core/constants'; +import { Texture } from './p5.Texture'; + +/** + * @private + * @param {Uint8Array|Float32Array|undefined} pixels An existing pixels array to reuse if the size is the same + * @param {WebGLRenderingContext} gl The WebGL context + * @param {WebGLFramebuffer|null} framebuffer The Framebuffer to read + * @param {Number} x The x coordinate to read, premultiplied by pixel density + * @param {Number} y The y coordinate to read, premultiplied by pixel density + * @param {Number} width The width in pixels to be read (factoring in pixel density) + * @param {Number} height The height in pixels to be read (factoring in pixel density) + * @param {GLEnum} format Either RGB or RGBA depending on how many channels to read + * @param {GLEnum} type The datatype of each channel, e.g. UNSIGNED_BYTE or FLOAT + * @param {Number|undefined} flipY If provided, the total height with which to flip the y axis about + * @returns {Uint8Array|Float32Array} pixels A pixels array with the current state of the + * WebGL context read into it + */ +export function readPixelsWebGL( + pixels, + gl, + framebuffer, + x, + y, + width, + height, + format, + type, + flipY +) { + // Record the currently bound framebuffer so we can go back to it after, and + // bind the framebuffer we want to read from + const prevFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING); + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer); + + const channels = format === gl.RGBA ? 4 : 3; + + // Make a pixels buffer if it doesn't already exist + const len = width * height * channels; + const TypedArrayClass = type === gl.UNSIGNED_BYTE ? Uint8Array : Float32Array; + if (!(pixels instanceof TypedArrayClass) || pixels.length !== len) { + pixels = new TypedArrayClass(len); + } + + gl.readPixels( + x, + flipY ?
flipY - y - height : y, + width, + height, + format, + type, + pixels + ); + + // Re-bind whatever was previously bound + gl.bindFramebuffer(gl.FRAMEBUFFER, prevFramebuffer); + + if (flipY) { + // WebGL pixels are inverted compared to 2D pixels, so we have to flip + // the resulting rows. Adapted from https://stackoverflow.com/a/41973289 + const halfHeight = Math.floor(height / 2); + const tmpRow = new TypedArrayClass(width * channels); + for (let y = 0; y < halfHeight; y++) { + const topOffset = y * width * channels; + const bottomOffset = (height - y - 1) * width * channels; + tmpRow.set(pixels.subarray(topOffset, topOffset + width * channels)); + pixels.copyWithin(topOffset, bottomOffset, bottomOffset + width * channels); + pixels.set(tmpRow, bottomOffset); + } + } + + return pixels; +} + +/** + * @private + * @param {WebGLRenderingContext} gl The WebGL context + * @param {WebGLFramebuffer|null} framebuffer The Framebuffer to read + * @param {Number} x The x coordinate to read, premultiplied by pixel density + * @param {Number} y The y coordinate to read, premultiplied by pixel density + * @param {GLEnum} format Either RGB or RGBA depending on how many channels to read + * @param {GLEnum} type The datatype of each channel, e.g. UNSIGNED_BYTE or FLOAT + * @param {Number|undefined} flipY If provided, the total height with which to flip the y axis about + * @returns {Number[]} pixels The channel data for the pixel at that location + */ +export function readPixelWebGL(gl, framebuffer, x, y, format, type, flipY) { + // Record the currently bound framebuffer so we can go back to it after, and + // bind the framebuffer we want to read from + const prevFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING); + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer); + + const channels = format === gl.RGBA ? 4 : 3; + const TypedArrayClass = type === gl.UNSIGNED_BYTE ? Uint8Array : Float32Array; + const pixels = new TypedArrayClass(channels); + + gl.readPixels(x, flipY ?
flipY - y - 1 : y, 1, 1, format, type, pixels); + + // Re-bind whatever was previously bound + gl.bindFramebuffer(gl.FRAMEBUFFER, prevFramebuffer); + + return Array.from(pixels); +} + +export function setWebGLTextureParams(texture, gl, webglVersion) { + texture.bindTexture(); + const glMinFilter = texture.minFilter === constants.NEAREST ? gl.NEAREST : gl.LINEAR; + const glMagFilter = texture.magFilter === constants.NEAREST ? gl.NEAREST : gl.LINEAR; + + // for webgl 1 we need to check if the texture is power of two + // if it isn't we will set the wrap mode to CLAMP + // webgl2 will support npot REPEAT and MIRROR but we don't check for it yet + const isPowerOfTwo = x => (x & (x - 1)) === 0; + const textureData = texture._getTextureDataFromSource(); + + let wrapWidth; + let wrapHeight; + + if (textureData.naturalWidth && textureData.naturalHeight) { + wrapWidth = textureData.naturalWidth; + wrapHeight = textureData.naturalHeight; + } else { + wrapWidth = texture.width; + wrapHeight = texture.height; + } + + const widthPowerOfTwo = isPowerOfTwo(wrapWidth); + const heightPowerOfTwo = isPowerOfTwo(wrapHeight); + let glWrapS, glWrapT; + + if (texture.wrapS === constants.REPEAT) { + if ( + webglVersion === constants.WEBGL2 || + (widthPowerOfTwo && heightPowerOfTwo) + ) { + glWrapS = gl.REPEAT; + } else { + console.warn( + 'You tried to set the wrap mode to REPEAT but the texture size is not a power of two. Setting to CLAMP instead' + ); + glWrapS = gl.CLAMP_TO_EDGE; + } + } else if (texture.wrapS === constants.MIRROR) { + if ( + webglVersion === constants.WEBGL2 || + (widthPowerOfTwo && heightPowerOfTwo) + ) { + glWrapS = gl.MIRRORED_REPEAT; + } else { + console.warn( + 'You tried to set the wrap mode to MIRROR but the texture size is not a power of two. 
Setting to CLAMP instead' + ); + glWrapS = gl.CLAMP_TO_EDGE; + } + } else { + // falling back to default if didn't get a proper mode + glWrapS = gl.CLAMP_TO_EDGE; + } + + if (texture.wrapT === constants.REPEAT) { + if ( + webglVersion === constants.WEBGL2 || + (widthPowerOfTwo && heightPowerOfTwo) + ) { + glWrapT = gl.REPEAT; + } else { + console.warn( + 'You tried to set the wrap mode to REPEAT but the texture size is not a power of two. Setting to CLAMP instead' + ); + glWrapT = gl.CLAMP_TO_EDGE; + } + } else if (texture.wrapT === constants.MIRROR) { + if ( + webglVersion === constants.WEBGL2 || + (widthPowerOfTwo && heightPowerOfTwo) + ) { + glWrapT = gl.MIRRORED_REPEAT; + } else { + console.warn( + 'You tried to set the wrap mode to MIRROR but the texture size is not a power of two. Setting to CLAMP instead' + ); + glWrapT = gl.CLAMP_TO_EDGE; + } + } else { + // falling back to default if didn't get a proper mode + glWrapT = gl.CLAMP_TO_EDGE; + } + + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, glMinFilter); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, glMagFilter); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, glWrapS); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, glWrapT); + texture.unbindTexture(); +} + +export function setWebGLUniformValue(shader, uniform, data, getTexture, gl) { + const location = uniform.location; + shader.useProgram(); + + switch (uniform.type) { + case gl.BOOL: + if (data === true) { + gl.uniform1i(location, 1); + } else { + gl.uniform1i(location, 0); + } + break; + case gl.INT: + if (uniform.size > 1) { + data.length && gl.uniform1iv(location, data); + } else { + gl.uniform1i(location, data); + } + break; + case gl.FLOAT: + if (uniform.size > 1) { + data.length && gl.uniform1fv(location, data); + } else { + gl.uniform1f(location, data); + } + break; + case gl.FLOAT_MAT3: + gl.uniformMatrix3fv(location, false, data); + break; + case gl.FLOAT_MAT4: + gl.uniformMatrix4fv(location, false, data); + 
break; + case gl.FLOAT_VEC2: + if (uniform.size > 1) { + data.length && gl.uniform2fv(location, data); + } else { + gl.uniform2f(location, data[0], data[1]); + } + break; + case gl.FLOAT_VEC3: + if (uniform.size > 1) { + data.length && gl.uniform3fv(location, data); + } else { + gl.uniform3f(location, data[0], data[1], data[2]); + } + break; + case gl.FLOAT_VEC4: + if (uniform.size > 1) { + data.length && gl.uniform4fv(location, data); + } else { + gl.uniform4f(location, data[0], data[1], data[2], data[3]); + } + break; + case gl.INT_VEC2: + if (uniform.size > 1) { + data.length && gl.uniform2iv(location, data); + } else { + gl.uniform2i(location, data[0], data[1]); + } + break; + case gl.INT_VEC3: + if (uniform.size > 1) { + data.length && gl.uniform3iv(location, data); + } else { + gl.uniform3i(location, data[0], data[1], data[2]); + } + break; + case gl.INT_VEC4: + if (uniform.size > 1) { + data.length && gl.uniform4iv(location, data); + } else { + gl.uniform4i(location, data[0], data[1], data[2], data[3]); + } + break; + case gl.SAMPLER_2D: + if (typeof data == 'number') { + if ( + data < gl.TEXTURE0 || + data > gl.TEXTURE31 || + data !== Math.ceil(data) + ) { + console.log( + '🌸 p5.js says: ' + + "You're trying to use a number as the data for a texture." + + 'Please use a texture.' + ); + return this; + } + gl.activeTexture(data); + gl.uniform1i(location, data); + } else { + gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); + uniform.texture = + data instanceof Texture ? 
data : getTexture(data); + gl.uniform1i(location, uniform.samplerIndex); + if (uniform.texture.src.gifProperties) { + uniform.texture.src._animateGif(this._pInst); + } + } + break; + case gl.SAMPLER_CUBE: + case gl.SAMPLER_3D: + case gl.SAMPLER_2D_SHADOW: + case gl.SAMPLER_2D_ARRAY: + case gl.SAMPLER_2D_ARRAY_SHADOW: + case gl.SAMPLER_CUBE_SHADOW: + case gl.INT_SAMPLER_2D: + case gl.INT_SAMPLER_3D: + case gl.INT_SAMPLER_CUBE: + case gl.INT_SAMPLER_2D_ARRAY: + case gl.UNSIGNED_INT_SAMPLER_2D: + case gl.UNSIGNED_INT_SAMPLER_3D: + case gl.UNSIGNED_INT_SAMPLER_CUBE: + case gl.UNSIGNED_INT_SAMPLER_2D_ARRAY: + if (typeof data !== 'number') { + break; + } + if ( + data < gl.TEXTURE0 || + data > gl.TEXTURE31 || + data !== Math.ceil(data) + ) { + console.log( + '🌸 p5.js says: ' + + "You're trying to use a number as the data for a texture." + + 'Please use a texture.' + ); + break; + } + gl.activeTexture(data); + gl.uniform1i(location, data); + break; + //@todo complete all types + } +} + +export function getWebGLUniformMetadata(shader, gl) { + const program = shader._glProgram; + + const numUniforms = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS); + const result = []; + + let samplerIndex = 0; + + for (let i = 0; i < numUniforms; ++i) { + const uniformInfo = gl.getActiveUniform(program, i); + const uniform = {}; + uniform.location = gl.getUniformLocation( + program, + uniformInfo.name + ); + uniform.size = uniformInfo.size; + let uniformName = uniformInfo.name; + //uniforms that are arrays have their name returned as + //someUniform[0] which is a bit silly so we trim it + //off here. 
The size property tells us that its an array + //so we dont lose any information by doing this + if (uniformInfo.size > 1) { + uniformName = uniformName.substring(0, uniformName.indexOf('[0]')); + } + uniform.name = uniformName; + uniform.type = uniformInfo.type; + uniform._cachedData = undefined; + if (uniform.type === gl.SAMPLER_2D) { + uniform.isSampler = true; + uniform.samplerIndex = samplerIndex; + samplerIndex++; + } + + uniform.isArray = + uniformInfo.size > 1 || + uniform.type === gl.FLOAT_MAT3 || + uniform.type === gl.FLOAT_MAT4 || + uniform.type === gl.FLOAT_VEC2 || + uniform.type === gl.FLOAT_VEC3 || + uniform.type === gl.FLOAT_VEC4 || + uniform.type === gl.INT_VEC2 || + uniform.type === gl.INT_VEC4 || + uniform.type === gl.INT_VEC3; + + result.push(uniform); + } + + return result; +} + +export function getWebGLShaderAttributes(shader, gl) { + const attributes = {}; + + const numAttributes = gl.getProgramParameter( + shader._glProgram, + gl.ACTIVE_ATTRIBUTES + ); + for (let i = 0; i < numAttributes; ++i) { + const attributeInfo = gl.getActiveAttrib(shader._glProgram, i); + const name = attributeInfo.name; + const location = gl.getAttribLocation(shader._glProgram, name); + const attribute = {}; + attribute.name = name; + attribute.location = location; + attribute.index = i; + attribute.type = attributeInfo.type; + attribute.size = attributeInfo.size; + attributes[name] = attribute; + } + + return attributes; +} + +export function populateGLSLHooks(shader, src, shaderType) { + const main = 'void main'; + if (!src.includes(main)) return src; + + let [preMain, postMain] = src.split(main); + + let hooks = ''; + let defines = ''; + for (const key in shader.hooks.uniforms) { + hooks += `uniform ${key};\n`; + } + if (shader.hooks.declarations) { + hooks += shader.hooks.declarations + '\n'; + } + if (shader.hooks[shaderType].declarations) { + hooks += shader.hooks[shaderType].declarations + '\n'; + } + for (const hookDef in shader.hooks.helpers) { + hooks += 
`${hookDef}${shader.hooks.helpers[hookDef]}\n`; + } + for (const hookDef in shader.hooks[shaderType]) { + if (hookDef === 'declarations') continue; + const [hookType, hookName] = hookDef.split(' '); + + // Add a #define so that if the shader wants to use preprocessor directives to + // optimize away the extra function calls in main, it can do so + if ( + shader.hooks.modified.vertex[hookDef] || + shader.hooks.modified.fragment[hookDef] + ) { + defines += '#define AUGMENTED_HOOK_' + hookName + '\n'; + } + + hooks += + hookType + ' HOOK_' + hookName + shader.hooks[shaderType][hookDef] + '\n'; + } + + // Allow shaders to specify the location of hook #define statements. Normally these + // go after function definitions, but one might want to have them defined earlier + // in order to only conditionally make uniforms. + if (preMain.indexOf('#define HOOK_DEFINES') !== -1) { + preMain = preMain.replace('#define HOOK_DEFINES', '\n' + defines + '\n'); + defines = ''; + } + + return preMain + '\n' + defines + hooks + main + postMain; +} + +export function checkWebGLCapabilities({ GL, webglVersion }) { + const gl = GL; + const supportsFloat = webglVersion === constants.WEBGL2 + ? (gl.getExtension('EXT_color_buffer_float') && + gl.getExtension('EXT_float_blend')) + : gl.getExtension('OES_texture_float'); + const supportsFloatLinear = supportsFloat && + gl.getExtension('OES_texture_float_linear'); + const supportsHalfFloat = webglVersion === constants.WEBGL2 + ? 
gl.getExtension('EXT_color_buffer_float') + : gl.getExtension('OES_texture_half_float'); + const supportsHalfFloatLinear = supportsHalfFloat && + gl.getExtension('OES_texture_half_float_linear'); + return { + float: supportsFloat, + floatLinear: supportsFloatLinear, + halfFloat: supportsHalfFloat, + halfFloatLinear: supportsHalfFloatLinear + }; +} diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js new file mode 100644 index 0000000000..2bcf39949d --- /dev/null +++ b/src/webgpu/p5.RendererWebGPU.js @@ -0,0 +1,1941 @@ +import { Renderer3D, getStrokeDefs } from '../core/p5.Renderer3D'; +import { Shader } from '../webgl/p5.Shader'; +import { Texture } from '../webgl/p5.Texture'; +import { Image } from '../image/p5.Image'; +import { RGB, RGBA } from '../color/creating_reading'; +import * as constants from '../core/constants'; + + +import { colorVertexShader, colorFragmentShader } from './shaders/color'; +import { lineVertexShader, lineFragmentShader} from './shaders/line'; +import { materialVertexShader, materialFragmentShader } from './shaders/material'; +import {Graphics} from "../core/p5.Graphics"; +import {Element} from "../dom/p5.Element"; + +const { lineDefs } = getStrokeDefs((n, v, t) => `const ${n}: ${t} = ${v};\n`); + +class RendererWebGPU extends Renderer3D { + constructor(pInst, w, h, isMainCanvas, elt) { + super(pInst, w, h, isMainCanvas, elt) + + this.renderPass = {}; + + this.samplers = new Map(); + + // Single reusable staging buffer for pixel reading + this.pixelReadBuffer = null; + this.pixelReadBufferSize = 0; + + // Lazy readback texture for main canvas pixel reading + this.canvasReadbackTexture = null; + } + + async setupContext() { + this._setAttributeDefaults(this._pInst); + await this._initContext(); + } + + _setAttributeDefaults(pInst) { + const defaults = { + forceFallbackAdapter: false, + powerPreference: 'high-performance', + }; + if (pInst._webgpuAttributes === null) { + pInst._webgpuAttributes = defaults; + } else 
{ + pInst._webgpuAttributes = Object.assign(defaults, pInst._webgpuAttributes); + } + return; + } + + async _initContext() { + this.adapter = await navigator.gpu?.requestAdapter(this._webgpuAttributes); + // console.log('Adapter:'); + // console.log(this.adapter); + if (this.adapter) { + console.log([...this.adapter.features]); + } + this.device = await this.adapter?.requestDevice({ + // Todo: check support + requiredFeatures: ['depth32float-stencil8'] + }); + // console.log('Device:'); + // console.log(this.device); + if (!this.device) { + throw new Error('Your browser does not support WebGPU.'); + } + this.queue = this.device.queue; + this.drawingContext = this.canvas.getContext('webgpu'); + this.presentationFormat = navigator.gpu.getPreferredCanvasFormat(); + this.drawingContext.configure({ + device: this.device, + format: this.presentationFormat, + usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_SRC, + }); + + // TODO disablable stencil + this.depthFormat = 'depth24plus-stencil8'; + this._updateSize(); + this._update(); + } + + async _setAttributes(key, value) { + if (typeof this._pInst._webgpuAttributes === "undefined") { + console.log( + "You are trying to use setAttributes on a p5.Graphics object " + + "that does not use a WebGPU renderer." 
+ ); + return; + } + let unchanged = true; + + if (typeof value !== "undefined") { + //first time modifying the attributes + if (this._pInst._webgpuAttributes === null) { + this._pInst._webgpuAttributes = {}; + } + if (this._pInst._webgpuAttributes[key] !== value) { + //changing value of previously altered attribute + this._webgpuAttributes[key] = value; + unchanged = false; + } + //setting all attributes with some change + } else if (key instanceof Object) { + if (this._pInst._webgpuAttributes !== key) { + this._pInst._webgpuAttributes = key; + unchanged = false; + } + } + //@todo_FES + if (!this.isP3D || unchanged) { + return; + } + + if (!this._pInst._setupDone) { + if (this.geometryBufferCache.numCached() > 0) { + p5._friendlyError( + "Sorry, Could not set the attributes, you need to call setAttributes() " + + "before calling the other drawing methods in setup()" + ); + return; + } + } + + await this._resetContext(null, null, RendererWebGPU); + + if (this.states.curCamera) { + this.states.curCamera._renderer = this._renderer; + } + } + + _updateSize() { + if (this.depthTexture && this.depthTexture.destroy) { + this.depthTexture.destroy(); + } + this.depthTexture = this.device.createTexture({ + size: { + width: Math.ceil(this.width * this._pixelDensity), + height: Math.ceil(this.height * this._pixelDensity), + depthOrArrayLayers: 1, + }, + format: this.depthFormat, + usage: GPUTextureUsage.RENDER_ATTACHMENT, + }); + + // Clear the main canvas after resize + this.clear(); + + // Destroy existing readback texture when size changes + if (this.canvasReadbackTexture && this.canvasReadbackTexture.destroy) { + this.canvasReadbackTexture.destroy(); + this.canvasReadbackTexture = null; + } + } + + clear(...args) { + const _r = args[0] || 0; + const _g = args[1] || 0; + const _b = args[2] || 0; + const _a = args[3] || 0; + + const commandEncoder = this.device.createCommandEncoder(); + + // Use framebuffer texture if active, otherwise use canvas texture + const 
activeFramebuffer = this.activeFramebuffer(); + const colorTexture = activeFramebuffer ? + (activeFramebuffer.aaColorTexture || activeFramebuffer.colorTexture) : + this.drawingContext.getCurrentTexture(); + + const colorAttachment = { + view: colorTexture.createView(), + clearValue: { r: _r * _a, g: _g * _a, b: _b * _a, a: _a }, + loadOp: 'clear', + storeOp: 'store', + // If using multisampled texture, resolve to non-multisampled texture + resolveTarget: activeFramebuffer && activeFramebuffer.aaColorTexture ? + activeFramebuffer.colorTexture.createView() : undefined, + }; + + // Use framebuffer depth texture if active, otherwise use canvas depth texture + const depthTexture = activeFramebuffer ? + (activeFramebuffer.aaDepthTexture || activeFramebuffer.depthTexture) : + this.depthTexture; + const depthTextureView = depthTexture?.createView(); + const depthAttachment = depthTextureView + ? { + view: depthTextureView, + depthClearValue: 1.0, + depthLoadOp: 'clear', + depthStoreOp: 'store', + stencilLoadOp: 'load', + stencilStoreOp: 'store', + } + : undefined; + + const renderPassDescriptor = { + colorAttachments: [colorAttachment], + ...(depthAttachment ? { depthStencilAttachment: depthAttachment } : {}), + }; + + const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor); + passEncoder.end(); + + this.queue.submit([commandEncoder.finish()]); + } + + _prepareBuffer(renderBuffer, geometry, shader) { + const attr = shader.attributes[renderBuffer.attr]; + if (!attr) return; + + const { src, dst, size, map } = renderBuffer; + + const device = this.device; + const buffers = this._getOrMakeCachedBuffers(geometry); + let srcData = geometry[src]; + if (!srcData || srcData.length === 0) { + if (renderBuffer.default) { + srcData = geometry[src] = renderBuffer.default(geometry); + srcData.isDefault = true; + } else { + return; + } + } + + const raw = map ? 
map(srcData) : srcData; + const typed = this._normalizeBufferData(raw, Float32Array); + + let buffer = buffers[dst]; + let recreated = false; + if (!buffer || buffer.size < typed.byteLength) { + recreated = true; + if (buffer) buffer.destroy(); + buffer = device.createBuffer({ + size: typed.byteLength, + usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST, + }); + buffers[dst] = buffer; + } + + if (recreated || geometry.dirtyFlags[src] !== false) { + device.queue.writeBuffer(buffer, 0, typed); + geometry.dirtyFlags[src] = false; + } + + shader.enableAttrib(attr, size); + } + + _disableRemainingAttributes(shader) {} + + _enableAttrib(attr) { + // TODO: is this necessary? + const loc = attr.location; + if (!this.registerEnabled.has(loc)) { + // TODO + // this.renderPass.setVertexBuffer(loc, buffer); + this.registerEnabled.add(loc); + } + } + + _ensureGeometryBuffers(buffers, indices, indexType) { + if (!indices) return; + + const device = this.device; + + const buffer = device.createBuffer({ + size: indices.length * indexType.BYTES_PER_ELEMENT, + usage: GPUBufferUsage.INDEX | GPUBufferUsage.COPY_DST, + mappedAtCreation: true, + }); + + // Write index data to buffer + const mapping = new indexType(buffer.getMappedRange()); + mapping.set(indices); + buffer.unmap(); + + buffers.indexBuffer = buffer; + buffers.indexBufferType = indexType === Uint32Array ? 
'uint32' : 'uint16'; + } + + _freeBuffers(buffers) { + const destroyIfExists = (buf) => { + if (buf && buf.destroy) { + buf.destroy(); + } + }; + + destroyIfExists(buffers.indexBuffer); + + const freeDefs = (defs) => { + for (const def of defs) { + destroyIfExists(buffers[def.dst]); + buffers[def.dst] = null; + } + }; + + freeDefs(this.renderer.buffers.stroke); + freeDefs(this.renderer.buffers.fill); + freeDefs(this.renderer.buffers.user); + } + + _getValidSampleCount(requestedCount) { + // WebGPU supports sample counts of 1, 4 (and sometimes 8) + if (requestedCount <= 1) return 1; + if (requestedCount <= 4) return 4; + return 4; // Cap at 4 for broader compatibility + } + + _shaderOptions({ mode }) { + const activeFramebuffer = this.activeFramebuffer(); + const format = activeFramebuffer ? + this._getWebGPUColorFormat(activeFramebuffer) : + this.presentationFormat; + + const requestedSampleCount = activeFramebuffer ? + (activeFramebuffer.antialias ? activeFramebuffer.antialiasSamples : 1) : + (this.antialias || 1); + const sampleCount = this._getValidSampleCount(requestedSampleCount); + + const depthFormat = activeFramebuffer && activeFramebuffer.useDepth ? + this._getWebGPUDepthFormat(activeFramebuffer) : + this.depthFormat; + + return { + topology: mode === constants.TRIANGLE_STRIP ? 
'triangle-strip' : 'triangle-list', + blendMode: this.states.curBlendMode, + sampleCount, + format, + depthFormat, + } + } + + _initShader(shader) { + const device = this.device; + + shader.vertModule = device.createShaderModule({ code: shader.vertSrc() }); + shader.fragModule = device.createShaderModule({ code: shader.fragSrc() }); + + shader._pipelineCache = new Map(); + shader.getPipeline = ({ topology, blendMode, sampleCount, format, depthFormat }) => { + const key = `${topology}_${blendMode}_${sampleCount}_${format}_${depthFormat}`; + if (!shader._pipelineCache.has(key)) { + const pipeline = device.createRenderPipeline({ + layout: shader._pipelineLayout, + vertex: { + module: shader.vertModule, + entryPoint: 'main', + buffers: this._getVertexLayout(shader), + }, + fragment: { + module: shader.fragModule, + entryPoint: 'main', + targets: [{ + format, + blend: this._getBlendState(blendMode), + }], + }, + primitive: { topology }, + multisample: { count: sampleCount }, + depthStencil: { + format: depthFormat, + depthWriteEnabled: true, + depthCompare: 'less', + stencilFront: { + compare: 'always', + failOp: 'keep', + depthFailOp: 'keep', + passOp: 'keep', + }, + stencilBack: { + compare: 'always', + failOp: 'keep', + depthFailOp: 'keep', + passOp: 'keep', + }, + stencilReadMask: 0xFFFFFFFF, // TODO + stencilWriteMask: 0xFFFFFFFF, + stencilLoadOp: "load", + stencilStoreOp: "store", + }, + }); + shader._pipelineCache.set(key, pipeline); + } + return shader._pipelineCache.get(key); + } + } + + _finalizeShader(shader) { + const rawSize = Math.max( + 0, + ...Object.values(shader.uniforms).filter(u => !u.isSampler).map(u => u.offsetEnd) + ); + const alignedSize = Math.ceil(rawSize / 16) * 16; + shader._uniformData = new Float32Array(alignedSize / 4); + shader._uniformDataView = new DataView(shader._uniformData.buffer); + + shader._uniformBuffer = this.device.createBuffer({ + size: alignedSize, + usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST, + }); + + const 
bindGroupLayouts = new Map(); // group index -> bindGroupLayout + const groupEntries = new Map(); // group index -> array of entries + + // We're enforcing that every shader have a single uniform struct in binding 0 + groupEntries.set(0, [{ + binding: 0, + visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT, + buffer: { type: 'uniform' }, + }]); + + // Add the variable amount of samplers and texture bindings that can come after + for (const sampler of shader.samplers) { + const group = sampler.group; + const entries = groupEntries.get(group) || []; + if (!['sampler', 'texture_2d'].includes(sampler.type)) { + throw new Error(`Unsupported texture type: ${sampler.type}`); + } + + entries.push({ + binding: sampler.binding, + visibility: sampler.visibility, + sampler: sampler.type === 'sampler' + ? { type: 'filtering' } + : undefined, + texture: sampler.type === 'texture_2d' + ? { sampleType: 'float', viewDimension: '2d' } + : undefined, + uniform: sampler, + }); + + groupEntries.set(group, entries); + } + + // Create layouts and bind groups + for (const [group, entries] of groupEntries) { + const layout = this.device.createBindGroupLayout({ entries }); + bindGroupLayouts.set(group, layout); + } + + shader._groupEntries = groupEntries; + shader._bindGroupLayouts = [...bindGroupLayouts.values()]; + shader._pipelineLayout = this.device.createPipelineLayout({ + bindGroupLayouts: shader._bindGroupLayouts, + }); + } + + _getBlendState(mode) { + switch (mode) { + case constants.BLEND: + return { + color: { + operation: 'add', + srcFactor: 'one', + dstFactor: 'one-minus-src-alpha' + }, + alpha: { + operation: 'add', + srcFactor: 'one', + dstFactor: 'one-minus-src-alpha' + } + }; + + case constants.ADD: + return { + color: { + operation: 'add', + srcFactor: 'one', + dstFactor: 'one' + }, + alpha: { + operation: 'add', + srcFactor: 'one', + dstFactor: 'one' + } + }; + + case constants.REMOVE: + return { + color: { + operation: 'add', + srcFactor: 'zero', + dstFactor: 
'one-minus-src-alpha' + }, + alpha: { + operation: 'add', + srcFactor: 'zero', + dstFactor: 'one-minus-src-alpha' + } + }; + + case constants.MULTIPLY: + return { + color: { + operation: 'add', + srcFactor: 'dst-color', + dstFactor: 'one-minus-src-alpha' + }, + alpha: { + operation: 'add', + srcFactor: 'dst-alpha', + dstFactor: 'one-minus-src-alpha' + } + }; + + case constants.SCREEN: + return { + color: { + operation: 'add', + srcFactor: 'one', + dstFactor: 'one-minus-src-color' + }, + alpha: { + operation: 'add', + srcFactor: 'one', + dstFactor: 'one-minus-src-alpha' + } + }; + + case constants.EXCLUSION: + return { + color: { + operation: 'add', + srcFactor: 'one-minus-dst-color', + dstFactor: 'one-minus-src-color' + }, + alpha: { + operation: 'add', + srcFactor: 'one', + dstFactor: 'one' + } + }; + + case constants.REPLACE: + return { + color: { + operation: 'add', + srcFactor: 'one', + dstFactor: 'zero' + }, + alpha: { + operation: 'add', + srcFactor: 'one', + dstFactor: 'zero' + } + }; + + case constants.SUBTRACT: + return { + color: { + operation: 'reverse-subtract', + srcFactor: 'one', + dstFactor: 'one' + }, + alpha: { + operation: 'add', + srcFactor: 'one', + dstFactor: 'one-minus-src-alpha' + } + }; + + case constants.DARKEST: + return { + color: { + operation: 'min', + srcFactor: 'one', + dstFactor: 'one' + }, + alpha: { + operation: 'min', + srcFactor: 'one', + dstFactor: 'one' + } + }; + + case constants.LIGHTEST: + return { + color: { + operation: 'max', + srcFactor: 'one', + dstFactor: 'one' + }, + alpha: { + operation: 'max', + srcFactor: 'one', + dstFactor: 'one' + } + }; + + default: + console.warn(`Unsupported blend mode: ${mode}`); + return undefined; + } + } + + _applyColorBlend() {} + + _getVertexLayout(shader) { + const layouts = []; + + for (const attrName in shader.attributes) { + const attr = shader.attributes[attrName]; + if (!attr || attr.location === -1) continue; + // Get the vertex buffer info associated with this attribute + const 
renderBuffer = + this.buffers[shader.shaderType].find(buf => buf.attr === attrName) || + this.buffers.user.find(buf => buf.attr === attrName); + if (!renderBuffer) continue; + + const { size } = renderBuffer; + // Convert from the number of floats (e.g. 3) to a recognized WebGPU + // format (e.g. "float32x3") + const format = this._getFormatFromSize(size); + + layouts.push({ + arrayStride: size * 4, + stepMode: 'vertex', + attributes: [ + { + shaderLocation: attr.location, + offset: 0, + format, + }, + ], + }); + } + return layouts; + } + + _getVertexBuffers(shader) { + const buffers = []; + + for (const attrName in shader.attributes) { + const attr = shader.attributes[attrName]; + if (!attr || attr.location === -1) continue; + + // Get the vertex buffer info associated with this attribute + const renderBuffer = + this.buffers[shader.shaderType].find(buf => buf.attr === attrName) || + this.buffers.user.find(buf => buf.attr === attrName); + if (!renderBuffer) continue; + + buffers.push(renderBuffer); + } + + return buffers; + } + + _getFormatFromSize(size) { + switch (size) { + case 1: return 'float32'; + case 2: return 'float32x2'; + case 3: return 'float32x3'; + case 4: return 'float32x4'; + default: throw new Error(`Unsupported attribute size: ${size}`); + } + } + + _useShader(shader, options) {} + + _updateViewport() { + this._origViewport = { + width: this.width, + height: this.height, + }; + this._viewport = [0, 0, this.width, this.height]; + } + + _createPixelsArray() { + this.pixels = new Uint8Array( + this.width * this.pixelDensity() * this.height * this.pixelDensity() * 4 + ); + } + + viewport() {} + + zClipRange() { + return [0, 1]; + } + + _resetBuffersBeforeDraw() { + const commandEncoder = this.device.createCommandEncoder(); + + const depthTextureView = this.depthTexture?.createView(); + const depthAttachment = depthTextureView + ? 
{ + view: depthTextureView, + depthClearValue: 1.0, + depthLoadOp: 'clear', + depthStoreOp: 'store', + stencilLoadOp: 'load', + stencilStoreOp: "store", + } + : undefined; + + const renderPassDescriptor = { + colorAttachments: [], + ...(depthAttachment ? { depthStencilAttachment: depthAttachment } : {}), + }; + + const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor); + passEncoder.end(); + + this.queue.submit([commandEncoder.finish()]); + } + + ////////////////////////////////////////////// + // Rendering + ////////////////////////////////////////////// + + _drawBuffers(geometry, { mode = constants.TRIANGLES, count = 1 }) { + const buffers = this.geometryBufferCache.getCached(geometry); + if (!buffers) return; + + const commandEncoder = this.device.createCommandEncoder(); + + // Use framebuffer texture if active, otherwise use canvas texture + const activeFramebuffer = this.activeFramebuffer(); + const colorTexture = activeFramebuffer ? + (activeFramebuffer.aaColorTexture || activeFramebuffer.colorTexture) : + this.drawingContext.getCurrentTexture(); + + const colorAttachment = { + view: colorTexture.createView(), + loadOp: "load", + storeOp: "store", + // If using multisampled texture, resolve to non-multisampled texture + resolveTarget: activeFramebuffer && activeFramebuffer.aaColorTexture ? + activeFramebuffer.colorTexture.createView() : undefined, + }; + + // Use framebuffer depth texture if active, otherwise use canvas depth texture + const depthTexture = activeFramebuffer ? + (activeFramebuffer.aaDepthTexture || activeFramebuffer.depthTexture) : + this.depthTexture; + const depthTextureView = depthTexture?.createView(); + const renderPassDescriptor = { + colorAttachments: [colorAttachment], + depthStencilAttachment: depthTextureView + ? 
{ + view: depthTextureView, + depthLoadOp: "load", + depthStoreOp: "store", + depthClearValue: 1.0, + stencilLoadOp: "load", + stencilStoreOp: "store", + depthReadOnly: false, + stencilReadOnly: false, + } + : undefined, + }; + + const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor); + const currentShader = this._curShader; + passEncoder.setPipeline(currentShader.getPipeline(this._shaderOptions({ mode }))); + // Bind vertex buffers + for (const buffer of this._getVertexBuffers(currentShader)) { + passEncoder.setVertexBuffer( + currentShader.attributes[buffer.attr].location, + buffers[buffer.dst], + 0 + ); + } + // Bind uniforms + this._packUniforms(this._curShader); + this.device.queue.writeBuffer( + currentShader._uniformBuffer, + 0, + currentShader._uniformData.buffer, + currentShader._uniformData.byteOffset, + currentShader._uniformData.byteLength + ); + + // Bind sampler/texture uniforms + for (const [group, entries] of currentShader._groupEntries) { + const bgEntries = entries.map(entry => { + if (group === 0 && entry.binding === 0) { + return { + binding: 0, + resource: { buffer: currentShader._uniformBuffer }, + }; + } + + if (!entry.uniform.isSampler) { + throw new Error( + 'All non-texture/sampler uniforms should be in the uniform struct!' + ); + } + + return { + binding: entry.binding, + resource: entry.uniform.type === 'sampler' + ? 
(entry.uniform.textureSource.texture || this._getEmptyTexture()).getSampler() + : (entry.uniform.texture || this._getEmptyTexture()).textureHandle.view, + }; + }); + + const layout = currentShader._bindGroupLayouts[group]; + const bindGroup = this.device.createBindGroup({ + layout, + entries: bgEntries, + }); + passEncoder.setBindGroup(group, bindGroup); + } + + if (currentShader.shaderType === "fill") { + // Bind index buffer and issue draw + if (buffers.indexBuffer) { + const indexFormat = buffers.indexFormat || "uint16"; + passEncoder.setIndexBuffer(buffers.indexBuffer, indexFormat); + passEncoder.drawIndexed(geometry.faces.length * 3, count, 0, 0, 0); + } else { + passEncoder.draw(geometry.vertices.length, count, 0, 0); + } + } + + if (buffers.lineVerticesBuffer && currentShader.shaderType === "stroke") { + passEncoder.draw(geometry.lineVertices.length / 3, count, 0, 0); + } + + passEncoder.end(); + this.queue.submit([commandEncoder.finish()]); + } + + async ensureTexture(source) { + await this.queue.onSubmittedWorkDone(); + await new Promise((res) => requestAnimationFrame(res)); + const tex = this.getTexture(source); + tex.update(); + await this.queue.onSubmittedWorkDone(); + await new Promise((res) => requestAnimationFrame(res)); + } + + ////////////////////////////////////////////// + // SHADER + ////////////////////////////////////////////// + + _packUniforms(shader) { + for (const name in shader.uniforms) { + const uniform = shader.uniforms[name]; + if (uniform.isSampler) continue; + if (uniform.type === 'u32') { + shader._uniformDataView.setUint32(uniform.offset, uniform._cachedData, true); + } else if (uniform.size === 4) { + shader._uniformData.set([uniform._cachedData], uniform.offset / 4); + } else { + shader._uniformData.set(uniform._cachedData, uniform.offset / 4); + } + } + } + + _parseStruct(shaderSource, structName) { + const structMatch = shaderSource.match( + new RegExp(`struct\\s+${structName}\\s*\\{([^\\}]+)\\}`) + ); + if (!structMatch) { + 
throw new Error(`Can't find a struct definition for ${structName}`); + } + + const structBody = structMatch[1]; + const elements = {}; + let match; + let index = 0; + let offset = 0; + + const elementRegex = + /(?:@location\((\d+)\)\s+)?(\w+):\s*([^\n]+?),?\n/g + + const baseAlignAndSize = (type) => { + if (['f32', 'i32', 'u32', 'bool'].includes(type)) { + return { align: 4, size: 4, items: 1 }; + } + if (/^vec[2-4](|f)$/.test(type)) { + const n = parseInt(type.match(/^vec([2-4])/)[1]); + const size = 4 * n; + const align = n === 2 ? 8 : 16; + return { align, size, items: n }; + } + if (/^mat[2-4](?:x[2-4])?(|f)$/.test(type)) { + if (type[4] === 'x' && type[3] !== type[5]) { + throw new Error('Non-square matrices not implemented yet'); + } + const dim = parseInt(type[3]); + const align = dim === 2 ? 8 : 16; + // Each column must be aligned + const size = Math.ceil(dim * 4 / align) * align * dim; + const pack = dim === 3 + ? (data) => [ + ...data.slice(0, 3), + 0, + ...data.slice(3, 6), + 0, + ...data.slice(6, 9), + 0 + ] + : undefined; + return { align, size, pack, items: dim * dim }; + } + if (/^array<.+>$/.test(type)) { + const [, subtype, rawLength] = type.match(/^array<(.+),\s*(\d+)>/); + const length = parseInt(rawLength); + const { + align: elemAlign, + size: elemSize, + items: elemItems, + pack: elemPack = (data) => [...data] + } = baseAlignAndSize(subtype); + const stride = Math.ceil(elemSize / elemAlign) * elemAlign; + const pack = (data) => { + const result = []; + for (let i = 0; i < data.length; i += elemItems) { + const elemData = elemPack(data.slice(i, elemItems)) + result.push(...elemData); + for (let j = 0; j < stride / 4 - elemData.length; j++) { + result.push(0); + } + } + return result; + }; + return { + align: elemAlign, + size: stride * length, + items: elemItems * length, + pack, + }; + } + throw new Error(`Unknown type in WGSL struct: ${type}`); + }; + + while ((match = elementRegex.exec(structBody)) !== null) { + const [_, location, name, 
type] = match; + const { size, align, pack } = baseAlignAndSize(type); + offset = Math.ceil(offset / align) * align; + const offsetEnd = offset + size; + elements[name] = { + name, + location: location ? parseInt(location) : undefined, + index, + type, + size, + offset, + offsetEnd, + pack + }; + index++; + offset = offsetEnd; + } + + return elements; + } + + _mapUniformData(uniform, data) { + if (uniform.pack) { + return uniform.pack(data); + } + return data; + } + + _getShaderAttributes(shader) { + const mainMatch = /fn main\(.+:\s*(\S+)\s*\)/.exec(shader._vertSrc); + if (!mainMatch) throw new Error("Can't find `fn main` in vertex shader source"); + const inputType = mainMatch[1]; + + return this._parseStruct(shader.vertSrc(), inputType); + } + + getUniformMetadata(shader) { + // Currently, for ease of parsing, we enforce that the first bind group is a + // struct, which contains all non-sampler uniforms. Then, any subsequent + // groups are individual samplers. + + // Extract the struct name from the uniform variable declaration + const uniformVarRegex = /@group\(0\)\s+@binding\(0\)\s+var\s+(\w+)\s*:\s*(\w+);/; + const uniformVarMatch = uniformVarRegex.exec(shader._vertSrc); + if (!uniformVarMatch) { + throw new Error('Expected a uniform struct bound to @group(0) @binding(0)'); + } + const structType = uniformVarMatch[2]; + const uniforms = this._parseStruct(shader.vertSrc(), structType); + // Extract samplers from group bindings + const samplers = {}; + // TODO: support other texture types + const samplerRegex = /@group\((\d+)\)\s*@binding\((\d+)\)\s*var\s+(\w+)\s*:\s*(texture_2d|sampler);/g; + for (const [src, visibility] of [ + [shader._vertSrc, GPUShaderStage.VERTEX], + [shader._fragSrc, GPUShaderStage.FRAGMENT] + ]) { + let match; + while ((match = samplerRegex.exec(src)) !== null) { + const [_, group, binding, name, type] = match; + const groupIndex = parseInt(group); + const bindingIndex = parseInt(binding); + // We're currently reserving group 0 for 
non-sampler stuff, which we parse + // above, so we can skip it here while we grab the remaining sampler + // uniforms + if (groupIndex === 0 && bindingIndex === 0) continue; + + const key = `${groupIndex},${bindingIndex}`; + samplers[key] = { + visibility: (samplers[key]?.visibility || 0) | visibility, + group: groupIndex, + binding: bindingIndex, + name, + type, + isSampler: true, + noData: type === 'sampler', + }; + } + + for (const sampler of Object.values(samplers)) { + if (sampler.type.startsWith('texture')) { + const samplerName = sampler.name + '_sampler'; + const samplerNode = Object + .values(samplers) + .find((s) => s.name === samplerName); + if (!samplerNode) { + throw new Error( + `Every shader texture needs an accompanying sampler. Could not find sampler ${samplerName} for texture ${sampler.name}` + ); + } + samplerNode.textureSource = sampler; + } + } + } + return [...Object.values(uniforms).sort((a, b) => a.index - b.index), ...Object.values(samplers)]; + } + + updateUniformValue(_shader, uniform, data) { + if (uniform.isSampler) { + uniform.texture = + data instanceof Texture ? 
data : this.getTexture(data); + } + } + + _updateTexture(uniform, tex) { + tex.update(); + } + + bindTexture(tex) {} + unbindTexture(tex) {} + _unbindFramebufferTexture(uniform) {} + + createTexture({ width, height, format = 'rgba8unorm', usage }) { + const gpuTexture = this.device.createTexture({ + size: [width, height], + format, + usage: usage || ( + GPUTextureUsage.TEXTURE_BINDING | + GPUTextureUsage.COPY_DST | + GPUTextureUsage.RENDER_ATTACHMENT + ), + }); + return { gpuTexture, view: gpuTexture.createView() }; + } + + uploadTextureFromSource({ gpuTexture }, source) { + this.queue.copyExternalImageToTexture( + { source }, + { texture: gpuTexture }, + [source.width, source.height] + ); + } + + uploadTextureFromData({ gpuTexture }, data, width, height) { + this.queue.writeTexture( + { texture: gpuTexture }, + data, + { bytesPerRow: width * 4, rowsPerImage: height }, + { width, height, depthOrArrayLayers: 1 } + ); + } + + setTextureParams(_texture) {} + + getSampler(texture) { + const key = `${texture.minFilter}_${texture.magFilter}_${texture.wrapS}_${texture.wrapT}`; + if (this.samplers.has(key)) { + return this.samplers.get(key); + } + const constantMapping = { + [constants.NEAREST]: 'nearest', + [constants.LINEAR]: 'linear', + [constants.CLAMP]: 'clamp-to-edge', + [constants.REPEAT]: 'repeat', + [constants.MIRROR]: 'mirror-repeat' + } + const sampler = this.device.createSampler({ + magFilter: constantMapping[texture.magFilter], + minFilter: constantMapping[texture.minFilter], + addressModeU: constantMapping[texture.wrapS], + addressModeV: constantMapping[texture.wrapT], + }); + this.samplers.set(key, sampler); + return sampler; + } + + bindTextureToShader(_texture, _sampler, _uniformName, _unit) {} + + deleteTexture({ gpuTexture }) { + gpuTexture.destroy(); + } + + _getLightShader() { + if (!this._defaultLightShader) { + this._defaultLightShader = new Shader( + this, + materialVertexShader, + materialFragmentShader, + { + vertex: { + "void beforeVertex": "() 
{}", + "Vertex getObjectInputs": "(inputs: Vertex) { return inputs; }", + "Vertex getWorldInputs": "(inputs: Vertex) { return inputs; }", + "Vertex getCameraInputs": "(inputs: Vertex) { return inputs; }", + "void afterVertex": "() {}", + }, + fragment: { + "void beforeFragment": "() {}", + "Inputs getPixelInputs": "(inputs: Inputs) { return inputs; }", + "vec4f combineColors": `(components: ColorComponents) { + var rgb = vec3(0.0); + rgb += components.diffuse * components.baseColor; + rgb += components.ambient * components.ambientColor; + rgb += components.specular * components.specularColor; + rgb += components.emissive; + return vec4(rgb, components.opacity); + }`, + "vec4f getFinalColor": "(color: vec4) { return color; }", + "void afterFragment": "() {}", + }, + } + ); + } + return this._defaultLightShader; + } + + _getColorShader() { + if (!this._defaultColorShader) { + this._defaultColorShader = new Shader( + this, + colorVertexShader, + colorFragmentShader, + { + vertex: { + "void beforeVertex": "() {}", + "Vertex getObjectInputs": "(inputs: Vertex) { return inputs; }", + "Vertex getWorldInputs": "(inputs: Vertex) { return inputs; }", + "Vertex getCameraInputs": "(inputs: Vertex) { return inputs; }", + "void afterVertex": "() {}", + }, + fragment: { + "void beforeFragment": "() {}", + "vec4 getFinalColor": "(color: vec4) { return color; }", + "void afterFragment": "() {}", + }, + } + ); + } + return this._defaultColorShader; + } + + _getLineShader() { + if (!this._defaultLineShader) { + this._defaultLineShader = new Shader( + this, + lineDefs + lineVertexShader, + lineDefs + lineFragmentShader, + { + vertex: { + "void beforeVertex": "() {}", + "StrokeVertex getObjectInputs": "(inputs: StrokeVertex) { return inputs; }", + "StrokeVertex getWorldInputs": "(inputs: StrokeVertex) { return inputs; }", + "StrokeVertex getCameraInputs": "(inputs: StrokeVertex) { return inputs; }", + "void afterVertex": "() {}", + }, + fragment: { + "void beforeFragment": "() {}", + 
"Inputs getPixelInputs": "(inputs: Inputs) { return inputs; }", + "vec4 getFinalColor": "(color: vec4) { return color; }", + "bool shouldDiscard": "(outside: bool) { return outside; };", + "void afterFragment": "() {}", + }, + } + ); + } + return this._defaultLineShader; + } + + ////////////////////////////////////////////// + // Setting + ////////////////////////////////////////////// + _adjustDimensions(width, height) { + // TODO: find max texture size + return { adjustedWidth: width, adjustedHeight: height }; + } + + _applyStencilTestIfClipping() { + // TODO + } + + ////////////////////////////////////////////// + // Shader hooks + ////////////////////////////////////////////// + populateHooks(shader, src, shaderType) { + if (!src.includes('fn main')) return src; + + // Apply some p5-specific preprocessing. WGSL doesn't have preprocessor + // statements, but some of our shaders might need it, so we add a lightweight + // way to add code if a hook is augmented. e.g.: + // struct Uniforms { + // // @p5 ifdef Vertex getWorldInputs + // uModelMatrix: mat4f, + // uViewMatrix: mat4f, + // // @p5 endif + // // @p5 ifndef Vertex getWorldInputs + // uModelViewMatrix: mat4f, + // // @p5 endif + // } + src = src.replace( + /\/\/ @p5 (ifdef|ifndef) (\w+)\s+(\w+)\n((?:(?!\/\/ @p5)(?:.|\n))*)\/\/ @p5 endif/g, + (_, condition, hookType, hookName, body) => { + const target = condition === 'ifdef'; + if ( + ( + !!shader.hooks.modified.vertex[`${hookType} ${hookName}`] || + !!shader.hooks.modified.fragment[`${hookType} ${hookName}`] + ) === target + ) { + return body; + } else { + return ''; + } + } + ); + + let [preMain, main, postMain] = src.split(/((?:@(?:vertex|fragment)\s*)?fn main)/); + + let uniforms = ''; + for (const key in shader.hooks.uniforms) { + const [type, name] = key.split(/\s+/); + uniforms += `${name}: ${type},\n`; + } + preMain = preMain.replace(/struct\s+Uniforms\s+\{/, `$&\n${uniforms}`); + + let hooks = ''; + let defines = ''; + if 
(shader.hooks.declarations) { + hooks += shader.hooks.declarations + '\n'; + } + if (shader.hooks[shaderType].declarations) { + hooks += shader.hooks[shaderType].declarations + '\n'; + } + for (const hookDef in shader.hooks.helpers) { + const [hookType, hookName] = hookDef.split(' '); + const [_, params, body] = /^(\([^\)]*\))((?:.|\n)*)$/.exec(shader.hooks.helpers[hookDef]); + if (hookType === 'void') { + hooks += `fn ${hookName}${params}${body}\n`; + } else { + hooks += `fn ${hookName}${params} -> ${hookType}${body}\n`; + } + } + for (const hookDef in shader.hooks[shaderType]) { + if (hookDef === 'declarations') continue; + const [hookType, hookName] = hookDef.split(' '); + + // Add a const so that if the shader wants to + // optimize away the extra function calls in main, it can do so + defines += `const AUGMENTED_HOOK_${hookName} = ${ + shader.hooks.modified[shaderType][hookDef] ? 'true' : 'false' + };\n`; + + const [_, params, body] = /^(\([^\)]*\))((?:.|\n)*)$/.exec(shader.hooks[shaderType][hookDef]); + if (hookType === 'void') { + hooks += `fn HOOK_${hookName}${params}${body}\n`; + } else { + hooks += `fn HOOK_${hookName}${params} -> ${hookType}${body}\n`; + } + } + + return preMain + '\n' + defines + hooks + main + postMain; + } + + ////////////////////////////////////////////// + // Buffer management for pixel reading + ////////////////////////////////////////////// + + _ensurePixelReadBuffer(requiredSize) { + // Create or resize staging buffer if needed + if (!this.pixelReadBuffer || this.pixelReadBufferSize < requiredSize) { + // Clean up old buffer + if (this.pixelReadBuffer) { + this.pixelReadBuffer.destroy(); + } + + // Create new buffer with padding to avoid frequent recreations + // Scale by 2 to ensure integer size and reasonable headroom + const bufferSize = Math.max(requiredSize, this.pixelReadBufferSize * 2); + this.pixelReadBuffer = this.device.createBuffer({ + size: bufferSize, + usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ, + }); 
+ this.pixelReadBufferSize = bufferSize; + } + return this.pixelReadBuffer; + } + + _alignBytesPerRow(bytesPerRow) { + // WebGPU requires bytesPerRow to be a multiple of 256 bytes for texture-to-buffer copies + return Math.ceil(bytesPerRow / 256) * 256; + } + + ////////////////////////////////////////////// + // Framebuffer methods + ////////////////////////////////////////////// + + defaultFramebufferAlpha() { + return true + } + + defaultFramebufferAntialias() { + return true; + } + + supportsFramebufferAntialias() { + return true; + } + + createFramebufferResources(framebuffer) { + } + + validateFramebufferFormats(framebuffer) { + if (![ + constants.UNSIGNED_BYTE, + constants.FLOAT, + constants.HALF_FLOAT + ].includes(framebuffer.format)) { + console.warn( + 'Unknown Framebuffer format. ' + + 'Please use UNSIGNED_BYTE, FLOAT, or HALF_FLOAT. ' + + 'Defaulting to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + + if (framebuffer.useDepth && ![ + constants.UNSIGNED_INT, + constants.FLOAT + ].includes(framebuffer.depthFormat)) { + console.warn( + 'Unknown Framebuffer depth format. ' + + 'Please use UNSIGNED_INT or FLOAT. Defaulting to FLOAT.' 
+ ); + framebuffer.depthFormat = constants.FLOAT; + } + } + + recreateFramebufferTextures(framebuffer) { + // Clean up existing textures + if (framebuffer.colorTexture && framebuffer.colorTexture.destroy) { + framebuffer.colorTexture.destroy(); + } + if (framebuffer.aaColorTexture && framebuffer.aaColorTexture.destroy) { + framebuffer.aaColorTexture.destroy(); + } + if (framebuffer.depthTexture && framebuffer.depthTexture.destroy) { + framebuffer.depthTexture.destroy(); + } + if (framebuffer.aaDepthTexture && framebuffer.aaDepthTexture.destroy) { + framebuffer.aaDepthTexture.destroy(); + } + + const baseDescriptor = { + size: { + width: framebuffer.width * framebuffer.density, + height: framebuffer.height * framebuffer.density, + depthOrArrayLayers: 1, + }, + format: this._getWebGPUColorFormat(framebuffer), + }; + + // Create non-multisampled texture for texture binding (always needed) + const colorTextureDescriptor = { + ...baseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_SRC, + sampleCount: 1, + }; + framebuffer.colorTexture = this.device.createTexture(colorTextureDescriptor); + + // Create multisampled texture for rendering if antialiasing is enabled + if (framebuffer.antialias) { + const aaColorTextureDescriptor = { + ...baseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT, + sampleCount: this._getValidSampleCount(framebuffer.antialiasSamples), + }; + framebuffer.aaColorTexture = this.device.createTexture(aaColorTextureDescriptor); + } + + if (framebuffer.useDepth) { + const depthBaseDescriptor = { + size: { + width: framebuffer.width * framebuffer.density, + height: framebuffer.height * framebuffer.density, + depthOrArrayLayers: 1, + }, + format: this._getWebGPUDepthFormat(framebuffer), + }; + + // Create non-multisampled depth texture for texture binding (always needed) + const depthTextureDescriptor = { + ...depthBaseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT | 
GPUTextureUsage.TEXTURE_BINDING, + sampleCount: 1, + }; + framebuffer.depthTexture = this.device.createTexture(depthTextureDescriptor); + + // Create multisampled depth texture for rendering if antialiasing is enabled + if (framebuffer.antialias) { + const aaDepthTextureDescriptor = { + ...depthBaseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT, + sampleCount: this._getValidSampleCount(framebuffer.antialiasSamples), + }; + framebuffer.aaDepthTexture = this.device.createTexture(aaDepthTextureDescriptor); + } + } + + // Clear the framebuffer textures after creation + this._clearFramebufferTextures(framebuffer); + } + + _clearFramebufferTextures(framebuffer) { + const commandEncoder = this.device.createCommandEncoder(); + + // Clear the color texture (and multisampled texture if it exists) + const colorTexture = framebuffer.aaColorTexture || framebuffer.colorTexture; + const colorAttachment = { + view: colorTexture.createView(), + loadOp: "clear", + storeOp: "store", + clearValue: { r: 0, g: 0, b: 0, a: 0 }, + resolveTarget: framebuffer.aaColorTexture ? + framebuffer.colorTexture.createView() : undefined, + }; + + // Clear the depth texture if it exists + const depthTexture = framebuffer.aaDepthTexture || framebuffer.depthTexture; + const depthStencilAttachment = depthTexture ? 
{ + view: depthTexture.createView(), + depthLoadOp: "clear", + depthStoreOp: "store", + depthClearValue: 1.0, + stencilLoadOp: "clear", + stencilStoreOp: "store", + depthReadOnly: false, + stencilReadOnly: false, + } : undefined; + + const renderPassDescriptor = { + colorAttachments: [colorAttachment], + depthStencilAttachment: depthStencilAttachment, + }; + + const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor); + passEncoder.end(); + + this.queue.submit([commandEncoder.finish()]); + } + + _getFramebufferColorTextureView(framebuffer) { + if (framebuffer.colorTexture) { + return framebuffer.colorTexture.createView(); + } + return null; + } + + createFramebufferTextureHandle(framebufferTexture) { + const src = framebufferTexture; + let renderer = this; + return { + get view() { + return renderer._getFramebufferColorTextureView(src.framebuffer); + }, + get gpuTexture() { + return src.framebuffer.colorTexture; + } + }; + } + + _getWebGPUColorFormat(framebuffer) { + if (framebuffer.format === constants.FLOAT) { + return framebuffer.channels === RGBA ? 'rgba32float' : 'rgba32float'; + } else if (framebuffer.format === constants.HALF_FLOAT) { + return framebuffer.channels === RGBA ? 'rgba16float' : 'rgba16float'; + } else { + return framebuffer.channels === RGBA ? 'rgba8unorm' : 'rgba8unorm'; + } + } + + _getWebGPUDepthFormat(framebuffer) { + if (framebuffer.useStencil) { + return framebuffer.depthFormat === constants.FLOAT ? 'depth32float-stencil8' : 'depth24plus-stencil8'; + } else { + return framebuffer.depthFormat === constants.FLOAT ? 
'depth32float' : 'depth24plus'; + } + } + + _deleteFramebufferTexture(texture) { + const handle = texture.rawTexture(); + if (handle.texture && handle.texture.destroy) { + handle.texture.destroy(); + } + this.textures.delete(texture); + } + + deleteFramebufferTextures(framebuffer) { + this._deleteFramebufferTexture(framebuffer.color) + if (framebuffer.depth) this._deleteFramebufferTexture(framebuffer.depth); + } + + deleteFramebufferResources(framebuffer) { + if (framebuffer.colorTexture && framebuffer.colorTexture.destroy) { + framebuffer.colorTexture.destroy(); + } + if (framebuffer.depthTexture && framebuffer.depthTexture.destroy) { + framebuffer.depthTexture.destroy(); + } + if (framebuffer.aaDepthTexture && framebuffer.aaDepthTexture.destroy) { + framebuffer.aaDepthTexture.destroy(); + } + } + + getFramebufferToBind(framebuffer) { + } + + updateFramebufferTexture(framebuffer, property) { + // No-op for WebGPU since antialiasing is handled at pipeline level + } + + bindFramebuffer(framebuffer) {} + + async readFramebufferPixels(framebuffer) { + const width = framebuffer.width * framebuffer.density; + const height = framebuffer.height * framebuffer.density; + const bytesPerPixel = 4; + const unalignedBytesPerRow = width * bytesPerPixel; + const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow); + const bufferSize = alignedBytesPerRow * height; + + // const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + const stagingBuffer = this.device.createBuffer({ + size: bufferSize, + usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ, + }); + + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { + texture: framebuffer.colorTexture, + origin: { x: 0, y: 0, z: 0 }, + mipLevel: 0, + aspect: 'all' + }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow, rowsPerImage: height }, + { width, height, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + + // 
Wait for the copy operation to complete + // await this.queue.onSubmittedWorkDone(); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); + + // If alignment was needed, extract the actual pixel data + if (alignedBytesPerRow === unalignedBytesPerRow) { + const result = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel)); + stagingBuffer.unmap(); + return result; + } else { + // Need to extract pixel data from aligned buffer + const result = new Uint8Array(width * height * bytesPerPixel); + const mappedData = new Uint8Array(mappedRange); + for (let y = 0; y < height; y++) { + const srcOffset = y * alignedBytesPerRow; + const dstOffset = y * unalignedBytesPerRow; + result.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset); + } + stagingBuffer.unmap(); + return result; + } + } + + async readFramebufferPixel(framebuffer, x, y) { + // Ensure all pending GPU work is complete before reading pixels + await this.queue.onSubmittedWorkDone(); + + const bytesPerPixel = 4; + const alignedBytesPerRow = this._alignBytesPerRow(bytesPerPixel); + const bufferSize = alignedBytesPerRow; + + const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { + texture: framebuffer.colorTexture, + origin: { x, y, z: 0 } + }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow }, + { width: 1, height: 1, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); + const pixelData = new Uint8Array(mappedRange); + const result = [pixelData[0], pixelData[1], pixelData[2], pixelData[3]]; + + stagingBuffer.unmap(); + return result; + } + + async readFramebufferRegion(framebuffer, x, y, w, h) { + // Ensure 
all pending GPU work is complete before reading pixels + await this.queue.onSubmittedWorkDone(); + + const width = w * framebuffer.density; + const height = h * framebuffer.density; + const bytesPerPixel = 4; + const unalignedBytesPerRow = width * bytesPerPixel; + const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow); + const bufferSize = alignedBytesPerRow * height; + + const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { + texture: framebuffer.colorTexture, + mipLevel: 0, + origin: { x: x * framebuffer.density, y: y * framebuffer.density, z: 0 } + }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow }, + { width, height, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); + + let pixelData; + if (alignedBytesPerRow === unalignedBytesPerRow) { + pixelData = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel)); + } else { + // Need to extract pixel data from aligned buffer + pixelData = new Uint8Array(width * height * bytesPerPixel); + const mappedData = new Uint8Array(mappedRange); + for (let y = 0; y < height; y++) { + const srcOffset = y * alignedBytesPerRow; + const dstOffset = y * unalignedBytesPerRow; + pixelData.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset); + } + } + + // WebGPU doesn't need vertical flipping unlike WebGL + const region = new Image(width, height); + region.imageData = region.canvas.getContext('2d').createImageData(width, height); + region.imageData.data.set(pixelData); + region.pixels = region.imageData.data; + region.updatePixels(); + + if (framebuffer.density !== 1) { + region.pixelDensity(framebuffer.density); + } + + stagingBuffer.unmap(); + return region; + } + + 
  /**
   * Uploads the CPU-side `framebuffer.pixels` array back into the
   * framebuffer's color texture.
   *
   * @param {p5.Framebuffer} framebuffer - Target framebuffer; must have a
   *   `pixels` array of exactly width * density * height * density * 4 bytes.
   * @throws {Error} If `pixels` is missing or has the wrong length
   *   (i.e. loadPixels() was not called first).
   */
  updateFramebufferPixels(framebuffer) {
    const width = framebuffer.width * framebuffer.density;
    const height = framebuffer.height * framebuffer.density;
    const bytesPerPixel = 4; // RGBA, one byte per channel

    // Guard against a stale or missing pixels array before touching the GPU.
    const expectedLength = width * height * bytesPerPixel;
    if (!framebuffer.pixels || framebuffer.pixels.length !== expectedLength) {
      throw new Error(
        'The pixels array has not been set correctly. Please call loadPixels() before updatePixels().'
      );
    }

    // NOTE(review): width/height are framebuffer.width * density without
    // Math.ceil, unlike the main-canvas paths below — assumes the product is
    // always integral; confirm density handling upstream.
    this.device.queue.writeTexture(
      { texture: framebuffer.colorTexture },
      framebuffer.pixels,
      {
        bytesPerRow: width * bytesPerPixel,
        rowsPerImage: height
      },
      { width, height, depthOrArrayLayers: 1 }
    );
  }

  //////////////////////////////////////////////
  // Main canvas pixel methods
  //////////////////////////////////////////////

  /**
   * Lazily creates (and caches) an intermediate texture used to read back
   * the main canvas. The canvas's swap-chain texture cannot be copied to a
   * buffer after the frame is presented, so we snapshot it into this
   * copy-capable texture first.
   *
   * @returns {GPUTexture} The cached readback texture.
   */
  _ensureCanvasReadbackTexture() {
    if (!this.canvasReadbackTexture) {
      const width = Math.ceil(this.width * this._pixelDensity);
      const height = Math.ceil(this.height * this._pixelDensity);

      this.canvasReadbackTexture = this.device.createTexture({
        size: { width, height, depthOrArrayLayers: 1 },
        // Same format as the canvas so copyTextureToTexture is legal.
        format: this.presentationFormat,
        usage: GPUTextureUsage.COPY_DST | GPUTextureUsage.COPY_SRC,
      });
    }
    // NOTE(review): the cached texture is keyed only by existence — if the
    // canvas is later resized, this returns a stale-sized texture; confirm a
    // resize path destroys/clears canvasReadbackTexture.
    return this.canvasReadbackTexture;
  }

  /**
   * Snapshots the current canvas texture into the readback texture and
   * submits the copy. Must run synchronously (no awaits) because
   * getCurrentTexture() is only valid for the current frame.
   *
   * @returns {GPUTexture} The readback texture containing the snapshot.
   */
  _copyCanvasToReadbackTexture() {
    // Get the current canvas texture BEFORE any awaiting
    const canvasTexture = this.drawingContext.getCurrentTexture();

    // Ensure readback texture exists
    const readbackTexture = this._ensureCanvasReadbackTexture();

    // Copy canvas texture to readback texture immediately
    const copyEncoder = this.device.createCommandEncoder();
    copyEncoder.copyTextureToTexture(
      { texture: canvasTexture },
      { texture: readbackTexture },
      {
        width: Math.ceil(this.width * this._pixelDensity),
        height: Math.ceil(this.height * this._pixelDensity),
        depthOrArrayLayers: 1
      }
    );
    this.device.queue.submit([copyEncoder.finish()]);

    return readbackTexture;
  }

  /**
   * Swaps the red and blue channels of an RGBA-packed byte array in place.
   * Used because the canvas snapshot is in the presentation format.
   *
   * NOTE(review): this swap is applied unconditionally, but the preferred
   * canvas format is only BGRA on some platforms — if presentationFormat is
   * 'rgba8unorm' this would invert channels; confirm against how
   * presentationFormat is chosen.
   *
   * @param {Uint8Array} pixelData - Mutated in place.
   * @returns {Uint8Array} The same array, for chaining.
   */
  _convertBGRtoRGB(pixelData) {
    // Convert BGR to RGB by swapping red and blue channels
    for (let i = 0; i < pixelData.length; i += 4) {
      const temp = pixelData[i]; // Store red
      pixelData[i] = pixelData[i + 2]; // Red = Blue
      pixelData[i + 2] = temp; // Blue = Red
      // Green (i + 1) and Alpha (i + 3) stay the same
    }
    return pixelData;
  }

  /**
   * Reads the entire main canvas into `this.pixels` as a tightly-packed
   * RGBA Uint8Array (density-scaled dimensions).
   *
   * Sequence: snapshot canvas -> wait for GPU -> copy texture to a mappable
   * staging buffer -> map, de-pad rows, convert channel order, unmap.
   *
   * @returns {Promise<Uint8Array>} Resolves with the pixel data (also stored
   *   on `this.pixels`).
   */
  async loadPixels() {
    const width = this.width * this._pixelDensity;
    const height = this.height * this._pixelDensity;

    // Copy canvas to readback texture
    const readbackTexture = this._copyCanvasToReadbackTexture();

    // Now we can safely await
    await this.queue.onSubmittedWorkDone();

    const bytesPerPixel = 4;
    const unalignedBytesPerRow = width * bytesPerPixel;
    // WebGPU texture-to-buffer copies require 256-byte row alignment, so
    // rows in the staging buffer may carry padding we must strip below.
    const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow);
    const bufferSize = alignedBytesPerRow * height;

    const stagingBuffer = this._ensurePixelReadBuffer(bufferSize);

    const commandEncoder = this.device.createCommandEncoder();
    commandEncoder.copyTextureToBuffer(
      { texture: readbackTexture },
      { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow },
      { width, height, depthOrArrayLayers: 1 }
    );

    this.device.queue.submit([commandEncoder.finish()]);

    await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize);
    const mappedRange = stagingBuffer.getMappedRange(0, bufferSize);

    if (alignedBytesPerRow === unalignedBytesPerRow) {
      // No padding: a single copy of the mapped range suffices.
      this.pixels = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel));
    } else {
      // Need to extract pixel data from aligned buffer, row by row.
      this.pixels = new Uint8Array(width * height * bytesPerPixel);
      const mappedData = new Uint8Array(mappedRange);
      for (let y = 0; y < height; y++) {
        const srcOffset = y * alignedBytesPerRow;
        const dstOffset = y * unalignedBytesPerRow;
        this.pixels.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset);
      }
    }

    // Convert BGR to RGB for main canvas
    this._convertBGRtoRGB(this.pixels);

    stagingBuffer.unmap();
    return this.pixels;
  }
async _getPixel(x, y) { + // Copy canvas to readback texture + const readbackTexture = this._copyCanvasToReadbackTexture(); + + // Now we can safely await + await this.queue.onSubmittedWorkDone(); + + const bytesPerPixel = 4; + const alignedBytesPerRow = this._alignBytesPerRow(bytesPerPixel); + const bufferSize = alignedBytesPerRow; + + const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { + texture: readbackTexture, + origin: { x, y, z: 0 } + }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow }, + { width: 1, height: 1, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); + const pixelData = new Uint8Array(mappedRange); + + // Convert BGR to RGB for main canvas - swap red and blue + const result = [pixelData[2], pixelData[1], pixelData[0], pixelData[3]]; + + stagingBuffer.unmap(); + return result; + } + + async get(x, y, w, h) { + const pd = this._pixelDensity; + + if (typeof x === 'undefined' && typeof y === 'undefined') { + // get() - return entire canvas + x = y = 0; + w = this.width; + h = this.height; + } else { + x *= pd; + y *= pd; + + if (typeof w === 'undefined' && typeof h === 'undefined') { + // get(x,y) - single pixel + if (x < 0 || y < 0 || x >= this.width * pd || y >= this.height * pd) { + return [0, 0, 0, 0]; + } + + return this._getPixel(x, y); + } + // get(x,y,w,h) - region + } + + // Copy canvas to readback texture + const readbackTexture = this._copyCanvasToReadbackTexture(); + + // Now we can safely await + await this.queue.onSubmittedWorkDone(); + + // Read region and create p5.Image + const width = w * pd; + const height = h * pd; + const bytesPerPixel = 4; + const unalignedBytesPerRow = width * bytesPerPixel; + const alignedBytesPerRow = 
this._alignBytesPerRow(unalignedBytesPerRow); + const bufferSize = alignedBytesPerRow * height; + + const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { + texture: readbackTexture, + origin: { x, y, z: 0 } + }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow }, + { width, height, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + await this.queue.onSubmittedWorkDone(); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); + + let pixelData; + if (alignedBytesPerRow === unalignedBytesPerRow) { + pixelData = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel)); + } else { + // Need to extract pixel data from aligned buffer + pixelData = new Uint8Array(width * height * bytesPerPixel); + const mappedData = new Uint8Array(mappedRange); + for (let y = 0; y < height; y++) { + const srcOffset = y * alignedBytesPerRow; + const dstOffset = y * unalignedBytesPerRow; + pixelData.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset); + } + } + + // Convert BGR to RGB for main canvas + this._convertBGRtoRGB(pixelData); + + const region = new Image(width, height); + region.pixelDensity(pd); + const ctx = region.canvas.getContext('2d'); + const imageData = ctx.createImageData(width, height); + imageData.data.set(pixelData); + ctx.putImageData(imageData, 0, 0); + + stagingBuffer.unmap(); + return region; + } +} + +function rendererWebGPU(p5, fn) { + p5.RendererWebGPU = RendererWebGPU; + + p5.renderers[constants.WEBGPU] = p5.RendererWebGPU; + fn.ensureTexture = function(source) { + return this._renderer.ensureTexture(source); + } + + // TODO: move this and the duplicate in the WebGL renderer to another file + fn.setAttributes = async function (key, value) { + return this._renderer._setAttributes(key, 
value); + } +} + +export default rendererWebGPU; +export { RendererWebGPU }; diff --git a/src/webgpu/shaders/color.js b/src/webgpu/shaders/color.js new file mode 100644 index 0000000000..b22818efa2 --- /dev/null +++ b/src/webgpu/shaders/color.js @@ -0,0 +1,115 @@ +import { getTexture } from './utils'; + +const uniforms = ` +struct Uniforms { +// @p5 ifdef Vertex getWorldInputs + uModelMatrix: mat4x4, + uViewMatrix: mat4x4, + uModelNormalMatrix: mat3x3, + uCameraNormalMatrix: mat3x3, +// @p5 endif +// @p5 ifndef Vertex getWorldInputs + uModelViewMatrix: mat4x4, + uNormalMatrix: mat3x3, +// @p5 endif + uProjectionMatrix: mat4x4, + uMaterialColor: vec4, + uUseVertexColor: u32, +}; +`; + +export const colorVertexShader = ` +struct VertexInput { + @location(0) aPosition: vec3, + @location(1) aNormal: vec3, + @location(2) aTexCoord: vec2, + @location(3) aVertexColor: vec4, +}; + +struct VertexOutput { + @builtin(position) Position: vec4, + @location(0) vVertexNormal: vec3, + @location(1) vVertTexCoord: vec2, + @location(2) vColor: vec4, +}; + +${uniforms} +@group(0) @binding(0) var uniforms: Uniforms; + +struct Vertex { + position: vec3, + normal: vec3, + texCoord: vec2, + color: vec4, +} + +@vertex +fn main(input: VertexInput) -> VertexOutput { + HOOK_beforeVertex(); + var output: VertexOutput; + + let useVertexColor = (uniforms.uUseVertexColor != 0); + var inputs = Vertex( + input.aPosition, + input.aNormal, + input.aTexCoord, + select(uniforms.uMaterialColor, input.aVertexColor, useVertexColor) + ); + +// @p5 ifdef Vertex getObjectInputs + inputs = HOOK_getObjectInputs(inputs); +// @p5 endif + +// @p5 ifdef Vertex getWorldInputs + inputs.position = (uniforms.uModelMatrix * vec4(inputs.position, 1.0)).xyz; + inputs.normal = uniforms.uModelNormalMatrix * inputs.normal; + inputs = HOOK_getWorldInputs(inputs); +// @p5 endif + +// @p5 ifdef Vertex getWorldInputs + // Already multiplied by the model matrix, just apply view + inputs.position = (uniforms.uViewMatrix * 
vec4(inputs.position, 1.0)).xyz; + inputs.normal = uniforms.uCameraNormalMatrix * inputs.normal; +// @p5 endif +// @p5 ifndef Vertex getWorldInputs + // Apply both at once + inputs.position = (uniforms.uModelViewMatrix * vec4(inputs.position, 1.0)).xyz; + inputs.normal = uniforms.uNormalMatrix * inputs.normal; +// @p5 endif + +// @p5 ifdef Vertex getCameraInputs + inputs = HOOK_getCameraInputs(inputs); +// @p5 endif + + output.vVertTexCoord = inputs.texCoord; + output.vVertexNormal = normalize(inputs.normal); + output.vColor = inputs.color; + + output.Position = uniforms.uProjectionMatrix * vec4(inputs.position, 1.0); + + HOOK_afterVertex(); + return output; +} +`; + +export const colorFragmentShader = ` +struct FragmentInput { + @location(0) vVertexNormal: vec3, + @location(1) vVertTexCoord: vec2, + @location(2) vColor: vec4, +}; + +${uniforms} +@group(0) @binding(0) var uniforms: Uniforms; + +${getTexture} + +@fragment +fn main(input: FragmentInput) -> @location(0) vec4 { + HOOK_beforeFragment(); + var outColor = HOOK_getFinalColor(input.vColor); + outColor = vec4(outColor.rgb * outColor.a, outColor.a); + HOOK_afterFragment(); + return outColor; +} +`; diff --git a/src/webgpu/shaders/line.js b/src/webgpu/shaders/line.js new file mode 100644 index 0000000000..5c01ddd1bf --- /dev/null +++ b/src/webgpu/shaders/line.js @@ -0,0 +1,360 @@ +import { getTexture } from './utils' + +const uniforms = ` +struct Uniforms { +// @p5 ifdef StrokeVertex getWorldInputs + uModelMatrix: mat4x4, + uViewMatrix: mat4x4, +// @p5 endif +// @p5 ifndef StrokeVertex getWorldInputs + uModelViewMatrix: mat4x4, +// @p5 endif + uMaterialColor: vec4, + uProjectionMatrix: mat4x4, + uStrokeWeight: f32, + uUseLineColor: f32, + uSimpleLines: f32, + uViewport: vec4, + uPerspective: u32, + uStrokeCap: u32, + uStrokeJoin: u32, +}`; + +export const lineVertexShader = ` +struct StrokeVertexInput { + @location(0) aPosition: vec3, + @location(1) aSide: f32, + @location(2) aTangentIn: vec3, + @location(3) 
aTangentOut: vec3, + @location(4) aVertexColor: vec4, +}; + +struct StrokeVertexOutput { + @builtin(position) Position: vec4, + @location(0) vColor: vec4, + @location(1) vTangent: vec2, + @location(2) vCenter: vec2, + @location(3) vPosition: vec2, + @location(4) vMaxDist: f32, + @location(5) vCap: f32, + @location(6) vJoin: f32, + @location(7) vStrokeWeight: f32, +}; + +${uniforms} +@group(0) @binding(0) var uniforms: Uniforms; + +struct StrokeVertex { + position: vec3, + tangentIn: vec3, + tangentOut: vec3, + color: vec4, + weight: f32, +} + +fn lineIntersection(aPoint: vec2f, aDir: vec2f, bPoint: vec2f, bDir: vec2f) -> vec2f { + // Rotate and translate so a starts at the origin and goes out to the right + var bMutPoint = bPoint; + bMutPoint -= aPoint; + var rotatedBFrom = vec2( + bMutPoint.x*aDir.x + bMutPoint.y*aDir.y, + bMutPoint.y*aDir.x - bMutPoint.x*aDir.y + ); + var bTo = bMutPoint + bDir; + var rotatedBTo = vec2( + bTo.x*aDir.x + bTo.y*aDir.y, + bTo.y*aDir.x - bTo.x*aDir.y + ); + var intersectionDistance = + rotatedBTo.x + (rotatedBFrom.x - rotatedBTo.x) * rotatedBTo.y / + (rotatedBTo.y - rotatedBFrom.y); + return aPoint + aDir * intersectionDistance; +} + +@vertex +fn main(input: StrokeVertexInput) -> StrokeVertexOutput { + HOOK_beforeVertex(); + var output: StrokeVertexOutput; + let simpleLines = (uniforms.uSimpleLines != 0.); + if (!simpleLines) { + if (all(input.aTangentIn == vec3()) != all(input.aTangentOut == vec3())) { + output.vCap = 1.; + } else { + output.vCap = 0.; + } + let conditionA = any(input.aTangentIn != vec3()); + let conditionB = any(input.aTangentOut != vec3()); + let conditionC = any(input.aTangentIn != input.aTangentOut); + if (conditionA && conditionB && conditionC) { + output.vJoin = 1.; + } else { + output.vJoin = 0.; + } + } + var lineColor: vec4; + if (uniforms.uUseLineColor != 0.) 
{ + lineColor = input.aVertexColor; + } else { + lineColor = uniforms.uMaterialColor; + } + var inputs = StrokeVertex( + input.aPosition.xyz, + input.aTangentIn, + input.aTangentOut, + lineColor, + uniforms.uStrokeWeight + ); + +// @p5 ifdef StrokeVertex getObjectInputs + inputs = HOOK_getObjectInputs(inputs); +// @p5 endif + +// @p5 ifdef StrokeVertex getWorldInputs + inputs.position = (uniforms.uModelMatrix * vec4(inputs.position, 1.)).xyz; + inputs.tangentIn = (uniforms.uModelMatrix * vec4(input.aTangentIn, 1.)).xyz; + inputs.tangentOut = (uniforms.uModelMatrix * vec4(input.aTangentOut, 1.)).xyz; + inputs = HOOK_getWorldInputs(inputs); +// @p5 endif + +// @p5 ifdef StrokeVertex getWorldInputs + // Already multiplied by the model matrix, just apply view + inputs.position = (uniforms.uViewMatrix * vec4(inputs.position, 1.)).xyz; + inputs.tangentIn = (uniforms.uViewMatrix * vec4(input.aTangentIn, 0.)).xyz; + inputs.tangentOut = (uniforms.uViewMatrix * vec4(input.aTangentOut, 0.)).xyz; +// @p5 endif +// @p5 ifndef StrokeVertex getWorldInputs + // Apply both at once + inputs.position = (uniforms.uModelViewMatrix * vec4(inputs.position, 1.)).xyz; + inputs.tangentIn = (uniforms.uModelViewMatrix * vec4(input.aTangentIn, 0.)).xyz; + inputs.tangentOut = (uniforms.uModelViewMatrix * vec4(input.aTangentOut, 0.)).xyz; +// @p5 endif +// @p5 ifdef StrokeVertex getCameraInputs + inputs = HOOK_getCameraInputs(inputs); +// @p5 endif + + var posp = vec4(inputs.position, 1.); + var posqIn = vec4(inputs.position + inputs.tangentIn, 1.); + var posqOut = vec4(inputs.position + inputs.tangentOut, 1.); + output.vStrokeWeight = inputs.weight; + + var facingCamera = pow( + // The word space tangent's z value is 0 if it's facing the camera + abs(normalize(posqIn-posp).z), + + // Using pow() here to ramp 'facingCamera' up from 0 to 1 really quickly + // so most lines get scaled and don't get clipped + 0.25 + ); + + // Moving vertices slightly toward the camera + // to avoid depth-fighting 
with the fill triangles. + // A mix of scaling and offsetting is used based on distance + // Discussion here: + // https://github.com/processing/p5.js/issues/7200 + + // using a scale <1 moves the lines towards nearby camera + // in order to prevent popping effects due to half of + // the line disappearing behind the geometry faces. + var zDistance = -posp.z; + var distanceFactor = smoothstep(0., 800., zDistance); + + // Discussed here: + // http://www.opengl.org/discussion_boards/ubbthreads.php?ubb=showflat&Number=252848 + var scale = mix(1., 0.995, facingCamera); + var dynamicScale = mix(scale, 1.0, distanceFactor); // Closer = more scale, farther = less + + posp = vec4(posp.xyz * dynamicScale, posp.w); + posqIn = vec4(posqIn.xyz * dynamicScale, posqIn.w); + posqOut= vec4(posqOut.xyz * dynamicScale, posqOut.w); + + // Moving vertices slightly toward camera when far away + // https://github.com/processing/p5.js/issues/6956 + var zOffset = mix(0., -1., facingCamera); + var dynamicZAdjustment = mix(0., zOffset, distanceFactor); // Closer = less zAdjustment, farther = more + + posp.z -= dynamicZAdjustment; + posqIn.z -= dynamicZAdjustment; + posqOut.z -= dynamicZAdjustment; + + var p = uniforms.uProjectionMatrix * posp; + var qIn = uniforms.uProjectionMatrix * posqIn; + var qOut = uniforms.uProjectionMatrix * posqOut; + + var tangentIn = normalize((qIn.xy * p.w - p.xy * qIn.w) * uniforms.uViewport.zw); + var tangentOut = normalize((qOut.xy * p.w - p.xy * qOut.w) * uniforms.uViewport.zw); + + var curPerspScale = vec2(); + if (uniforms.uPerspective == 1) { + // Perspective --- + // convert from world to clip by multiplying with projection scaling factor + // to get the right thickness (see https://github.com/processing/processing/issues/5182) + + // The y value of the projection matrix may be flipped if rendering to a Framebuffer. + // Multiplying again by its sign here negates the flip to get just the scale. 
+ curPerspScale = (uniforms.uProjectionMatrix * vec4(1., sign(uniforms.uProjectionMatrix[1][1]), 0., 0.)).xy; + } else { + // No Perspective --- + // multiply by W (to cancel out division by W later in the pipeline) and + // convert from screen to clip (derived from clip to screen above) + curPerspScale = p.w / (0.5 * uniforms.uViewport.zw); + } + + var offset = vec2(); + if (output.vJoin == 1. && !simpleLines) { + output.vTangent = normalize(tangentIn + tangentOut); + var normalIn = vec2(-tangentIn.y, tangentIn.x); + var normalOut = vec2(-tangentOut.y, tangentOut.x); + var side = sign(input.aSide); + var sideEnum = abs(input.aSide); + + // We generate vertices for joins on either side of the centerline, but + // the "elbow" side is the only one needing a join. By not setting the + // offset for the other side, all its vertices will end up in the same + // spot and not render, effectively discarding it. + if (sign(dot(tangentOut, vec2(-tangentIn.y, tangentIn.x))) != side) { + // Side enums: + // 1: the side going into the join + // 2: the middle of the join + // 3: the side going out of the join + if (sideEnum == 2.) { + // Calculate the position + tangent on either side of the join, and + // find where the lines intersect to find the elbow of the join + var c = (posp.xy / posp.w + vec2(1.)) * 0.5 * uniforms.uViewport.zw; + + var intersection = lineIntersection( + c + (side * normalIn * inputs.weight / 2.), + tangentIn, + c + (side * normalOut * inputs.weight / 2.), + tangentOut + ); + offset = intersection - c; + + + // When lines are thick and the angle of the join approaches 180, the + // elbow might be really far from the center. We'll apply a limit to + // the magnitude to avoid lines going across the whole screen when this + // happens. + var mag = length(offset); + var maxMag = 3. * inputs.weight; + if (mag > maxMag) { + offset *= maxMag / mag; + } + } else if (sideEnum == 1.) { + offset = side * normalIn * inputs.weight / 2.; + } else if (sideEnum == 3.) 
{ + offset = side * normalOut * inputs.weight / 2.; + } + } + if (uniforms.uStrokeJoin == 2) { + var avgNormal = vec2(-output.vTangent.y, output.vTangent.x); + output.vMaxDist = abs(dot(avgNormal, normalIn * inputs.weight / 2.)); + } else { + output.vMaxDist = inputs.weight / 2.; + } + } else { + var tangent: vec2; + if (all(input.aTangentIn == vec3())) { + tangent = tangentOut; + } else { + tangent = tangentIn; + } + output.vTangent = tangent; + var normal = vec2(-tangent.y, tangent.x); + + var normalOffset = sign(input.aSide); + // Caps will have side values of -2 or 2 on the edge of the cap that + // extends out from the line + var tangentOffset = abs(input.aSide) - 1.; + offset = (normal * normalOffset + tangent * tangentOffset) * + inputs.weight * 0.5; + output.vMaxDist = inputs.weight / 2.; + } + output.vCenter = p.xy; + output.vPosition = output.vCenter + offset; + output.vColor = inputs.color; + + output.Position = vec4( + p.xy + offset.xy * curPerspScale, + p.zw + ); + HOOK_afterVertex(); + return output; +}`; + +export const lineFragmentShader = ` +struct StrokeFragmentInput { + @location(0) vColor: vec4, + @location(1) vTangent: vec2, + @location(2) vCenter: vec2, + @location(3) vPosition: vec2, + @location(4) vMaxDist: f32, + @location(5) vCap: f32, + @location(6) vJoin: f32, + @location(7) vStrokeWeight: f32, +} + +${uniforms} +@group(0) @binding(0) var uniforms: Uniforms; + +${getTexture} + +fn distSquared(a: vec2, b: vec2) -> f32 { + return dot(b - a, b - a); +} + +struct Inputs { + color: vec4, + tangent: vec2, + center: vec2, + position: vec2, + strokeWeight: f32, +} + +@fragment +fn main(input: StrokeFragmentInput) -> @location(0) vec4 { + HOOK_beforeFragment(); + + var inputs: Inputs; + inputs.color = input.vColor; + inputs.tangent = input.vTangent; + inputs.center = input.vCenter; + inputs.position = input.vPosition; + inputs.strokeWeight = input.vStrokeWeight; + inputs = HOOK_getPixelInputs(inputs); + + if (input.vCap > 0.) 
{ + if ( + uniforms.uStrokeCap == STROKE_CAP_ROUND && + HOOK_shouldDiscard(distSquared(inputs.position, inputs.center) > inputs.strokeWeight * inputs.strokeWeight * 0.25) + ) { + discard; + } else if ( + uniforms.uStrokeCap == STROKE_CAP_SQUARE && + HOOK_shouldDiscard(dot(inputs.position - inputs.center, inputs.tangent) > 0.) + ) { + discard; + } else if (HOOK_shouldDiscard(false)) { + discard; + } + } else if (input.vJoin > 0.) { + if ( + uniforms.uStrokeJoin == STROKE_JOIN_ROUND && + HOOK_shouldDiscard(distSquared(inputs.position, inputs.center) > inputs.strokeWeight * inputs.strokeWeight * 0.25) + ) { + discard; + } else if (uniforms.uStrokeJoin == STROKE_JOIN_BEVEL) { + let normal = vec2(-inputs.tangent.y, -inputs.tangent.x); + if (HOOK_shouldDiscard(abs(dot(inputs.position - inputs.center, normal)) > input.vMaxDist)) { + discard; + } + } else if (HOOK_shouldDiscard(false)) { + discard; + } + } + var col = HOOK_getFinalColor(inputs.color); + col = vec4(col.rgb, 1.0) * col.a; + HOOK_afterFragment(); + return vec4(col); +} +`; + diff --git a/src/webgpu/shaders/material.js b/src/webgpu/shaders/material.js new file mode 100644 index 0000000000..774f131bce --- /dev/null +++ b/src/webgpu/shaders/material.js @@ -0,0 +1,355 @@ +import { getTexture } from './utils'; + +const uniforms = ` +struct Uniforms { +// @p5 ifdef Vertex getWorldInputs + uModelMatrix: mat4x4, + uModelNormalMatrix: mat3x3, + uCameraNormalMatrix: mat3x3, +// @p5 endif +// @p5 ifndef Vertex getWorldInputs + uModelViewMatrix: mat4x4, + uNormalMatrix: mat3x3, +// @p5 endif + uViewMatrix: mat4x4, + uProjectionMatrix: mat4x4, + uMaterialColor: vec4, + uUseVertexColor: u32, + + uHasSetAmbient: u32, + uAmbientColor: vec3, + uSpecularMatColor: vec4, + uAmbientMatColor: vec4, + uEmissiveMatColor: vec4, + + uTint: vec4, + isTexture: u32, + + uCameraRotation: mat3x3, + + uDirectionalLightCount: i32, + uLightingDirection: array, 5>, + uDirectionalDiffuseColors: array, 5>, + uDirectionalSpecularColors: array, 
5>, + + uPointLightCount: i32, + uPointLightLocation: array, 5>, + uPointLightDiffuseColors: array, 5>, + uPointLightSpecularColors: array, 5>, + + uSpotLightCount: i32, + uSpotLightAngle: vec4, + uSpotLightConc: vec4, + uSpotLightDiffuseColors: array, 4>, + uSpotLightSpecularColors: array, 4>, + uSpotLightLocation: array, 4>, + uSpotLightDirection: array, 4>, + + uSpecular: u32, + uShininess: f32, + uMetallic: f32, + + uConstantAttenuation: f32, + uLinearAttenuation: f32, + uQuadraticAttenuation: f32, + + uUseImageLight: u32, + uUseLighting: u32, +}; +`; + +export const materialVertexShader = ` +struct VertexInput { + @location(0) aPosition: vec3, + @location(1) aNormal: vec3, + @location(2) aTexCoord: vec2, + @location(3) aVertexColor: vec4, +}; + +struct VertexOutput { + @builtin(position) Position: vec4, + @location(0) vNormal: vec3, + @location(1) vTexCoord: vec2, + @location(2) vViewPosition: vec3, + @location(4) vColor: vec4, +}; + +${uniforms} +@group(0) @binding(0) var uniforms: Uniforms; + +struct Vertex { + position: vec3, + normal: vec3, + texCoord: vec2, + color: vec4, +} + +@vertex +fn main(input: VertexInput) -> VertexOutput { + HOOK_beforeVertex(); + var output: VertexOutput; + + let useVertexColor = (uniforms.uUseVertexColor != 0); + var inputs = Vertex( + input.aPosition, + input.aNormal, + input.aTexCoord, + select(uniforms.uMaterialColor, input.aVertexColor, useVertexColor) + ); + +// @p5 ifdef Vertex getObjectInputs + inputs = HOOK_getObjectInputs(inputs); +// @p5 endif + +// @p5 ifdef Vertex getWorldInputs + inputs.position = (uniforms.uModelMatrix * vec4(inputs.position, 1.0)).xyz; + inputs.normal = uniforms.uModelNormalMatrix * inputs.normal; + inputs = HOOK_getWorldInputs(inputs); +// @p5 endif + +// @p5 ifdef Vertex getWorldInputs + // Already multiplied by the model matrix, just apply view + inputs.position = (uniforms.uViewMatrix * vec4(inputs.position, 1.0)).xyz; + inputs.normal = uniforms.uCameraNormalMatrix * inputs.normal; +// @p5 
endif +// @p5 ifndef Vertex getWorldInputs + // Apply both at once + inputs.position = (uniforms.uModelViewMatrix * vec4(inputs.position, 1.0)).xyz; + inputs.normal = uniforms.uNormalMatrix * inputs.normal; +// @p5 endif + +// @p5 ifdef Vertex getCameraInputs + inputs = HOOK_getCameraInputs(inputs); +// @p5 endif + + output.vViewPosition = inputs.position; + output.vTexCoord = inputs.texCoord; + output.vNormal = normalize(inputs.normal); + output.vColor = inputs.color; + + output.Position = uniforms.uProjectionMatrix * vec4(inputs.position, 1.0); + + HOOK_afterVertex(); + return output; +} +`; + +export const materialFragmentShader = ` +struct FragmentInput { + @location(0) vNormal: vec3, + @location(1) vTexCoord: vec2, + @location(2) vViewPosition: vec3, + @location(4) vColor: vec4, +}; + +${uniforms} +@group(0) @binding(0) var uniforms: Uniforms; + +@group(0) @binding(1) var uSampler: texture_2d; +@group(0) @binding(2) var uSampler_sampler: sampler; + +struct ColorComponents { + baseColor: vec3, + opacity: f32, + ambientColor: vec3, + specularColor: vec3, + diffuse: vec3, + ambient: vec3, + specular: vec3, + emissive: vec3, +} + +struct Inputs { + normal: vec3, + texCoord: vec2, + ambientLight: vec3, + ambientMaterial: vec3, + specularMaterial: vec3, + emissiveMaterial: vec3, + color: vec4, + shininess: f32, + metalness: f32, +} + +${getTexture} + +struct LightResult { + diffuse: vec3, + specular: vec3, +} +struct LightIntensityResult { + diffuse: f32, + specular: f32, +} + +const specularFactor = 2.0; +const diffuseFactor = 0.73; + +fn phongSpecular( + lightDirection: vec3, + viewDirection: vec3, + surfaceNormal: vec3, + shininess: f32 +) -> f32 { + let R = reflect(lightDirection, surfaceNormal); + return pow(max(0.0, dot(R, viewDirection)), shininess); +} + +fn lambertDiffuse(lightDirection: vec3, surfaceNormal: vec3) -> f32 { + return max(0.0, dot(-lightDirection, surfaceNormal)); +} + +fn singleLight( + viewDirection: vec3, + normal: vec3, + lightVector: 
vec3, + shininess: f32, + metallic: f32 +) -> LightIntensityResult { + let lightDir = normalize(lightVector); + let specularIntensity = mix(1.0, 0.4, metallic); + let diffuseIntensity = mix(1.0, 0.1, metallic); + let diffuse = lambertDiffuse(lightDir, normal) * diffuseIntensity; + let specular = select( + 0., + phongSpecular(lightDir, viewDirection, normal, shininess) * specularIntensity, + uniforms.uSpecular == 1 + ); + return LightIntensityResult(diffuse, specular); +} + +fn totalLight( + modelPosition: vec3, + normal: vec3, + shininess: f32, + metallic: f32 +) -> LightResult { + var totalSpecular = vec3(0.0, 0.0, 0.0); + var totalDiffuse = vec3(0.0, 0.0, 0.0); + + if (uniforms.uUseLighting == 0) { + return LightResult(vec3(1.0, 1.0, 1.0), totalSpecular); + } + + let viewDirection = normalize(-modelPosition); + + for (var j = 0; j < 5; j++) { + if (j < uniforms.uDirectionalLightCount) { + let lightVector = (uniforms.uViewMatrix * vec4( + uniforms.uLightingDirection[j], + 0.0 + )).xyz; + let lightColor = uniforms.uDirectionalDiffuseColors[j]; + let specularColor = uniforms.uDirectionalSpecularColors[j]; + let result = singleLight(viewDirection, normal, lightVector, shininess, metallic); + totalDiffuse += result.diffuse * lightColor; + totalSpecular += result.specular * specularColor; + } + + if (j < uniforms.uPointLightCount) { + let lightPosition = (uniforms.uViewMatrix * vec4( + uniforms.uPointLightLocation[j], + 1.0 + )).xyz; + let lightVector = modelPosition - lightPosition; + let lightDistance = length(lightVector); + let lightFalloff = 1.0 / ( + uniforms.uConstantAttenuation + + lightDistance * uniforms.uLinearAttenuation + + lightDistance * lightDistance * uniforms.uQuadraticAttenuation + ); + let lightColor = uniforms.uPointLightDiffuseColors[j] * lightFalloff; + let specularColor = uniforms.uPointLightSpecularColors[j] * lightFalloff; + let result = singleLight(viewDirection, normal, lightVector, shininess, metallic); + totalDiffuse += result.diffuse * 
lightColor; + totalSpecular += result.specular * specularColor; + } + + if (j < uniforms.uSpotLightCount) { + let lightPosition = (uniforms.uViewMatrix * vec4( + uniforms.uSpotLightLocation[j], + 1.0 + )).xyz; + let lightVector = modelPosition - lightPosition; + let lightDistance = length(lightVector); + var lightFalloff = 1.0 / ( + uniforms.uConstantAttenuation + + lightDistance * uniforms.uLinearAttenuation + + lightDistance * lightDistance * uniforms.uQuadraticAttenuation + ); + let lightDirection = (uniforms.uViewMatrix * vec4( + uniforms.uSpotLightDirection[j], + 0.0 + )).xyz; + let spotDot = dot(normalize(lightVector), normalize(lightDirection)); + let spotFalloff = select( + 0.0, + pow(spotDot, uniforms.uSpotLightConc[j]), + spotDot < uniforms.uSpotLightAngle[j] + ); + lightFalloff *= spotFalloff; + let lightColor = uniforms.uSpotLightDiffuseColors[j]; + let specularColor = uniforms.uSpotLightSpecularColors[j]; + let result = singleLight(viewDirection, normal, lightVector, shininess, metallic); + totalDiffuse += result.diffuse * lightColor; + totalSpecular += result.specular * specularColor; + } + } + + // TODO: image light + + return LightResult( + totalDiffuse * diffuseFactor, + totalSpecular * specularFactor + ); +} + +@fragment +fn main(input: FragmentInput) -> @location(0) vec4 { + HOOK_beforeFragment(); + + let color = select( + input.vColor, + getTexture(uSampler, uSampler_sampler, input.vTexCoord) * (uniforms.uTint/255.0), + uniforms.isTexture == 1 + ); // TODO: check isTexture and apply tint + var inputs = Inputs( + normalize(input.vNormal), + input.vTexCoord, + uniforms.uAmbientColor, + select(color.rgb, uniforms.uAmbientMatColor.rgb, uniforms.uHasSetAmbient == 1), + uniforms.uSpecularMatColor.rgb, + uniforms.uEmissiveMatColor.rgb, + color, + uniforms.uShininess, + uniforms.uMetallic + ); + inputs = HOOK_getPixelInputs(inputs); + + let light = totalLight( + input.vViewPosition, + inputs.normal, + inputs.shininess, + inputs.metalness + ); + + let 
baseColor = inputs.color; + let components = ColorComponents( + baseColor.rgb, + baseColor.a, + inputs.ambientMaterial, + inputs.specularMaterial, + light.diffuse, + inputs.ambientLight, + light.specular, + inputs.emissiveMaterial + ); + + var outColor = HOOK_getFinalColor( + HOOK_combineColors(components) + ); + outColor = vec4(outColor.rgb * outColor.a, outColor.a); + HOOK_afterFragment(); + return outColor; +} +`; diff --git a/src/webgpu/shaders/utils.js b/src/webgpu/shaders/utils.js new file mode 100644 index 0000000000..a6b79426e9 --- /dev/null +++ b/src/webgpu/shaders/utils.js @@ -0,0 +1,10 @@ +export const getTexture = ` +fn getTexture(texture: texture_2d, texSampler: sampler, coord: vec2) -> vec4 { + let color = textureSample(texture, texSampler, coord); + let alpha = color.a; + return vec4( + select(color.rgb / alpha, vec3(0.0), alpha == 0.0), + alpha + ); +} +`; diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js new file mode 100644 index 0000000000..bffc88c219 --- /dev/null +++ b/test/unit/visual/cases/webgpu.js @@ -0,0 +1,309 @@ +import { vi } from "vitest"; +import p5 from "../../../../src/app"; +import { visualSuite, visualTest } from "../visualTest"; +import rendererWebGPU from "../../../../src/webgpu/p5.RendererWebGPU"; + +p5.registerAddon(rendererWebGPU); + +visualSuite("WebGPU", function () { + visualSuite("Shaders", function () { + visualTest( + "The color shader runs successfully", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background("white"); + for (const [i, color] of ["red", "lime", "blue"].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.fill(color); + p5.translate(15, 0); + p5.noStroke(); + p5.circle(0, 0, 20); + p5.pop(); + } + await screenshot(); + }, + ); + + visualTest( + "The stroke shader runs successfully", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background("white"); + for (const [i, color] of ["red", 
"lime", "blue"].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.translate(15, 0); + p5.stroke(color); + p5.strokeWeight(2); + p5.circle(0, 0, 20); + p5.pop(); + } + await screenshot(); + }, + ); + + visualTest( + "The material shader runs successfully", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background("white"); + p5.ambientLight(50); + p5.directionalLight(100, 100, 100, 0, 1, -1); + p5.pointLight(155, 155, 155, 0, -200, 500); + p5.specularMaterial(255); + p5.shininess(300); + for (const [i, color] of ["red", "lime", "blue"].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.fill(color); + p5.translate(15, 0); + p5.noStroke(); + p5.sphere(10); + p5.pop(); + } + await screenshot(); + }, + ); + + visualTest("Shader hooks can be used", async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + const myFill = p5.baseMaterialShader().modify({ + "Vertex getWorldInputs": `(inputs: Vertex) { + var result = inputs; + result.position.y += 10.0 * sin(inputs.position.x * 0.25); + return result; + }`, + }); + const myStroke = p5.baseStrokeShader().modify({ + "StrokeVertex getWorldInputs": `(inputs: StrokeVertex) { + var result = inputs; + result.position.y += 10.0 * sin(inputs.position.x * 0.25); + return result; + }`, + }); + p5.background("black"); + p5.shader(myFill); + p5.strokeShader(myStroke); + p5.fill("red"); + p5.stroke("white"); + p5.strokeWeight(5); + p5.circle(0, 0, 30); + await screenshot(); + }); + + visualTest( + "Textures in the material shader work", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + const tex = p5.createImage(50, 50); + tex.loadPixels(); + for (let x = 0; x < tex.width; x++) { + for (let y = 0; y < tex.height; y++) { + const off = (x + y * tex.width) * 4; + tex.pixels[off] = p5.round((x / tex.width) * 255); + tex.pixels[off + 1] = p5.round((y / tex.height) * 255); + tex.pixels[off + 2] = 0; + tex.pixels[off + 3] = 
255; + } + } + tex.updatePixels(); + p5.texture(tex); + p5.plane(p5.width, p5.height); + + await screenshot(); + }, + ); + }); + + visualSuite("Canvas Resizing", function () { + visualTest( + "Main canvas drawing after resize", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + // Resize the canvas + p5.resizeCanvas(30, 30); + // Draw to the main canvas after resize + p5.background(100, 0, 100); + p5.fill(0, 255, 255); + p5.noStroke(); + p5.circle(0, 0, 20); + await screenshot(); + }, + ); + }); + + visualSuite("Framebuffers", function () { + visualTest( + "Basic framebuffer draw to canvas", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + // Create a framebuffer + const fbo = p5.createFramebuffer({ width: 25, height: 25 }); + + // Draw to the framebuffer + fbo.draw(() => { + p5.background(255, 0, 0); // Red background + p5.fill(0, 255, 0); // Green circle + p5.noStroke(); + p5.circle(0, 0, 20); + }); + + // Draw the framebuffer to the main canvas + p5.background(0, 0, 255); // Blue background + p5.texture(fbo); + p5.noStroke(); + p5.plane(25, 25); + + await screenshot(); + }, + ); + + visualTest( + "Framebuffer with different sizes", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + // Create two different sized framebuffers + const fbo1 = p5.createFramebuffer({ width: 20, height: 20 }); + const fbo2 = p5.createFramebuffer({ width: 15, height: 15 }); + + // Draw to first framebuffer + fbo1.draw(() => { + p5.background(255, 100, 100); + p5.fill(255, 255, 0); + p5.noStroke(); + p5.rect(-5, -5, 10, 10); + }); + + // Draw to second framebuffer + fbo2.draw(() => { + p5.background(100, 255, 100); + p5.fill(255, 0, 255); + p5.noStroke(); + p5.circle(0, 0, 10); + }); + + // Draw both to main canvas + p5.background(50); + p5.push(); + p5.translate(-12.5, -12.5); + p5.texture(fbo1); + p5.noStroke(); + p5.plane(20, 20); + p5.pop(); + + p5.push(); + p5.translate(12.5, 12.5); + 
p5.texture(fbo2); + p5.noStroke(); + p5.plane(15, 15); + p5.pop(); + + await screenshot(); + }, + ); + + visualTest("Auto-sized framebuffer", async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + // Create auto-sized framebuffer (should match canvas size) + const fbo = p5.createFramebuffer(); + + // Draw to the framebuffer + fbo.draw(() => { + p5.background(0); + p5.translate(-fbo.width / 2, -fbo.height / 2) + p5.stroke(255); + p5.strokeWeight(2); + p5.noFill(); + // Draw a grid pattern to verify size + for (let x = 0; x < 50; x += 10) { + p5.line(x, 0, x, 50); + } + for (let y = 0; y < 50; y += 10) { + p5.line(0, y, 50, y); + } + p5.fill(255, 0, 0); + p5.noStroke(); + p5.circle(25, 25, 15); + }); + + // Draw the framebuffer to fill the main canvas + p5.texture(fbo); + p5.noStroke(); + p5.plane(50, 50); + + await screenshot(); + }); + + visualTest( + "Auto-sized framebuffer after canvas resize", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + // Create auto-sized framebuffer + const fbo = p5.createFramebuffer(); + + // Resize the canvas (framebuffer should auto-resize) + p5.resizeCanvas(30, 30); + + // Draw to the framebuffer after resize + fbo.draw(() => { + p5.background(100, 0, 100); + p5.translate(-fbo.width / 2, -fbo.height / 2) + p5.fill(0, 255, 255); + p5.noStroke(); + // Draw a shape that fills the new size + p5.rect(5, 5, 20, 20); + p5.fill(255, 255, 0); + p5.circle(15, 15, 10); + }); + + // Draw the framebuffer to the main canvas + p5.texture(fbo); + p5.noStroke(); + p5.plane(30, 30); + + await screenshot(); + }, + ); + + visualTest( + "Fixed-size framebuffer after manual resize", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + // Create fixed-size framebuffer + const fbo = p5.createFramebuffer({ width: 20, height: 20 }); + + // Draw initial content + fbo.draw(() => { + p5.background(255, 200, 100); + p5.fill(0, 100, 200); + p5.noStroke(); + p5.circle(0, 0, 15); 
+ }); + + // Manually resize the framebuffer + fbo.resize(35, 25); + + // Draw new content to the resized framebuffer + fbo.draw(() => { + p5.background(200, 255, 100); + p5.translate(-fbo.width / 2, -fbo.height / 2) + p5.fill(200, 0, 100); + p5.noStroke(); + // Draw content that uses the new size + p5.rect(5, 5, 25, 15); + p5.fill(0, 0, 255); + p5.circle(17.5, 12.5, 8); + }); + + // Draw the resized framebuffer to the main canvas + p5.background(50); + p5.texture(fbo); + p5.noStroke(); + p5.plane(35, 25); + + await screenshot(); + }, + ); + }); +}); diff --git a/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/000.png b/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/000.png new file mode 100644 index 0000000000..96849ce04c Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/metadata.json b/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png new file mode 100644 index 0000000000..01be2eb74e Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas 
resize/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/000.png new file mode 100644 index 0000000000..c3171b7360 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/000.png new file mode 100644 index 0000000000..bd7facce45 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/000.png 
b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/000.png new file mode 100644 index 0000000000..1fb817b6b5 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/000.png new file mode 100644 index 0000000000..155638a0c8 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/Shader hooks can be used/000.png b/test/unit/visual/screenshots/WebGPU/Shaders/Shader hooks can be used/000.png new file mode 100644 index 0000000000..f883a461b5 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Shaders/Shader hooks can be used/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/Shader hooks can be 
used/metadata.json b/test/unit/visual/screenshots/WebGPU/Shaders/Shader hooks can be used/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Shaders/Shader hooks can be used/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/Textures in the material shader work/000.png b/test/unit/visual/screenshots/WebGPU/Shaders/Textures in the material shader work/000.png new file mode 100644 index 0000000000..b0e4b614b3 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Shaders/Textures in the material shader work/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/Textures in the material shader work/metadata.json b/test/unit/visual/screenshots/WebGPU/Shaders/Textures in the material shader work/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Shaders/Textures in the material shader work/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/The color shader runs successfully/000.png b/test/unit/visual/screenshots/WebGPU/Shaders/The color shader runs successfully/000.png new file mode 100644 index 0000000000..ef51c5937c Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Shaders/The color shader runs successfully/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/The color shader runs successfully/metadata.json b/test/unit/visual/screenshots/WebGPU/Shaders/The color shader runs successfully/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Shaders/The color shader runs successfully/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/The 
material shader runs successfully/000.png b/test/unit/visual/screenshots/WebGPU/Shaders/The material shader runs successfully/000.png new file mode 100644 index 0000000000..7bc3da3ae0 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Shaders/The material shader runs successfully/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/The material shader runs successfully/metadata.json b/test/unit/visual/screenshots/WebGPU/Shaders/The material shader runs successfully/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Shaders/The material shader runs successfully/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/000.png b/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/000.png new file mode 100644 index 0000000000..cd8d813075 Binary files /dev/null and b/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/000.png differ diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/metadata.json b/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/visualTest.js b/test/unit/visual/visualTest.js index 03e1e05a4a..0c9c4b9a74 100644 --- a/test/unit/visual/visualTest.js +++ b/test/unit/visual/visualTest.js @@ -429,8 +429,8 @@ export function visualTest( const actual = []; // Generate screenshots - await callback(myp5, () => { - const img = myp5.get(); + await callback(myp5, async () => { + const img = await myp5.get(); img.pixelDensity(1); 
actual.push(img); }); diff --git a/test/unit/webgl/light.js b/test/unit/webgl/light.js index add543c058..15fad185e1 100644 --- a/test/unit/webgl/light.js +++ b/test/unit/webgl/light.js @@ -76,6 +76,7 @@ suite('light', function() { }); suite('spotlight inputs', function() { + beforeEach(() => myp5.noLights()); let angle = Math.PI / 4; let defaultAngle = Math.cos(Math.PI / 3); let cosAngle = Math.cos(angle); diff --git a/test/unit/webgl/p5.Framebuffer.js b/test/unit/webgl/p5.Framebuffer.js index 57799b7dc5..ceea6dfc6c 100644 --- a/test/unit/webgl/p5.Framebuffer.js +++ b/test/unit/webgl/p5.Framebuffer.js @@ -156,7 +156,7 @@ suite('p5.Framebuffer', function() { expect(fbo.density).to.equal(1); // The texture should not be recreated - expect(fbo.color.rawTexture()).to.equal(oldTexture); + expect(fbo.color.rawTexture().texture).to.equal(oldTexture.texture); }); test('manually-sized framebuffers can be made auto-sized', function() { @@ -216,7 +216,7 @@ suite('p5.Framebuffer', function() { expect(fbo.density).to.equal(2); // The texture should not be recreated - expect(fbo.color.rawTexture()).to.equal(oldTexture); + expect(fbo.color.rawTexture().texture).to.equal(oldTexture.texture); }); test('resizes the framebuffer by createFramebuffer based on max texture size', function() { @@ -461,7 +461,7 @@ suite('p5.Framebuffer', function() { } }); - test('get() creates a p5.Image with 1x pixel density', function() { + test('get() creates a p5.Image matching the source pixel density', function() { const mainCanvas = myp5.createCanvas(20, 20, myp5.WEBGL); myp5.pixelDensity(2); const fbo = myp5.createFramebuffer(); @@ -482,22 +482,17 @@ suite('p5.Framebuffer', function() { myp5.pop(); }); const img = fbo.get(); - const p2d = myp5.createGraphics(20, 20); - p2d.pixelDensity(1); myp5.image(fbo, -10, -10); - p2d.image(mainCanvas, 0, 0); fbo.loadPixels(); img.loadPixels(); - p2d.loadPixels(); expect(img.width).to.equal(fbo.width); expect(img.height).to.equal(fbo.height); - 
expect(img.pixels.length).to.equal(fbo.pixels.length / 4); - // The pixels should be approximately the same in the 1x image as when we - // draw the framebuffer onto a 1x canvas + expect(img.pixels.length).to.equal(fbo.pixels.length); + // The pixels should be approximately the same as the framebuffer's for (let i = 0; i < img.pixels.length; i++) { - expect(img.pixels[i]).to.be.closeTo(p2d.pixels[i], 2); + expect(img.pixels[i]).to.be.closeTo(fbo.pixels[i], 2); } }); }); @@ -638,10 +633,10 @@ suite('p5.Framebuffer', function() { }); assert.equal( - fbo.color.framebuffer.colorP5Texture.glMinFilter, fbo.gl.NEAREST + fbo.color.framebuffer.colorP5Texture.minFilter, myp5.NEAREST ); assert.equal( - fbo.color.framebuffer.colorP5Texture.glMagFilter, fbo.gl.NEAREST + fbo.color.framebuffer.colorP5Texture.magFilter, myp5.NEAREST ); }); test('can create a framebuffer that uses LINEAR texture filtering', @@ -651,10 +646,10 @@ suite('p5.Framebuffer', function() { const fbo = myp5.createFramebuffer({}); assert.equal( - fbo.color.framebuffer.colorP5Texture.glMinFilter, fbo.gl.LINEAR + fbo.color.framebuffer.colorP5Texture.minFilter, myp5.LINEAR ); assert.equal( - fbo.color.framebuffer.colorP5Texture.glMagFilter, fbo.gl.LINEAR + fbo.color.framebuffer.colorP5Texture.magFilter, myp5.LINEAR ); }); }); diff --git a/test/unit/webgl/p5.RendererGL.js b/test/unit/webgl/p5.RendererGL.js index 7b84728695..a5f3e16b98 100644 --- a/test/unit/webgl/p5.RendererGL.js +++ b/test/unit/webgl/p5.RendererGL.js @@ -107,10 +107,9 @@ suite('p5.RendererGL', function() { // Make a red texture const tex = myp5.createFramebuffer(); tex.draw(() => myp5.background('red')); - console.log(tex.get().canvas.toDataURL()); myp5.shader(myShader); - myp5.fill('red'); + myp5.fill('blue') myp5.noStroke(); myShader.setUniform('myTex', tex); diff --git a/test/unit/webgl/p5.Shader.js b/test/unit/webgl/p5.Shader.js index 11a5797906..3ea9b07876 100644 --- a/test/unit/webgl/p5.Shader.js +++ b/test/unit/webgl/p5.Shader.js @@ 
-58,7 +58,6 @@ suite('p5.Shader', function() { 'uModelViewMatrix', 'uProjectionMatrix', 'uNormalMatrix', - 'uAmbientLightCount', 'uDirectionalLightCount', 'uPointLightCount', 'uAmbientColor', diff --git a/test/unit/webgl/p5.Texture.js b/test/unit/webgl/p5.Texture.js index 80512f0e49..60058b302d 100644 --- a/test/unit/webgl/p5.Texture.js +++ b/test/unit/webgl/p5.Texture.js @@ -67,6 +67,13 @@ suite('p5.Texture', function() { }; suite('p5.Texture', function() { + let texParamSpy; + beforeEach(() => { + texParamSpy = vi.spyOn(myp5._renderer.GL, 'texParameteri'); + }); + afterEach(() => { + vi.restoreAllMocks(); + }); test('Create and cache a single texture with p5.Image', function() { testTextureSet(texImg1); }); @@ -79,56 +86,57 @@ suite('p5.Texture', function() { test('Set filter mode to linear', function() { var tex = myp5._renderer.getTexture(texImg2); tex.setInterpolation(myp5.LINEAR, myp5.LINEAR); - assert.deepEqual(tex.glMinFilter, myp5._renderer.GL.LINEAR); - assert.deepEqual(tex.glMagFilter, myp5._renderer.GL.LINEAR); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_MIN_FILTER, myp5._renderer.GL.LINEAR); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_MAG_FILTER, myp5._renderer.GL.LINEAR); }); test('Set filter mode to nearest', function() { var tex = myp5._renderer.getTexture(texImg2); tex.setInterpolation(myp5.NEAREST, myp5.NEAREST); - assert.deepEqual(tex.glMinFilter, myp5._renderer.GL.NEAREST); - assert.deepEqual(tex.glMagFilter, myp5._renderer.GL.NEAREST); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_MIN_FILTER, myp5._renderer.GL.NEAREST); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_MAG_FILTER, myp5._renderer.GL.NEAREST); }); test('Set wrap mode to clamp', function() { var tex = myp5._renderer.getTexture(texImg2); tex.setWrapMode(myp5.CLAMP, 
myp5.CLAMP); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.CLAMP_TO_EDGE); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.CLAMP_TO_EDGE); }); test('Set wrap mode to repeat', function() { var tex = myp5._renderer.getTexture(texImg2); tex.setWrapMode(myp5.REPEAT, myp5.REPEAT); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.REPEAT); }); test('Set wrap mode to mirror', function() { var tex = myp5._renderer.getTexture(texImg2); tex.setWrapMode(myp5.MIRROR, myp5.MIRROR); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.MIRRORED_REPEAT); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.MIRRORED_REPEAT); }); test('Set wrap mode REPEAT if src dimensions is powerOfTwo', function() { const tex = myp5._renderer.getTexture(imgElementPowerOfTwo); tex.setWrapMode(myp5.REPEAT, myp5.REPEAT); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.REPEAT); + 
expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.REPEAT); }); test( 'Set default wrap mode REPEAT if WEBGL2 and src dimensions != powerOfTwo', function() { const tex = myp5._renderer.getTexture(imgElementNotPowerOfTwo); tex.setWrapMode(myp5.REPEAT, myp5.REPEAT); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.REPEAT); } ); test( 'Set default wrap mode CLAMP if WEBGL1 and src dimensions != powerOfTwo', function() { myp5.setAttributes({ version: 1 }); + texParamSpy = vi.spyOn(myp5._renderer.GL, 'texParameteri'); const tex = myp5._renderer.getTexture(imgElementNotPowerOfTwo); tex.setWrapMode(myp5.REPEAT, myp5.REPEAT); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.CLAMP_TO_EDGE); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.CLAMP_TO_EDGE); } ); test('Set textureMode to NORMAL', function() { @@ -143,28 +151,28 @@ suite('p5.Texture', function() { myp5.textureWrap(myp5.CLAMP); var tex1 = myp5._renderer.getTexture(texImg1); var tex2 = myp5._renderer.getTexture(texImg2); - assert.deepEqual(tex1.glWrapS, myp5._renderer.GL.CLAMP_TO_EDGE); - assert.deepEqual(tex1.glWrapT, myp5._renderer.GL.CLAMP_TO_EDGE); - assert.deepEqual(tex2.glWrapS, myp5._renderer.GL.CLAMP_TO_EDGE); - assert.deepEqual(tex2.glWrapT, myp5._renderer.GL.CLAMP_TO_EDGE); + 
expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.CLAMP_TO_EDGE); }); test('Set global wrap mode to repeat', function() { myp5.textureWrap(myp5.REPEAT); var tex1 = myp5._renderer.getTexture(texImg1); var tex2 = myp5._renderer.getTexture(texImg2); - assert.deepEqual(tex1.glWrapS, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex1.glWrapT, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex2.glWrapS, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex2.glWrapT, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.REPEAT); }); test('Set global wrap mode to mirror', function() { myp5.textureWrap(myp5.MIRROR); var tex1 = myp5._renderer.getTexture(texImg1); var tex2 = myp5._renderer.getTexture(texImg2); - assert.deepEqual(tex1.glWrapS, myp5._renderer.GL.MIRRORED_REPEAT); - assert.deepEqual(tex1.glWrapT, myp5._renderer.GL.MIRRORED_REPEAT); - assert.deepEqual(tex2.glWrapS, myp5._renderer.GL.MIRRORED_REPEAT); - assert.deepEqual(tex2.glWrapT, myp5._renderer.GL.MIRRORED_REPEAT); + 
expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.MIRRORED_REPEAT); }); test('Handles changes to p5.Image size', function() { const tex = myp5._renderer.getTexture(texImg2); diff --git a/test/unit/webgpu/p5.Framebuffer.js b/test/unit/webgpu/p5.Framebuffer.js new file mode 100644 index 0000000000..ccbadbc7a0 --- /dev/null +++ b/test/unit/webgpu/p5.Framebuffer.js @@ -0,0 +1,251 @@ +import p5 from '../../../src/app.js'; +import rendererWebGPU from "../../../src/webgpu/p5.RendererWebGPU"; + +p5.registerAddon(rendererWebGPU); + +suite('WebGPU p5.Framebuffer', function() { + let myp5; + let prevPixelRatio; + + beforeAll(async function() { + prevPixelRatio = window.devicePixelRatio; + window.devicePixelRatio = 1; + myp5 = new p5(function(p) { + p.setup = function() {}; + }); + }); + + beforeEach(async function() { + await myp5.createCanvas(10, 10, 'webgpu'); + }) + + afterAll(function() { + myp5.remove(); + window.devicePixelRatio = prevPixelRatio; + }); + + suite('Creation and basic properties', function() { + test('framebuffers can be created with WebGPU renderer', async function() { + const fbo = myp5.createFramebuffer(); + + expect(fbo).to.be.an('object'); + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + expect(fbo.autoSized()).to.equal(true); + }); + + test('framebuffers can be created with custom dimensions', async function() { + const fbo = myp5.createFramebuffer({ width: 20, height: 30 }); + + expect(fbo.width).to.equal(20); + 
expect(fbo.height).to.equal(30); + expect(fbo.autoSized()).to.equal(false); + }); + + test('framebuffers have color texture', async function() { + const fbo = myp5.createFramebuffer(); + + expect(fbo.color).to.be.an('object'); + expect(fbo.color.rawTexture).to.be.a('function'); + }); + + test('framebuffers can specify different formats', async function() { + const fbo = myp5.createFramebuffer({ + format: 'float', + channels: 'rgb' + }); + + expect(fbo).to.be.an('object'); + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + }); + }); + + suite('Auto-sizing behavior', function() { + test('auto-sized framebuffers change size with canvas', async function() { + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer(); + + expect(fbo.autoSized()).to.equal(true); + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + expect(fbo.density).to.equal(1); + + myp5.resizeCanvas(15, 20); + myp5.pixelDensity(2); + expect(fbo.width).to.equal(15); + expect(fbo.height).to.equal(20); + expect(fbo.density).to.equal(2); + }); + + test('manually-sized framebuffers do not change size with canvas', async function() { + myp5.pixelDensity(3); + const fbo = myp5.createFramebuffer({ width: 25, height: 30, density: 1 }); + + expect(fbo.autoSized()).to.equal(false); + expect(fbo.width).to.equal(25); + expect(fbo.height).to.equal(30); + expect(fbo.density).to.equal(1); + + myp5.resizeCanvas(5, 15); + myp5.pixelDensity(2); + expect(fbo.width).to.equal(25); + expect(fbo.height).to.equal(30); + expect(fbo.density).to.equal(1); + }); + + test('manually-sized framebuffers can be made auto-sized', async function() { + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer({ width: 25, height: 30, density: 2 }); + + expect(fbo.autoSized()).to.equal(false); + expect(fbo.width).to.equal(25); + expect(fbo.height).to.equal(30); + expect(fbo.density).to.equal(2); + + // Make it auto-sized + fbo.autoSized(true); + expect(fbo.autoSized()).to.equal(true); + + 
myp5.resizeCanvas(8, 12); + myp5.pixelDensity(3); + expect(fbo.width).to.equal(8); + expect(fbo.height).to.equal(12); + expect(fbo.density).to.equal(3); + }); + }); + + suite('Manual resizing', function() { + test('framebuffers can be manually resized', async function() { + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer(); + + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + expect(fbo.density).to.equal(1); + + fbo.resize(20, 25); + expect(fbo.width).to.equal(20); + expect(fbo.height).to.equal(25); + expect(fbo.autoSized()).to.equal(false); + }); + + test('resizing affects pixel density', async function() { + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer(); + + fbo.pixelDensity(3); + expect(fbo.density).to.equal(3); + + fbo.resize(15, 20); + fbo.pixelDensity(2); + expect(fbo.width).to.equal(15); + expect(fbo.height).to.equal(20); + expect(fbo.density).to.equal(2); + }); + }); + + suite('Drawing functionality', function() { + test('can draw to framebuffer with draw() method', async function() { + const fbo = myp5.createFramebuffer(); + + myp5.background(0, 255, 0); + + fbo.draw(() => { + myp5.background(0, 0, 255); + // myp5.fill(0, 255, 0); + }); + await myp5.loadPixels(); + // Drawing should have gone to the framebuffer, leaving the main + // canvas the same + expect([...myp5.pixels.slice(0, 3)]).toEqual([0, 255, 0]); + await fbo.loadPixels(); + // The framebuffer should have content + expect([...fbo.pixels.slice(0, 3)]).toEqual([0, 0, 255]); + + // The content can be drawn back to the main canvas + myp5.imageMode(myp5.CENTER); + myp5.image(fbo, 0, 0); + await myp5.loadPixels(); + expect([...fbo.pixels.slice(0, 3)]).toEqual([0, 0, 255]); + expect([...myp5.pixels.slice(0, 3)]).toEqual([0, 0, 255]); + }); + + test('can use framebuffer as texture', async function() { + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(255, 0, 0); + }); + + // Should not throw when used as texture + expect(() => { 
+ myp5.texture(fbo); + myp5.plane(10, 10); + }).to.not.throw(); + }); + }); + + suite('Pixel access', function() { + test('loadPixels returns a promise in WebGPU', async function() { + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(255, 0, 0); + }); + + const result = fbo.loadPixels(); + expect(result).to.be.a('promise'); + + const pixels = await result; + expect(pixels).toBeInstanceOf(Uint8Array); + expect(pixels.length).to.equal(10 * 10 * 4); + expect([...pixels.slice(0, 4)]).toEqual([255, 0, 0, 255]); + }); + + test('pixels property is set after loadPixels resolves', async function() { + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(100, 150, 200); + }); + + const pixels = await fbo.loadPixels(); + expect(fbo.pixels).to.equal(pixels); + expect(fbo.pixels.length).to.equal(10 * 10 * 4); + }); + + test('get() returns a promise for single pixel in WebGPU', async function() { + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(100, 150, 200); + }); + + const result = fbo.get(5, 5); + expect(result).to.be.a('promise'); + + const color = await result; + expect(color).to.be.an('array'); + expect(color).to.have.length(4); + expect([...color]).toEqual([100, 150, 200, 255]); + }); + + test('get() returns a promise for region in WebGPU', async function() { + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(100, 150, 200); + }); + + const result = fbo.get(2, 2, 4, 4); + expect(result).to.be.a('promise'); + + const region = await result; + expect(region).to.be.an('object'); // Should be a p5.Image + expect(region.width).to.equal(4); + expect(region.height).to.equal(4); + expect([...region.pixels.slice(0, 4)]).toEqual([100, 150, 200, 255]); + }); + }); +}); diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 21d479b2d8..5cc3794706 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -5,7 +5,8 @@ const plugins = [ vitePluginString({ include: 
[ 'src/webgl/shaders/**/*' - ] + ], + compress: false, }) ]; @@ -21,7 +22,7 @@ export default defineWorkspace([ ] }, test: { - name: 'unit', + name: 'unit-tests', root: './', include: [ './test/unit/**/*.js' @@ -30,6 +31,8 @@ export default defineWorkspace([ './test/unit/spec.js', './test/unit/assets/**/*', './test/unit/visual/visualTest.js', + './test/unit/visual/cases/webgpu.js', + './test/unit/webgpu/*.js', './test/types/**/*' ], testTimeout: 1000, @@ -38,11 +41,79 @@ export default defineWorkspace([ enabled: true, name: 'chrome', provider: 'webdriverio', - screenshotFailures: false + screenshotFailures: false, + providerOptions: { + capabilities: process.env.CI ? { + 'goog:chromeOptions': { + args: [ + '--no-sandbox', + '--headless=new', + '--enable-unsafe-webgpu', + '--use-vulkan=swiftshader', + '--use-webgpu-adapter=swiftshader', + '--use-angle=vulkan', + '--no-sandbox', + ] + } + } : undefined + } }, fakeTimers: { toFake: [...(configDefaults.fakeTimers.toFake ?? []), 'performance'] } } - } + }, + { + plugins, + publicDir: './test', + bench: { + name: 'bench', + root: './', + include: [ + './test/bench/**/*.js' + ], + }, + test: { + name: 'unit-tests-webgpu', + root: './', + include: [ + // './test/unit/**/*.js', + './test/unit/visual/cases/webgpu.js', + './test/unit/webgpu/*.js', + ], + exclude: [ + './test/unit/spec.js', + './test/unit/assets/**/*', + './test/unit/visual/visualTest.js', + // './test/unit/visual/cases/webgpu.js', + './test/types/**/*' + ], + testTimeout: 1000, + globals: true, + browser: { + enabled: true, + name: 'chrome', + provider: 'webdriverio', + screenshotFailures: false, + providerOptions: { + capabilities: process.env.CI ? { + 'goog:chromeOptions': { + args: [ + '--no-sandbox', + '--headless=new', + '--enable-unsafe-webgpu', + '--use-vulkan=swiftshader', + '--use-webgpu-adapter=swiftshader', + '--use-angle=vulkan', + '--no-sandbox', + ] + } + } : undefined + } + }, + fakeTimers: { + toFake: [...(configDefaults.fakeTimers.toFake ?? 
[]), 'performance'] + } + } + }, ]); \ No newline at end of file