Merge pull request #13461 from jahow/webgl-shape-renderer

WebGL vector renderer for polygons, lines and points
This commit is contained in:
Olivier Guyot
2022-07-22 10:05:31 +02:00
committed by GitHub
27 changed files with 3996 additions and 518 deletions

View File

@@ -0,0 +1,201 @@
/**
* @module ol/render/webgl/BatchRenderer
*/
import {WebGLWorkerMessageType} from './constants.js';
import {abstract} from '../../util.js';
import {
create as createTransform,
makeInverse as makeInverseTransform,
multiply as multiplyTransform,
translate as translateTransform,
} from '../../transform.js';
/**
* @typedef {Object} CustomAttribute A description of a custom attribute to be passed on to the GPU, with a value different
* for each feature.
* @property {string} name Attribute name.
* @property {function(import("../../Feature").default):number} callback This callback computes the numerical value of the
* attribute for a given feature.
*/
// Monotonically increasing counter used to give each worker message a unique
// id, so that a response can be matched to the request that produced it.
let workerMessageCounter = 0;
/**
* @classdesc Abstract class for batch renderers.
* Batch renderers are meant to render the geometries contained in a {@link module:ol/render/webgl/GeometryBatch}
* instance. They are responsible for generating render instructions and transforming them into WebGL buffers.
*/
class AbstractBatchRenderer {
  /**
   * @param {import("../../webgl/Helper.js").default} helper WebGL helper instance
   * @param {Worker} worker WebGL worker instance, used to generate vertex and index buffers off the main thread
   * @param {string} vertexShader Vertex shader
   * @param {string} fragmentShader Fragment shader
   * @param {Array<CustomAttribute>} customAttributes List of custom attributes
   */
  constructor(helper, worker, vertexShader, fragmentShader, customAttributes) {
    /**
     * @type {import("../../webgl/Helper.js").default}
     * @private
     */
    this.helper_ = helper;

    /**
     * @type {Worker}
     * @private
     */
    this.worker_ = worker;

    /**
     * Compiled shader program used in `render`; note that the helper
     * takes the fragment shader first.
     * @type {WebGLProgram}
     * @private
     */
    this.program_ = this.helper_.getProgram(fragmentShader, vertexShader);

    /**
     * A list of attributes used by the renderer; subclasses are expected to
     * fill this in with the layout matching their vertex buffers.
     * @type {Array<import('../../webgl/Helper.js').AttributeDescription>}
     * @protected
     */
    this.attributes = [];

    /**
     * @type {Array<CustomAttribute>}
     * @protected
     */
    this.customAttributes = customAttributes;
  }

  /**
   * Rebuild rendering instructions and webgl buffers based on the provided frame state
   * Note: this is a costly operation.
   * @param {import("./MixedGeometryBatch.js").GeometryBatch} batch Geometry batch
   * @param {import("../../PluggableMap").FrameState} frameState Frame state.
   * @param {import("../../geom/Geometry.js").Type} geometryType Geometry type
   * @param {function(): void} callback Function called once the render buffers are updated
   */
  rebuild(batch, frameState, geometryType, callback) {
    // store the world-to-screen transform used for generating the instructions,
    // so that `render` can later compensate for view changes
    batch.renderInstructionsTransform = this.helper_.makeProjectionTransform(
      frameState,
      createTransform()
    );
    this.generateRenderInstructions(batch);
    this.generateBuffers_(batch, geometryType, callback);
  }

  /**
   * Render the geometries in the batch. This will also update the current transform used for rendering according to
   * the invert transform of the webgl buffers
   * @param {import("./MixedGeometryBatch.js").GeometryBatch} batch Geometry batch
   * @param {import("../../transform.js").Transform} currentTransform Transform (updated in place)
   * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
   * @param {number} offsetX X offset
   */
  render(batch, currentTransform, frameState, offsetX) {
    // multiply the current projection transform with the invert of the one used to fill buffers
    this.helper_.makeProjectionTransform(frameState, currentTransform);
    translateTransform(currentTransform, offsetX, 0);
    multiplyTransform(currentTransform, batch.invertVerticesBufferTransform);

    // enable program, buffers and attributes
    this.helper_.useProgram(this.program_, frameState);
    this.helper_.bindBuffer(batch.verticesBuffer);
    this.helper_.bindBuffer(batch.indicesBuffer);
    this.helper_.enableAttributes(this.attributes);

    // draw everything currently in the index buffer
    const renderCount = batch.indicesBuffer.getSize();
    this.helper_.drawElements(0, renderCount);
  }

  /**
   * Rebuild rendering instructions based on the provided frame state
   * This is specific to the geometry type and has to be implemented by subclasses.
   * @param {import("./MixedGeometryBatch.js").GeometryBatch} batch Geometry batch
   * @protected
   */
  generateRenderInstructions(batch) {
    abstract();
  }

  /**
   * Rebuild internal webgl buffers for rendering based on the current rendering instructions;
   * This is asynchronous: webgl buffers will _not_ be updated right away
   * @param {import("./MixedGeometryBatch.js").GeometryBatch} batch Geometry batch
   * @param {import("../../geom/Geometry.js").Type} geometryType Geometry type
   * @param {function(): void} callback Function called once the render buffers are updated
   * @private
   */
  generateBuffers_(batch, geometryType, callback) {
    const messageId = workerMessageCounter++;
    let messageType;
    switch (geometryType) {
      case 'Polygon':
        messageType = WebGLWorkerMessageType.GENERATE_POLYGON_BUFFERS;
        break;
      case 'Point':
        messageType = WebGLWorkerMessageType.GENERATE_POINT_BUFFERS;
        break;
      case 'LineString':
        messageType = WebGLWorkerMessageType.GENERATE_LINE_STRING_BUFFERS;
        break;
      default:
      // pass
      // NOTE(review): for an unknown geometry type, `messageType` stays
      // undefined but the message is still posted below — confirm intended.
    }

    /** @type {import('./constants.js').WebGLWorkerGenerateBuffersMessage} */
    const message = {
      id: messageId,
      type: messageType,
      renderInstructions: batch.renderInstructions.buffer,
      renderInstructionsTransform: batch.renderInstructionsTransform,
      customAttributesCount: this.customAttributes.length,
    };
    // the render instructions ArrayBuffer is *transferred* to the worker
    this.worker_.postMessage(message, [batch.renderInstructions.buffer]);

    // ownership of the render instructions was given to the worker; the array
    // is unusable on this thread until the worker sends it back (see below)
    batch.renderInstructions = null;

    const handleMessage =
      /**
       * @param {*} event Event.
       * @this {AbstractBatchRenderer}
       */
      function (event) {
        const received = event.data;

        // this is not the response to our request: skip
        if (received.id !== messageId) {
          return;
        }

        // we've received our response: stop listening
        this.worker_.removeEventListener('message', handleMessage);

        // store transform & invert transform for webgl buffers
        batch.verticesBufferTransform = received.renderInstructionsTransform;
        makeInverseTransform(
          batch.invertVerticesBufferTransform,
          batch.verticesBufferTransform
        );

        // copy & flush received buffers to GPU
        batch.verticesBuffer.fromArrayBuffer(received.vertexBuffer);
        this.helper_.flushBufferData(batch.verticesBuffer);
        batch.indicesBuffer.fromArrayBuffer(received.indexBuffer);
        this.helper_.flushBufferData(batch.indicesBuffer);

        // take back ownership of the render instructions for further use
        batch.renderInstructions = new Float32Array(
          received.renderInstructions
        );

        callback();
      }.bind(this);
    this.worker_.addEventListener('message', handleMessage);
  }
}

export default AbstractBatchRenderer;

View File

@@ -0,0 +1,116 @@
/**
* @module ol/render/webgl/LineStringBatchRenderer
*/
import AbstractBatchRenderer from './BatchRenderer.js';
import {AttributeType} from '../../webgl/Helper.js';
import {transform2D} from '../../geom/flat/transform.js';
/**
* Names of attributes made available to the vertex shader.
* Please note: changing these *will* break custom shaders!
* @enum {string}
*/
export const Attributes = {
  // start point of the current segment (x, y)
  SEGMENT_START: 'a_segmentStart',
  // end point of the current segment (x, y)
  SEGMENT_END: 'a_segmentEnd',
  // single float carrying per-vertex parameters
  PARAMETERS: 'a_parameters',
};
class LineStringBatchRenderer extends AbstractBatchRenderer {
  /**
   * @param {import("../../webgl/Helper.js").default} helper WebGL helper instance
   * @param {Worker} worker WebGL worker instance
   * @param {string} vertexShader Vertex shader
   * @param {string} fragmentShader Fragment shader
   * @param {Array<import('./BatchRenderer.js').CustomAttribute>} customAttributes List of custom attributes
   */
  constructor(helper, worker, vertexShader, fragmentShader, customAttributes) {
    super(helper, worker, vertexShader, fragmentShader, customAttributes);
    // Each line vertex carries the segment start and end positions (x, y each)
    // plus a single parameters float; one extra float per custom attribute.
    this.attributes = [
      {
        name: Attributes.SEGMENT_START,
        size: 2,
        type: AttributeType.FLOAT,
      },
      {
        name: Attributes.SEGMENT_END,
        size: 2,
        type: AttributeType.FLOAT,
      },
      {
        name: Attributes.PARAMETERS,
        size: 1,
        type: AttributeType.FLOAT,
      },
      ...customAttributes.map((attribute) => ({
        name: 'a_' + attribute.name,
        size: 1,
        type: AttributeType.FLOAT,
      })),
    ];
  }

  /**
   * Render instructions for lines are structured like so:
   * [ customAttr0, ... , customAttrN, numberOfVertices0, x0, y0, ... , xN, yN, numberOfVertices1, ... ]
   * @param {import("./MixedGeometryBatch.js").LineStringGeometryBatch} batch Linestring geometry batch
   * @override
   */
  generateRenderInstructions(batch) {
    // Anticipated size of the instructions array:
    //   2 floats per vertex (x and y)
    // + 1 float per line per custom attribute
    // + 1 float per line (vertices count)
    const expectedLength =
      2 * batch.verticesCount +
      (1 + this.customAttributes.length) * batch.geometriesCount;
    if (
      !batch.renderInstructions ||
      batch.renderInstructions.length !== expectedLength
    ) {
      batch.renderInstructions = new Float32Array(expectedLength);
    }

    // walk all features and write their instructions sequentially
    const instructions = batch.renderInstructions;
    const projected = [];
    let cursor = 0;
    for (const featureUid in batch.entries) {
      const entry = batch.entries[featureUid];
      for (const flatCoords of entry.flatCoordss) {
        // project world coordinates to screen space
        projected.length = flatCoords.length;
        transform2D(
          flatCoords,
          0,
          flatCoords.length,
          2,
          batch.renderInstructionsTransform,
          projected
        );
        // custom attributes first
        for (const attribute of this.customAttributes) {
          instructions[cursor++] = attribute.callback(entry.feature);
        }
        // then the vertices count
        instructions[cursor++] = projected.length / 2;
        // then each point position
        for (let j = 0, jj = projected.length; j < jj; j += 2) {
          instructions[cursor++] = projected[j];
          instructions[cursor++] = projected[j + 1];
        }
      }
    }
  }
}
export default LineStringBatchRenderer;

View File

@@ -0,0 +1,364 @@
/**
* @module ol/render/webgl/MixedGeometryBatch
*/
import WebGLArrayBuffer from '../../webgl/Buffer.js';
import {ARRAY_BUFFER, DYNAMIC_DRAW, ELEMENT_ARRAY_BUFFER} from '../../webgl.js';
import {create as createTransform} from '../../transform.js';
import {getUid} from '../../util.js';
/**
* @typedef {Object} GeometryBatchItem Object that holds a reference to a feature as well as the raw coordinates of its various geometries
* @property {import("../../Feature").default} feature Feature
* @property {Array<Array<number>>} flatCoordss Array of flat coordinates arrays, one for each geometry related to the feature
* @property {number} [verticesCount] Only defined for linestring and polygon batches
* @property {number} [ringsCount] Only defined for polygon batches
* @property {Array<Array<number>>} [ringsVerticesCounts] Array of vertices counts in each ring for each geometry; only defined for polygons batches
*/
/**
* @typedef {PointGeometryBatch|LineStringGeometryBatch|PolygonGeometryBatch} GeometryBatch
*/
/**
* @typedef {Object} PolygonGeometryBatch A geometry batch specific to polygons
* @property {Object<string, GeometryBatchItem>} entries Dictionary of all entries in the batch with associated computed values.
* One entry corresponds to one feature. Key is feature uid.
* @property {number} geometriesCount Amount of geometries in the batch.
 * @property {Float32Array} renderInstructions Render instructions for polygons are structured like so:
 * [ customAttr0, ..., customAttrN, numberOfRings, numberOfVerticesInRing0, ..., numberOfVerticesInRingN, x0, y0, ..., xN, yN, numberOfRings, ... ]
* @property {WebGLArrayBuffer} verticesBuffer Vertices WebGL buffer
* @property {WebGLArrayBuffer} indicesBuffer Indices WebGL buffer
* @property {import("../../transform.js").Transform} renderInstructionsTransform Converts world space coordinates to screen space; applies to the rendering instructions
* @property {import("../../transform.js").Transform} verticesBufferTransform Converts world space coordinates to screen space; applies to the webgl vertices buffer
* @property {import("../../transform.js").Transform} invertVerticesBufferTransform Screen space to world space; applies to the webgl vertices buffer
* @property {number} verticesCount Amount of vertices from geometries in the batch.
* @property {number} ringsCount How many outer and inner rings in this batch.
*/
/**
* @typedef {Object} LineStringGeometryBatch A geometry batch specific to lines
* @property {Object<string, GeometryBatchItem>} entries Dictionary of all entries in the batch with associated computed values.
* One entry corresponds to one feature. Key is feature uid.
* @property {number} geometriesCount Amount of geometries in the batch.
 * @property {Float32Array} renderInstructions Render instructions for lines are structured like so:
 * [ customAttr0, ... , customAttrN, numberOfVertices0, x0, y0, ... , xN, yN, numberOfVertices1, ... ]
* @property {WebGLArrayBuffer} verticesBuffer Vertices WebGL buffer
* @property {WebGLArrayBuffer} indicesBuffer Indices WebGL buffer
* @property {import("../../transform.js").Transform} renderInstructionsTransform Converts world space coordinates to screen space; applies to the rendering instructions
* @property {import("../../transform.js").Transform} verticesBufferTransform Converts world space coordinates to screen space; applies to the webgl vertices buffer
* @property {import("../../transform.js").Transform} invertVerticesBufferTransform Screen space to world space; applies to the webgl vertices buffer
* @property {number} verticesCount Amount of vertices from geometries in the batch.
*/
/**
* @typedef {Object} PointGeometryBatch A geometry batch specific to points
* @property {Object<string, GeometryBatchItem>} entries Dictionary of all entries in the batch with associated computed values.
* One entry corresponds to one feature. Key is feature uid.
* @property {number} geometriesCount Amount of geometries in the batch.
 * @property {Float32Array} renderInstructions Render instructions for points are structured like so:
 * [ x0, y0, customAttr0, ... , xN, yN, customAttrN ]
* @property {WebGLArrayBuffer} verticesBuffer Vertices WebGL buffer
* @property {WebGLArrayBuffer} indicesBuffer Indices WebGL buffer
* @property {import("../../transform.js").Transform} renderInstructionsTransform Converts world space coordinates to screen space; applies to the rendering instructions
* @property {import("../../transform.js").Transform} verticesBufferTransform Converts world space coordinates to screen space; applies to the webgl vertices buffer
* @property {import("../../transform.js").Transform} invertVerticesBufferTransform Screen space to world space; applies to the webgl vertices buffer
*/
/**
* @classdesc This class is used to group several geometries of various types together for faster rendering.
* Three inner batches are maintained for polygons, lines and points. Each time a feature is added, changed or removed
* from the batch, these inner batches are modified accordingly in order to keep them up-to-date.
*
* A feature can be present in several inner batches, for example a polygon geometry will be present in the polygon batch
 * and its linear rings will be present in the line batch. Multi geometries are also broken down into individual geometries
* and added to the corresponding batches in a recursive manner.
*
* Corresponding {@link module:ol/render/webgl/BatchRenderer} instances are then used to generate the render instructions
* and WebGL buffers (vertices and indices) for each inner batches; render instructions are stored on the inner batches,
* alongside the transform used to convert world coords to screen coords at the time these instructions were generated.
* The resulting WebGL buffers are stored on the batches as well.
*
* An important aspect of geometry batches is that there is no guarantee that render instructions and WebGL buffers
* are synchronized, i.e. render instructions can describe a new state while WebGL buffers might not have been written yet.
* This is why two world-to-screen transforms are stored on each batch: one for the render instructions and one for
* the WebGL buffers.
*/
class MixedGeometryBatch {
  constructor() {
    /**
     * @type {PolygonGeometryBatch}
     */
    this.polygonBatch = {
      entries: {},
      geometriesCount: 0,
      verticesCount: 0,
      ringsCount: 0,
      renderInstructions: new Float32Array(0),
      verticesBuffer: new WebGLArrayBuffer(ARRAY_BUFFER, DYNAMIC_DRAW),
      indicesBuffer: new WebGLArrayBuffer(ELEMENT_ARRAY_BUFFER, DYNAMIC_DRAW),
      renderInstructionsTransform: createTransform(),
      verticesBufferTransform: createTransform(),
      invertVerticesBufferTransform: createTransform(),
    };

    /**
     * @type {PointGeometryBatch}
     */
    this.pointBatch = {
      entries: {},
      geometriesCount: 0,
      renderInstructions: new Float32Array(0),
      verticesBuffer: new WebGLArrayBuffer(ARRAY_BUFFER, DYNAMIC_DRAW),
      indicesBuffer: new WebGLArrayBuffer(ELEMENT_ARRAY_BUFFER, DYNAMIC_DRAW),
      renderInstructionsTransform: createTransform(),
      verticesBufferTransform: createTransform(),
      invertVerticesBufferTransform: createTransform(),
    };

    /**
     * @type {LineStringGeometryBatch}
     */
    this.lineStringBatch = {
      entries: {},
      geometriesCount: 0,
      verticesCount: 0,
      renderInstructions: new Float32Array(0),
      verticesBuffer: new WebGLArrayBuffer(ARRAY_BUFFER, DYNAMIC_DRAW),
      indicesBuffer: new WebGLArrayBuffer(ELEMENT_ARRAY_BUFFER, DYNAMIC_DRAW),
      renderInstructionsTransform: createTransform(),
      verticesBufferTransform: createTransform(),
      invertVerticesBufferTransform: createTransform(),
    };
  }

  /**
   * @param {Array<import("../../Feature").default>} features Array of features to add to the batch
   */
  addFeatures(features) {
    for (let i = 0; i < features.length; i++) {
      this.addFeature(features[i]);
    }
  }

  /**
   * @param {import("../../Feature").default} feature Feature to add to the batch
   */
  addFeature(feature) {
    const geometry = feature.getGeometry();
    if (!geometry) {
      return;
    }
    this.addGeometry_(geometry, feature);
  }

  /**
   * Create (or return the existing) entry for the feature in the point batch.
   * @param {import("../../Feature").default} feature Feature
   * @return {GeometryBatchItem} Batch item added (or existing one)
   * @private
   */
  addFeatureEntryInPointBatch_(feature) {
    const uid = getUid(feature);
    if (!(uid in this.pointBatch.entries)) {
      this.pointBatch.entries[uid] = {
        feature: feature,
        flatCoordss: [],
      };
    }
    return this.pointBatch.entries[uid];
  }

  /**
   * Create (or return the existing) entry for the feature in the line string batch.
   * @param {import("../../Feature").default} feature Feature
   * @return {GeometryBatchItem} Batch item added (or existing one)
   * @private
   */
  addFeatureEntryInLineStringBatch_(feature) {
    const uid = getUid(feature);
    if (!(uid in this.lineStringBatch.entries)) {
      this.lineStringBatch.entries[uid] = {
        feature: feature,
        flatCoordss: [],
        verticesCount: 0,
      };
    }
    return this.lineStringBatch.entries[uid];
  }

  /**
   * Create (or return the existing) entry for the feature in the polygon batch.
   * @param {import("../../Feature").default} feature Feature
   * @return {GeometryBatchItem} Batch item added (or existing one)
   * @private
   */
  addFeatureEntryInPolygonBatch_(feature) {
    const uid = getUid(feature);
    if (!(uid in this.polygonBatch.entries)) {
      this.polygonBatch.entries[uid] = {
        feature: feature,
        flatCoordss: [],
        verticesCount: 0,
        ringsCount: 0,
        ringsVerticesCounts: [],
      };
    }
    return this.polygonBatch.entries[uid];
  }

  /**
   * Remove the feature entry from the point batch and update batch counters.
   * @param {import("../../Feature").default} feature Feature
   * @private
   */
  clearFeatureEntryInPointBatch_(feature) {
    const uid = getUid(feature);
    const entry = this.pointBatch.entries[uid];
    if (!entry) {
      return;
    }
    this.pointBatch.geometriesCount -= entry.flatCoordss.length;
    delete this.pointBatch.entries[uid];
  }

  /**
   * Remove the feature entry from the line string batch and update batch counters.
   * @param {import("../../Feature").default} feature Feature
   * @private
   */
  clearFeatureEntryInLineStringBatch_(feature) {
    const uid = getUid(feature);
    const entry = this.lineStringBatch.entries[uid];
    if (!entry) {
      return;
    }
    this.lineStringBatch.verticesCount -= entry.verticesCount;
    this.lineStringBatch.geometriesCount -= entry.flatCoordss.length;
    delete this.lineStringBatch.entries[uid];
  }

  /**
   * Remove the feature entry from the polygon batch and update batch counters.
   * @param {import("../../Feature").default} feature Feature
   * @private
   */
  clearFeatureEntryInPolygonBatch_(feature) {
    const uid = getUid(feature);
    const entry = this.polygonBatch.entries[uid];
    if (!entry) {
      return;
    }
    this.polygonBatch.verticesCount -= entry.verticesCount;
    this.polygonBatch.ringsCount -= entry.ringsCount;
    this.polygonBatch.geometriesCount -= entry.flatCoordss.length;
    delete this.polygonBatch.entries[uid];
  }

  /**
   * Break a geometry down and add it to the relevant inner batch(es).
   * Collections and multi geometries are processed recursively; a polygon's
   * linear rings are also added to the line string batch (see class doc).
   * @param {import("../../geom").Geometry} geometry Geometry
   * @param {import("../../Feature").default} feature Feature
   * @private
   */
  addGeometry_(geometry, feature) {
    const type = geometry.getType();
    switch (type) {
      case 'GeometryCollection':
        /** @type {import("../../geom").GeometryCollection} */ (geometry)
          .getGeometries()
          .forEach((geom) => this.addGeometry_(geom, feature));
        break;
      case 'MultiPolygon':
        /** @type {import("../../geom").MultiPolygon} */ (geometry)
          .getPolygons()
          .forEach((polygon) => this.addGeometry_(polygon, feature));
        break;
      case 'MultiLineString':
        /** @type {import("../../geom").MultiLineString} */ (geometry)
          .getLineStrings()
          .forEach((line) => this.addGeometry_(line, feature));
        break;
      case 'MultiPoint':
        /** @type {import("../../geom").MultiPoint} */ (geometry)
          .getPoints()
          .forEach((point) => this.addGeometry_(point, feature));
        break;
      case 'Polygon': {
        const polygonGeom = /** @type {import("../../geom").Polygon} */ (
          geometry
        );
        const batchEntry = this.addFeatureEntryInPolygonBatch_(feature);
        const flatCoords = polygonGeom.getFlatCoordinates();
        const verticesCount = flatCoords.length / 2;
        const ringsCount = polygonGeom.getLinearRingCount();
        // convert flat-coordinate end indices into per-ring vertices counts
        const ringsVerticesCount = polygonGeom
          .getEnds()
          .map((end, ind, arr) =>
            ind > 0 ? (end - arr[ind - 1]) / 2 : end / 2
          );
        this.polygonBatch.verticesCount += verticesCount;
        this.polygonBatch.ringsCount += ringsCount;
        this.polygonBatch.geometriesCount++;
        batchEntry.flatCoordss.push(flatCoords);
        batchEntry.ringsVerticesCounts.push(ringsVerticesCount);
        batchEntry.verticesCount += verticesCount;
        batchEntry.ringsCount += ringsCount;
        // the polygon rings also go into the line string batch
        polygonGeom
          .getLinearRings()
          .forEach((ring) => this.addGeometry_(ring, feature));
        break;
      }
      case 'Point': {
        const pointGeom = /** @type {import("../../geom").Point} */ (geometry);
        const batchEntry = this.addFeatureEntryInPointBatch_(feature);
        this.pointBatch.geometriesCount++;
        batchEntry.flatCoordss.push(pointGeom.getFlatCoordinates());
        break;
      }
      case 'LineString':
      case 'LinearRing': {
        const lineGeom = /** @type {import("../../geom").LineString} */ (
          geometry
        );
        const batchEntry = this.addFeatureEntryInLineStringBatch_(feature);
        const flatCoords = lineGeom.getFlatCoordinates();
        const verticesCount = flatCoords.length / 2;
        this.lineStringBatch.verticesCount += verticesCount;
        this.lineStringBatch.geometriesCount++;
        batchEntry.flatCoordss.push(flatCoords);
        batchEntry.verticesCount += verticesCount;
        break;
      }
      default:
      // pass
    }
  }

  /**
   * Clear the feature's previous entries in all inner batches, then re-add
   * it based on its current geometry.
   * @param {import("../../Feature").default} feature Feature
   */
  changeFeature(feature) {
    this.clearFeatureEntryInPointBatch_(feature);
    this.clearFeatureEntryInPolygonBatch_(feature);
    this.clearFeatureEntryInLineStringBatch_(feature);
    const geometry = feature.getGeometry();
    if (!geometry) {
      return;
    }
    this.addGeometry_(geometry, feature);
  }

  /**
   * @param {import("../../Feature").default} feature Feature
   */
  removeFeature(feature) {
    this.clearFeatureEntryInPointBatch_(feature);
    this.clearFeatureEntryInPolygonBatch_(feature);
    this.clearFeatureEntryInLineStringBatch_(feature);
  }

  /**
   * Empty all inner batches; note that WebGL buffers, render instructions
   * and transforms are left untouched.
   */
  clear() {
    this.polygonBatch.entries = {};
    this.polygonBatch.geometriesCount = 0;
    this.polygonBatch.verticesCount = 0;
    this.polygonBatch.ringsCount = 0;

    this.lineStringBatch.entries = {};
    this.lineStringBatch.geometriesCount = 0;
    this.lineStringBatch.verticesCount = 0;

    this.pointBatch.entries = {};
    this.pointBatch.geometriesCount = 0;
  }
}
export default MixedGeometryBatch;

View File

@@ -0,0 +1,97 @@
/**
* @module ol/render/webgl/PointBatchRenderer
*/
import AbstractBatchRenderer from './BatchRenderer.js';
import {AttributeType} from '../../webgl/Helper.js';
import {apply as applyTransform} from '../../transform.js';
/**
* Names of attributes made available to the vertex shader.
* Please note: changing these *will* break custom shaders!
* @enum {string}
*/
export const Attributes = {
  // point position (x, y)
  POSITION: 'a_position',
  // index of the vertex within the point's quad (four corners per point)
  INDEX: 'a_index',
};
class PointBatchRenderer extends AbstractBatchRenderer {
  /**
   * @param {import("../../webgl/Helper.js").default} helper WebGL helper instance
   * @param {Worker} worker WebGL worker instance
   * @param {string} vertexShader Vertex shader
   * @param {string} fragmentShader Fragment shader
   * @param {Array<import('./BatchRenderer.js').CustomAttribute>} customAttributes List of custom attributes
   */
  constructor(helper, worker, vertexShader, fragmentShader, customAttributes) {
    super(helper, worker, vertexShader, fragmentShader, customAttributes);
    // Each point vertex carries a position (x, y) and an index (its position
    // in the quad); one extra float per custom attribute.
    this.attributes = [
      {
        name: Attributes.POSITION,
        size: 2,
        type: AttributeType.FLOAT,
      },
      {
        name: Attributes.INDEX,
        size: 1,
        type: AttributeType.FLOAT,
      },
      ...customAttributes.map((attribute) => ({
        name: 'a_' + attribute.name,
        size: 1,
        type: AttributeType.FLOAT,
      })),
    ];
  }

  /**
   * Render instructions for points are structured like so:
   * [ x0, y0, customAttr0, ... , xN, yN, customAttrN ]
   * @param {import("./MixedGeometryBatch.js").PointGeometryBatch} batch Point geometry batch
   * @override
   */
  generateRenderInstructions(batch) {
    // Anticipated size of the instructions array:
    //   2 floats per point (x and y)
    // + 1 float per point per custom attribute
    const expectedLength =
      (2 + this.customAttributes.length) * batch.geometriesCount;
    if (
      !batch.renderInstructions ||
      batch.renderInstructions.length !== expectedLength
    ) {
      batch.renderInstructions = new Float32Array(expectedLength);
    }

    // walk all features and write their instructions sequentially
    const instructions = batch.renderInstructions;
    const point = [];
    let cursor = 0;
    for (const featureUid in batch.entries) {
      const entry = batch.entries[featureUid];
      for (const flatCoords of entry.flatCoordss) {
        // project the point to screen space
        point[0] = flatCoords[0];
        point[1] = flatCoords[1];
        applyTransform(batch.renderInstructionsTransform, point);
        instructions[cursor++] = point[0];
        instructions[cursor++] = point[1];
        // custom attributes follow the position
        for (const attribute of this.customAttributes) {
          instructions[cursor++] = attribute.callback(entry.feature);
        }
      }
    }
  }
}
export default PointBatchRenderer;

View File

@@ -0,0 +1,117 @@
/**
* @module ol/render/webgl/PolygonBatchRenderer
*/
import AbstractBatchRenderer from './BatchRenderer.js';
import {AttributeType} from '../../webgl/Helper.js';
import {transform2D} from '../../geom/flat/transform.js';
/**
* Names of attributes made available to the vertex shader.
* Please note: changing these *will* break custom shaders!
* @enum {string}
*/
export const Attributes = {
  // polygon vertex position (x, y)
  POSITION: 'a_position',
};
class PolygonBatchRenderer extends AbstractBatchRenderer {
  /**
   * @param {import("../../webgl/Helper.js").default} helper WebGL helper instance
   * @param {Worker} worker WebGL worker instance
   * @param {string} vertexShader Vertex shader
   * @param {string} fragmentShader Fragment shader
   * @param {Array<import('./BatchRenderer.js').CustomAttribute>} customAttributes List of custom attributes
   */
  constructor(helper, worker, vertexShader, fragmentShader, customAttributes) {
    super(helper, worker, vertexShader, fragmentShader, customAttributes);
    // Polygons only need a position attribute by default; one extra float
    // per custom attribute.
    this.attributes = [
      {
        name: Attributes.POSITION,
        size: 2,
        type: AttributeType.FLOAT,
      },
      ...customAttributes.map((attribute) => ({
        name: 'a_' + attribute.name,
        size: 1,
        type: AttributeType.FLOAT,
      })),
    ];
  }

  /**
   * Render instructions for polygons are structured like so:
   * [ customAttr0, ..., customAttrN, numberOfRings, numberOfVerticesInRing0, ..., numberOfVerticesInRingN, x0, y0, ..., xN, yN, numberOfRings,... ]
   * @param {import("./MixedGeometryBatch.js").PolygonGeometryBatch} batch Polygon geometry batch
   * @override
   */
  generateRenderInstructions(batch) {
    // Anticipated size of the instructions array:
    //   2 floats per vertex (x and y)
    // + 1 float per polygon per custom attribute
    // + 1 float per polygon (ring count)
    // + 1 float per ring (vertices count in that ring)
    const expectedLength =
      2 * batch.verticesCount +
      (1 + this.customAttributes.length) * batch.geometriesCount +
      batch.ringsCount;
    if (
      !batch.renderInstructions ||
      batch.renderInstructions.length !== expectedLength
    ) {
      batch.renderInstructions = new Float32Array(expectedLength);
    }

    // walk all features and write their instructions sequentially
    const instructions = batch.renderInstructions;
    const projected = [];
    let cursor = 0;
    for (const featureUid in batch.entries) {
      const entry = batch.entries[featureUid];
      for (let i = 0, ii = entry.flatCoordss.length; i < ii; i++) {
        const flatCoords = entry.flatCoordss[i];
        // project world coordinates to screen space
        projected.length = flatCoords.length;
        transform2D(
          flatCoords,
          0,
          flatCoords.length,
          2,
          batch.renderInstructionsTransform,
          projected
        );
        // custom attributes first
        for (const attribute of this.customAttributes) {
          instructions[cursor++] = attribute.callback(entry.feature);
        }
        // then the ring count
        const ringsVerticesCounts = entry.ringsVerticesCounts[i];
        instructions[cursor++] = ringsVerticesCounts.length;
        // then the vertices count of each ring
        for (let j = 0, jj = ringsVerticesCounts.length; j < jj; j++) {
          instructions[cursor++] = ringsVerticesCounts[j];
        }
        // then each point position
        for (let j = 0, jj = projected.length; j < jj; j += 2) {
          instructions[cursor++] = projected[j];
          instructions[cursor++] = projected[j + 1];
        }
      }
    }
  }
}
export default PolygonBatchRenderer;

View File

@@ -0,0 +1,27 @@
/**
* @module ol/render/webgl/constants
*/
/**
* @enum {string}
*/
export const WebGLWorkerMessageType = {
  // generate vertex/index buffers from polygon render instructions
  GENERATE_POLYGON_BUFFERS: 'GENERATE_POLYGON_BUFFERS',
  // generate vertex/index buffers from point render instructions
  GENERATE_POINT_BUFFERS: 'GENERATE_POINT_BUFFERS',
  // generate vertex/index buffers from line string render instructions
  GENERATE_LINE_STRING_BUFFERS: 'GENERATE_LINE_STRING_BUFFERS',
};
/**
* @typedef {Object} WebGLWorkerGenerateBuffersMessage
* This message will trigger the generation of a vertex and an index buffer based on the given render instructions.
 * When the buffers are generated, the worker will send a message of the same type to the main thread, with
 * the generated buffers in it.
 * Note that any additional properties present in the message *will* be sent back to the main thread.
* @property {number} id Message id; will be used both in request and response as a means of identification
* @property {WebGLWorkerMessageType} type Message type
* @property {ArrayBuffer} renderInstructions Polygon render instructions raw binary buffer.
* @property {number} [customAttributesCount] Amount of custom attributes count in the polygon render instructions.
* @property {ArrayBuffer} [vertexBuffer] Vertices array raw binary buffer (sent by the worker).
* @property {ArrayBuffer} [indexBuffer] Indices array raw binary buffer (sent by the worker).
* @property {import("../../transform").Transform} [renderInstructionsTransform] Transformation matrix used to project the instructions coordinates
*/

View File

@@ -0,0 +1,351 @@
/**
* @module ol/render/webgl/utils
*/
import earcut from 'earcut';
import {apply as applyTransform} from '../../transform.js';
import {clamp} from '../../math.js';
// Scratch array reused across calls to avoid per-call allocations
// (e.g. to hold custom attribute values while writing buffers).
const tmpArray_ = [];
/**
* An object holding positions both in an index and a vertex buffer.
* @typedef {Object} BufferPositions
* @property {number} vertexPosition Position in the vertex buffer
* @property {number} indexPosition Position in the index buffer
*/
const bufferPositions_ = {vertexPosition: 0, indexPosition: 0};
/**
 * Writes the three base attributes of one point vertex (x, y, corner index)
 * into a vertex buffer at the given offset.
 * @param {Float32Array} buffer Target vertex buffer.
 * @param {number} pos Write offset inside the buffer.
 * @param {number} x X coordinate.
 * @param {number} y Y coordinate.
 * @param {number} index Corner index of the quad.
 */
function writePointVertex(buffer, pos, x, y, index) {
  const values = [x, y, index];
  for (let offset = 0; offset < values.length; offset++) {
    buffer[pos + offset] = values[offset];
  }
}
/**
 * Pushes a quad (two triangles) based on a point geometry
 * @param {Float32Array} instructions Array of render instructions for points.
 * @param {number} elementIndex Index from which render instructions will be read.
 * @param {Float32Array} vertexBuffer Buffer in the form of a typed array.
 * @param {Uint32Array} indexBuffer Buffer in the form of a typed array.
 * @param {number} customAttributesCount Amount of custom attributes for each element.
 * @param {BufferPositions} [bufferPositions] Buffer write positions; if not specified, positions will be set at 0.
 * @return {BufferPositions} New buffer positions where to write next
 * @property {number} vertexPosition New position in the vertex buffer where future writes should start.
 * @property {number} indexPosition New position in the index buffer where future writes should start.
 * @private
 */
export function writePointFeatureToBuffers(
  instructions,
  elementIndex,
  vertexBuffer,
  indexBuffer,
  customAttributesCount,
  bufferPositions
) {
  // x, y and the corner index make up the base attributes of every vertex
  const baseVertexAttrsCount = 3;
  const baseInstructionsCount = 2;
  const stride = baseVertexAttrsCount + customAttributesCount;

  const x = instructions[elementIndex];
  const y = instructions[elementIndex + 1];

  // read the feature's custom numerical attributes into the shared scratch array
  const customAttrs = tmpArray_;
  customAttrs.length = customAttributesCount;
  for (let i = 0; i < customAttributesCount; i++) {
    customAttrs[i] = instructions[elementIndex + baseInstructionsCount + i];
  }

  let vPos = bufferPositions ? bufferPositions.vertexPosition : 0;
  let iPos = bufferPositions ? bufferPositions.indexPosition : 0;
  const baseIndex = vPos / stride;

  // one vertex per quad corner: base attributes first, then the custom ones
  for (let corner = 0; corner < 4; corner++) {
    writePointVertex(vertexBuffer, vPos, x, y, corner);
    if (customAttrs.length) {
      vertexBuffer.set(customAttrs, vPos + baseVertexAttrsCount);
    }
    vPos += stride;
  }

  // two triangles forming the quad
  indexBuffer[iPos++] = baseIndex;
  indexBuffer[iPos++] = baseIndex + 1;
  indexBuffer[iPos++] = baseIndex + 3;
  indexBuffer[iPos++] = baseIndex + 1;
  indexBuffer[iPos++] = baseIndex + 2;
  indexBuffer[iPos++] = baseIndex + 3;

  // the returned object is a module-level singleton, reused across calls
  bufferPositions_.vertexPosition = vPos;
  bufferPositions_.indexPosition = iPos;
  return bufferPositions_;
}
/**
* Pushes a single quad to form a line segment; also includes a computation for the join angles with previous and next
* segment, in order to be able to offset the vertices correctly in the shader
* @param {Float32Array} instructions Array of render instructions for lines.
* @param {number} segmentStartIndex Index of the segment start point from which render instructions will be read.
 * @param {number} segmentEndIndex Index of the segment end point from which render instructions will be read.
* @param {number|null} beforeSegmentIndex Index of the point right before the segment (null if none, e.g this is a line start)
* @param {number|null} afterSegmentIndex Index of the point right after the segment (null if none, e.g this is a line end)
* @param {Array<number>} vertexArray Array containing vertices.
* @param {Array<number>} indexArray Array containing indices.
* @param {Array<number>} customAttributes Array of custom attributes value
* @param {import('../../transform.js').Transform} instructionsTransform Transform matrix used to project coordinates in instructions
 * @param {import('../../transform.js').Transform} invertInstructionsTransform Inverse of the above transform, used to reproject instruction coordinates back to world units
* @private
*/
export function writeLineSegmentToBuffers(
  instructions,
  segmentStartIndex,
  segmentEndIndex,
  beforeSegmentIndex,
  afterSegmentIndex,
  vertexArray,
  indexArray,
  customAttributes,
  instructionsTransform,
  invertInstructionsTransform
) {
  // note: instructionsTransform is currently not read in this implementation;
  // only the inverse transform is needed to go back to world coordinates
  // compute the stride to determine how many vertices were already pushed
  const baseVertexAttrsCount = 5; // base attributes: x0, y0, x1, y1, params (vertex number [0-3], join angle 1, join angle 2)
  const stride = baseVertexAttrsCount + customAttributes.length;
  const baseIndex = vertexArray.length / stride;
  // The segment is composed of two positions called P0[x0, y0] and P1[x1, y1]
  // Depending on whether there are points before and after the segment, its final shape
  // will be different
  const p0 = [
    instructions[segmentStartIndex + 0],
    instructions[segmentStartIndex + 1],
  ];
  const p1 = [instructions[segmentEndIndex], instructions[segmentEndIndex + 1]];
  // to compute offsets from the line center we need to reproject
  // coordinates back in world units and compute the length of the segment
  // (copies are passed because applyTransform mutates its coordinate argument)
  const p0world = applyTransform(invertInstructionsTransform, [...p0]);
  const p1world = applyTransform(invertInstructionsTransform, [...p1]);
  // Packs the vertex number ([0-3]) and the two join angles into a single float:
  // each angle is quantized to 1/1500 radian and occupies one base-10000 "digit",
  // the vertex number occupies the next one.
  function computeVertexParameters(vertexNumber, joinAngle1, joinAngle2) {
    const shift = 10000;
    const anglePrecision = 1500;
    return (
      Math.round(joinAngle1 * anglePrecision) +
      Math.round(joinAngle2 * anglePrecision) * shift +
      vertexNumber * shift * shift
    );
  }
  // compute the angle between p0pA and p0pB
  // returns a value in [0, 2PI]
  function angleBetween(p0, pA, pB) {
    const lenA = Math.sqrt(
      (pA[0] - p0[0]) * (pA[0] - p0[0]) + (pA[1] - p0[1]) * (pA[1] - p0[1])
    );
    const tangentA = [(pA[0] - p0[0]) / lenA, (pA[1] - p0[1]) / lenA];
    // orthoA is tangentA rotated by 90°, used below to determine the winding direction
    const orthoA = [-tangentA[1], tangentA[0]];
    const lenB = Math.sqrt(
      (pB[0] - p0[0]) * (pB[0] - p0[0]) + (pB[1] - p0[1]) * (pB[1] - p0[1])
    );
    const tangentB = [(pB[0] - p0[0]) / lenB, (pB[1] - p0[1]) / lenB];
    // this angle can be clockwise or anticlockwise; hence the computation afterwards
    // (zero-length branches yield 0; the dot product is clamped to keep acos defined)
    const angle =
      lenA === 0 || lenB === 0
        ? 0
        : Math.acos(
            clamp(tangentB[0] * tangentA[0] + tangentB[1] * tangentA[1], -1, 1)
          );
    const isClockwise = tangentB[0] * orthoA[0] + tangentB[1] * orthoA[1] > 0;
    return !isClockwise ? Math.PI * 2 - angle : angle;
  }
  // a null neighbor index means this segment is at a line start/end (no join)
  const joinBefore = beforeSegmentIndex !== null;
  const joinAfter = afterSegmentIndex !== null;
  let angle0 = 0;
  let angle1 = 0;
  // add vertices and adapt offsets for P0 in case of join
  if (joinBefore) {
    // B for before
    const pB = [
      instructions[beforeSegmentIndex],
      instructions[beforeSegmentIndex + 1],
    ];
    const pBworld = applyTransform(invertInstructionsTransform, [...pB]);
    angle0 = angleBetween(p0world, p1world, pBworld);
  }
  // adapt offsets for P1 in case of join
  if (joinAfter) {
    // A for after
    const pA = [
      instructions[afterSegmentIndex],
      instructions[afterSegmentIndex + 1],
    ];
    const pAworld = applyTransform(invertInstructionsTransform, [...pA]);
    angle1 = angleBetween(p1world, p0world, pAworld);
  }
  // add main segment triangles
  // all four corners carry the full segment (P0 and P1) plus their own vertex
  // number and both join angles; per the doc above, offsetting happens in the shader
  vertexArray.push(
    p0[0],
    p0[1],
    p1[0],
    p1[1],
    computeVertexParameters(0, angle0, angle1)
  );
  vertexArray.push(...customAttributes);
  vertexArray.push(
    p0[0],
    p0[1],
    p1[0],
    p1[1],
    computeVertexParameters(1, angle0, angle1)
  );
  vertexArray.push(...customAttributes);
  vertexArray.push(
    p0[0],
    p0[1],
    p1[0],
    p1[1],
    computeVertexParameters(2, angle0, angle1)
  );
  vertexArray.push(...customAttributes);
  vertexArray.push(
    p0[0],
    p0[1],
    p1[0],
    p1[1],
    computeVertexParameters(3, angle0, angle1)
  );
  vertexArray.push(...customAttributes);
  // two triangles forming the segment quad
  indexArray.push(
    baseIndex,
    baseIndex + 1,
    baseIndex + 2,
    baseIndex + 1,
    baseIndex + 3,
    baseIndex + 2
  );
}
/**
 * Pushes several triangles to form a polygon, including holes
 * @param {Float32Array} instructions Array of render instructions for polygons.
 * @param {number} polygonStartIndex Index of the polygon start point from which render instructions will be read.
 * @param {Array<number>} vertexArray Array containing vertices.
 * @param {Array<number>} indexArray Array containing indices.
 * @param {number} customAttributesCount Amount of custom attributes for each element.
 * @return {number} Next polygon instructions index
 * @private
 */
export function writePolygonTrianglesToBuffers(
  instructions,
  polygonStartIndex,
  vertexArray,
  indexArray,
  customAttributesCount
) {
  const instructionsPerVertex = 2; // x, y
  const attributesPerVertex = 2 + customAttributesCount;
  let readIndex = polygonStartIndex;

  // the custom attribute values come first; they are shared by all vertices of the polygon
  const customAttributes = instructions.slice(
    readIndex,
    readIndex + customAttributesCount
  );
  readIndex += customAttributesCount;

  // ring layout: ring count, then the vertex count of each ring;
  // every ring after the first one is a hole
  const ringsCount = instructions[readIndex++];
  let verticesCount = 0;
  const holes = new Array(ringsCount - 1);
  for (let ring = 0; ring < ringsCount; ring++) {
    verticesCount += instructions[readIndex++];
    if (ring < ringsCount - 1) {
      holes[ring] = verticesCount;
    }
  }
  const flatCoords = instructions.slice(
    readIndex,
    readIndex + verticesCount * instructionsPerVertex
  );

  // triangulate, then append indices shifted by the amount of vertices already present
  const triangleIndices = earcut(flatCoords, holes, instructionsPerVertex);
  const indexOffset = vertexArray.length / attributesPerVertex;
  for (const triangleIndex of triangleIndices) {
    indexArray.push(triangleIndex + indexOffset);
  }
  for (let i = 0; i < flatCoords.length; i += instructionsPerVertex) {
    vertexArray.push(flatCoords[i], flatCoords[i + 1], ...customAttributes);
  }
  return readIndex + verticesCount * instructionsPerVertex;
}
/**
 * Returns a texture of 1x1 pixel, white
 * @private
 * @return {ImageData} Image data.
 */
export function getBlankImageData() {
  const context = document.createElement('canvas').getContext('2d');
  const image = context.createImageData(1, 1);
  // single opaque white pixel: all four RGBA bytes set to 255
  image.data.fill(255);
  return image;
}
/**
 * Generates a color array based on a numerical id
 * Note: the range for each component is 0 to 1 with 256 steps
 * @param {number} id Id
 * @param {Array<number>} [opt_array] Reusable array
 * @return {Array<number>} Color array containing the encoded id
 */
export function colorEncodeId(id, opt_array) {
  const array = opt_array || [];
  const radix = 256;
  const divide = radix - 1;
  // split the id into four base-256 digits, most significant first,
  // then normalize each digit to the [0, 1] range
  const byte0 = Math.floor(id / radix / radix / radix);
  const byte1 = Math.floor(id / radix / radix) % radix;
  const byte2 = Math.floor(id / radix) % radix;
  const byte3 = id % radix;
  array[0] = byte0 / divide;
  array[1] = byte1 / divide;
  array[2] = byte2 / divide;
  array[3] = byte3 / divide;
  return array;
}
/**
 * Reads an id from a color-encoded array
 * Note: the expected range for each component is 0 to 1 with 256 steps.
 * @param {Array<number>} color Color array containing the encoded id
 * @return {number} Decoded id
 */
export function colorDecodeId(color) {
  const radix = 256;
  const mult = radix - 1;
  // channel weights are 256^3, 256^2, 256^1, 256^0 (most significant first);
  // dividing by 256 each iteration is exact in floating point
  let id = 0;
  let weight = radix * radix * radix;
  for (let component = 0; component < 4; component++) {
    id += Math.round(color[component] * weight * mult);
    weight /= radix;
  }
  return id;
}

View File

@@ -13,26 +13,6 @@ import {
} from '../../transform.js';
import {containsCoordinate} from '../../extent.js';
/**
* @enum {string}
*/
export const WebGLWorkerMessageType = {
GENERATE_BUFFERS: 'GENERATE_BUFFERS',
};
/**
* @typedef {Object} WebGLWorkerGenerateBuffersMessage
* This message will trigger the generation of a vertex and an index buffer based on the given render instructions.
 * When the buffers are generated, the worker will send a message of the same type to the main thread, with
* the generated buffers in it.
 * Note that any additional properties present in the message *will* be sent back to the main thread.
* @property {WebGLWorkerMessageType} type Message type
* @property {ArrayBuffer} renderInstructions Render instructions raw binary buffer.
* @property {ArrayBuffer} [vertexBuffer] Vertices array raw binary buffer (sent by the worker).
* @property {ArrayBuffer} [indexBuffer] Indices array raw binary buffer (sent by the worker).
* @property {number} [customAttributesCount] Amount of custom attributes count in the render instructions.
*/
/**
* @typedef {Object} PostProcessesOptions
* @property {number} [scaleRatio] Scale ratio; if < 1, the post process will render to a texture smaller than
@@ -343,144 +323,4 @@ class WebGLLayerRenderer extends LayerRenderer {
}
}
const tmpArray_ = [];
const bufferPositions_ = {vertexPosition: 0, indexPosition: 0};
function writePointVertex(buffer, pos, x, y, index) {
buffer[pos + 0] = x;
buffer[pos + 1] = y;
buffer[pos + 2] = index;
}
/**
* An object holding positions both in an index and a vertex buffer.
* @typedef {Object} BufferPositions
* @property {number} vertexPosition Position in the vertex buffer
* @property {number} indexPosition Position in the index buffer
*/
/**
* Pushes a quad (two triangles) based on a point geometry
* @param {Float32Array} instructions Array of render instructions for points.
* @param {number} elementIndex Index from which render instructions will be read.
* @param {Float32Array} vertexBuffer Buffer in the form of a typed array.
* @param {Uint32Array} indexBuffer Buffer in the form of a typed array.
* @param {number} customAttributesCount Amount of custom attributes for each element.
* @param {BufferPositions} [bufferPositions] Buffer write positions; if not specified, positions will be set at 0.
* @return {BufferPositions} New buffer positions where to write next
* @property {number} vertexPosition New position in the vertex buffer where future writes should start.
* @property {number} indexPosition New position in the index buffer where future writes should start.
* @private
*/
export function writePointFeatureToBuffers(
instructions,
elementIndex,
vertexBuffer,
indexBuffer,
customAttributesCount,
bufferPositions
) {
// This is for x, y and index
const baseVertexAttrsCount = 3;
const baseInstructionsCount = 2;
const stride = baseVertexAttrsCount + customAttributesCount;
const x = instructions[elementIndex + 0];
const y = instructions[elementIndex + 1];
// read custom numerical attributes on the feature
const customAttrs = tmpArray_;
customAttrs.length = customAttributesCount;
for (let i = 0; i < customAttrs.length; i++) {
customAttrs[i] = instructions[elementIndex + baseInstructionsCount + i];
}
let vPos = bufferPositions ? bufferPositions.vertexPosition : 0;
let iPos = bufferPositions ? bufferPositions.indexPosition : 0;
const baseIndex = vPos / stride;
// push vertices for each of the four quad corners (first standard then custom attributes)
writePointVertex(vertexBuffer, vPos, x, y, 0);
customAttrs.length &&
vertexBuffer.set(customAttrs, vPos + baseVertexAttrsCount);
vPos += stride;
writePointVertex(vertexBuffer, vPos, x, y, 1);
customAttrs.length &&
vertexBuffer.set(customAttrs, vPos + baseVertexAttrsCount);
vPos += stride;
writePointVertex(vertexBuffer, vPos, x, y, 2);
customAttrs.length &&
vertexBuffer.set(customAttrs, vPos + baseVertexAttrsCount);
vPos += stride;
writePointVertex(vertexBuffer, vPos, x, y, 3);
customAttrs.length &&
vertexBuffer.set(customAttrs, vPos + baseVertexAttrsCount);
vPos += stride;
indexBuffer[iPos++] = baseIndex;
indexBuffer[iPos++] = baseIndex + 1;
indexBuffer[iPos++] = baseIndex + 3;
indexBuffer[iPos++] = baseIndex + 1;
indexBuffer[iPos++] = baseIndex + 2;
indexBuffer[iPos++] = baseIndex + 3;
bufferPositions_.vertexPosition = vPos;
bufferPositions_.indexPosition = iPos;
return bufferPositions_;
}
/**
* Returns a texture of 1x1 pixel, white
* @private
* @return {ImageData} Image data.
*/
export function getBlankImageData() {
const canvas = document.createElement('canvas');
const image = canvas.getContext('2d').createImageData(1, 1);
image.data[0] = 255;
image.data[1] = 255;
image.data[2] = 255;
image.data[3] = 255;
return image;
}
/**
* Generates a color array based on a numerical id
* Note: the range for each component is 0 to 1 with 256 steps
* @param {number} id Id
* @param {Array<number>} [opt_array] Reusable array
* @return {Array<number>} Color array containing the encoded id
*/
export function colorEncodeId(id, opt_array) {
const array = opt_array || [];
const radix = 256;
const divide = radix - 1;
array[0] = Math.floor(id / radix / radix / radix) / divide;
array[1] = (Math.floor(id / radix / radix) % radix) / divide;
array[2] = (Math.floor(id / radix) % radix) / divide;
array[3] = (id % radix) / divide;
return array;
}
/**
* Reads an id from a color-encoded array
* Note: the expected range for each component is 0 to 1 with 256 steps.
* @param {Array<number>} color Color array containing the encoded id
* @return {number} Decoded id
*/
export function colorDecodeId(color) {
let id = 0;
const radix = 256;
const mult = radix - 1;
id += Math.round(color[0] * radix * radix * radix * mult);
id += Math.round(color[1] * radix * radix * mult);
id += Math.round(color[2] * radix * mult);
id += Math.round(color[3] * mult);
return id;
}
export default WebGLLayerRenderer;

View File

@@ -5,14 +5,11 @@ import BaseVector from '../../layer/BaseVector.js';
import VectorEventType from '../../source/VectorEventType.js';
import ViewHint from '../../ViewHint.js';
import WebGLArrayBuffer from '../../webgl/Buffer.js';
import WebGLLayerRenderer, {
WebGLWorkerMessageType,
colorDecodeId,
colorEncodeId,
} from './Layer.js';
import WebGLLayerRenderer from './Layer.js';
import WebGLRenderTarget from '../../webgl/RenderTarget.js';
import {ARRAY_BUFFER, DYNAMIC_DRAW, ELEMENT_ARRAY_BUFFER} from '../../webgl.js';
import {AttributeType, DefaultUniform} from '../../webgl/Helper.js';
import {WebGLWorkerMessageType} from '../../render/webgl/constants.js';
import {
apply as applyTransform,
create as createTransform,
@@ -22,6 +19,7 @@ import {
} from '../../transform.js';
import {assert} from '../../asserts.js';
import {buffer, createEmpty, equals, getWidth} from '../../extent.js';
import {colorDecodeId, colorEncodeId} from '../../render/webgl/utils.js';
import {create as createWebGLWorker} from '../../worker/webgl.js';
import {getUid} from '../../util.js';
import {listen, unlistenByKey} from '../../events.js';
@@ -294,7 +292,11 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
*/
this.generateBuffersRun_ = 0;
/**
* @private
*/
this.worker_ = createWebGLWorker();
this.worker_.addEventListener(
'message',
/**
@@ -303,7 +305,7 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
*/
function (event) {
const received = event.data;
if (received.type === WebGLWorkerMessageType.GENERATE_BUFFERS) {
if (received.type === WebGLWorkerMessageType.GENERATE_POINT_BUFFERS) {
const projectionTransform = received.projectionTransform;
if (received.hitDetection) {
this.hitVerticesBuffer_.fromArrayBuffer(received.vertexBuffer);
@@ -540,7 +542,7 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
this.previousExtent_ = frameState.extent.slice();
}
this.helper.useProgram(this.program_);
this.helper.useProgram(this.program_, frameState);
this.helper.prepareDraw(frameState);
// write new data
@@ -637,9 +639,10 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
}
}
/** @type {import('./Layer').WebGLWorkerGenerateBuffersMessage} */
/** @type {import('../../render/webgl/constants.js').WebGLWorkerGenerateBuffersMessage} */
const message = {
type: WebGLWorkerMessageType.GENERATE_BUFFERS,
id: 0,
type: WebGLWorkerMessageType.GENERATE_POINT_BUFFERS,
renderInstructions: this.renderInstructions_.buffer,
customAttributesCount: this.customAttributes.length,
};
@@ -650,10 +653,11 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
this.worker_.postMessage(message, [this.renderInstructions_.buffer]);
this.renderInstructions_ = null;
/** @type {import('./Layer').WebGLWorkerGenerateBuffersMessage} */
/** @type {import('../../render/webgl/constants.js').WebGLWorkerGenerateBuffersMessage} */
if (this.hitDetectionEnabled_) {
const hitMessage = {
type: WebGLWorkerMessageType.GENERATE_BUFFERS,
id: 0,
type: WebGLWorkerMessageType.GENERATE_POINT_BUFFERS,
renderInstructions: this.hitRenderInstructions_.buffer,
customAttributesCount: 5 + this.customAttributes.length,
};
@@ -726,7 +730,7 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
Math.floor(frameState.size[1] / 2),
]);
this.helper.useProgram(this.hitProgram_);
this.helper.useProgram(this.hitProgram_, frameState);
this.helper.prepareDrawToRenderTarget(
frameState,
this.hitRenderTarget_,

View File

@@ -508,7 +508,7 @@ class WebGLTileLayerRenderer extends WebGLLayerRenderer {
}
}
this.helper.useProgram(this.program_);
this.helper.useProgram(this.program_, frameState);
this.helper.prepareDraw(frameState, !blend);
const zs = Object.keys(tileTexturesByZ)

View File

@@ -0,0 +1,409 @@
/**
* @module ol/renderer/webgl/VectorLayer
*/
import BaseVector from '../../layer/BaseVector.js';
import LineStringBatchRenderer from '../../render/webgl/LineStringBatchRenderer.js';
import MixedGeometryBatch from '../../render/webgl/MixedGeometryBatch.js';
import PointBatchRenderer from '../../render/webgl/PointBatchRenderer.js';
import PolygonBatchRenderer from '../../render/webgl/PolygonBatchRenderer.js';
import VectorEventType from '../../source/VectorEventType.js';
import ViewHint from '../../ViewHint.js';
import WebGLLayerRenderer from './Layer.js';
import {DefaultUniform} from '../../webgl/Helper.js';
import {
FILL_FRAGMENT_SHADER,
FILL_VERTEX_SHADER,
POINT_FRAGMENT_SHADER,
POINT_VERTEX_SHADER,
STROKE_FRAGMENT_SHADER,
STROKE_VERTEX_SHADER,
packColor,
} from './shaders.js';
import {buffer, createEmpty, equals, getWidth} from '../../extent.js';
import {create as createTransform} from '../../transform.js';
import {create as createWebGLWorker} from '../../worker/webgl.js';
import {listen, unlistenByKey} from '../../events.js';
/**
* @typedef {function(import("../../Feature").default, Object<string, *>):number} CustomAttributeCallback A callback computing
* the value of a custom attribute (different for each feature) to be passed on to the GPU.
* Properties are available as 2nd arg for quicker access.
*/
/**
* @typedef {Object} ShaderProgram An object containing both shaders (vertex and fragment) as well as the required attributes
* @property {string} [vertexShader] Vertex shader source (using the default one if unspecified).
* @property {string} [fragmentShader] Fragment shader source (using the default one if unspecified).
* @property {Object<import("./shaders.js").DefaultAttributes,CustomAttributeCallback>} attributes Custom attributes made available in the vertex shader.
* Keys are the names of the attributes which are then accessible in the vertex shader using the `a_` prefix, e.g.: `a_opacity`.
* Default shaders rely on the attributes in {@link module:ol/render/webgl/shaders~DefaultAttributes}.
*/
/**
* @typedef {Object} Options
* @property {string} [className='ol-layer'] A CSS class name to set to the canvas element.
* @property {ShaderProgram} [fill] Attributes and shaders for filling polygons.
* @property {ShaderProgram} [stroke] Attributes and shaders for line strings and polygon strokes.
* @property {ShaderProgram} [point] Attributes and shaders for points.
* @property {Object<string,import("../../webgl/Helper").UniformValue>} [uniforms] Uniform definitions.
* @property {Array<import("./Layer").PostProcessesOptions>} [postProcesses] Post-processes definitions
*/
/**
 * Converts a lookup of attribute getters into the descriptor array format
 * expected by the batch renderers.
 * @param {Object<import("./shaders.js").DefaultAttributes,CustomAttributeCallback>} obj Lookup of attribute getters.
 * @return {Array<import("../../render/webgl/BatchRenderer").CustomAttribute>} An array of attribute descriptors.
 */
function toAttributesArray(obj) {
  return Object.entries(obj).map(([name, callback]) => ({name, callback}));
}
/**
* @classdesc
* Experimental WebGL vector renderer. Supports polygons, lines and points:
* * Polygons are broken down into triangles
* * Lines are rendered as strips of quads
* * Points are rendered as quads
*
* You need to provide vertex and fragment shaders as well as custom attributes for each type of geometry. All shaders
* can access the uniforms in the {@link module:ol/webgl/Helper~DefaultUniform} enum.
* The vertex shaders can access the following attributes depending on the geometry type:
* * For polygons: {@link module:ol/render/webgl/PolygonBatchRenderer~Attributes}
* * For line strings: {@link module:ol/render/webgl/LineStringBatchRenderer~Attributes}
* * For points: {@link module:ol/render/webgl/PointBatchRenderer~Attributes}
*
* Please note that the fragment shaders output should have premultiplied alpha, otherwise visual anomalies may occur.
*
* Note: this uses {@link module:ol/webgl/Helper~WebGLHelper} internally.
*/
class WebGLVectorLayerRenderer extends WebGLLayerRenderer {
  /**
   * @param {import("../../layer/Layer.js").default} layer Layer.
   * @param {Options} options Options.
   */
  constructor(layer, options) {
    const uniforms = options.uniforms || {};
    // this transform is exposed to all shaders as the projection matrix uniform
    const projectionMatrixTransform = createTransform();
    uniforms[DefaultUniform.PROJECTION_MATRIX] = projectionMatrixTransform;

    super(layer, {
      uniforms: uniforms,
      postProcesses: options.postProcesses,
    });

    /**
     * Revision of the source as of the last buffer rebuild.
     * @type {number}
     * @private
     */
    this.sourceRevision_ = -1;

    /**
     * Extent rendered during the last buffer rebuild.
     * @type {import("../../extent.js").Extent}
     * @private
     */
    this.previousExtent_ = createEmpty();

    /**
     * This transform is updated on every frame and is the composition of:
     * - invert of the world->screen transform that was used when rebuilding buffers (see `this.renderTransform_`)
     * - current world->screen transform
     * @type {import("../../transform.js").Transform}
     * @private
     */
    this.currentTransform_ = projectionMatrixTransform;

    // default attribute getters for each geometry type; user-provided
    // attributes (e.g. options.fill.attributes) take precedence via spread
    const fillAttributes = {
      color: function () {
        return packColor('#ddd');
      },
      opacity: function () {
        return 1;
      },
      ...(options.fill && options.fill.attributes),
    };
    const strokeAttributes = {
      color: function () {
        return packColor('#eee');
      },
      opacity: function () {
        return 1;
      },
      width: function () {
        return 1.5;
      },
      ...(options.stroke && options.stroke.attributes),
    };
    const pointAttributes = {
      color: function () {
        return packColor('#eee');
      },
      opacity: function () {
        return 1;
      },
      ...(options.point && options.point.attributes),
    };

    // shader sources and attribute descriptors, falling back to the defaults
    // from ./shaders.js when not provided in the options
    this.fillVertexShader_ =
      (options.fill && options.fill.vertexShader) || FILL_VERTEX_SHADER;
    this.fillFragmentShader_ =
      (options.fill && options.fill.fragmentShader) || FILL_FRAGMENT_SHADER;
    this.fillAttributes_ = toAttributesArray(fillAttributes);
    this.strokeVertexShader_ =
      (options.stroke && options.stroke.vertexShader) || STROKE_VERTEX_SHADER;
    this.strokeFragmentShader_ =
      (options.stroke && options.stroke.fragmentShader) ||
      STROKE_FRAGMENT_SHADER;
    this.strokeAttributes_ = toAttributesArray(strokeAttributes);
    this.pointVertexShader_ =
      (options.point && options.point.vertexShader) || POINT_VERTEX_SHADER;
    this.pointFragmentShader_ =
      (options.point && options.point.fragmentShader) || POINT_FRAGMENT_SHADER;
    this.pointAttributes_ = toAttributesArray(pointAttributes);

    /**
     * Worker used by the batch renderers to generate buffers off the main thread.
     * @private
     */
    this.worker_ = createWebGLWorker();

    /**
     * Holds the source geometries split by type (polygon/line string/point batches).
     * @private
     */
    this.batch_ = new MixedGeometryBatch();
    const source = this.getLayer().getSource();
    this.batch_.addFeatures(source.getFeatures());

    // keep the batch in sync with the source
    this.sourceListenKeys_ = [
      listen(
        source,
        VectorEventType.ADDFEATURE,
        this.handleSourceFeatureAdded_,
        this
      ),
      listen(
        source,
        VectorEventType.CHANGEFEATURE,
        this.handleSourceFeatureChanged_,
        this
      ),
      listen(
        source,
        VectorEventType.REMOVEFEATURE,
        this.handleSourceFeatureDelete_,
        this
      ),
      listen(
        source,
        VectorEventType.CLEAR,
        this.handleSourceFeatureClear_,
        this
      ),
    ];
  }

  /**
   * Instantiates the polygon, point and line string batch renderers;
   * expected to run once `this.helper` has been created (see base class).
   */
  afterHelperCreated() {
    this.polygonRenderer_ = new PolygonBatchRenderer(
      this.helper,
      this.worker_,
      this.fillVertexShader_,
      this.fillFragmentShader_,
      this.fillAttributes_
    );
    this.pointRenderer_ = new PointBatchRenderer(
      this.helper,
      this.worker_,
      this.pointVertexShader_,
      this.pointFragmentShader_,
      this.pointAttributes_
    );
    this.lineStringRenderer_ = new LineStringBatchRenderer(
      this.helper,
      this.worker_,
      this.strokeVertexShader_,
      this.strokeFragmentShader_,
      this.strokeAttributes_
    );
  }

  /**
   * @param {import("../../source/Vector.js").VectorSourceEvent} event Event.
   * @private
   */
  handleSourceFeatureAdded_(event) {
    const feature = event.feature;
    this.batch_.addFeature(feature);
  }

  /**
   * @param {import("../../source/Vector.js").VectorSourceEvent} event Event.
   * @private
   */
  handleSourceFeatureChanged_(event) {
    const feature = event.feature;
    this.batch_.changeFeature(feature);
  }

  /**
   * @param {import("../../source/Vector.js").VectorSourceEvent} event Event.
   * @private
   */
  handleSourceFeatureDelete_(event) {
    const feature = event.feature;
    this.batch_.removeFeature(feature);
  }

  /**
   * @private
   */
  handleSourceFeatureClear_() {
    this.batch_.clear();
  }

  /**
   * Render the layer.
   * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
   * @return {HTMLElement} The rendered element.
   */
  renderFrame(frameState) {
    const gl = this.helper.getGL();
    this.preRender(gl, frameState);

    const layer = this.getLayer();
    const vectorSource = layer.getSource();
    const projection = frameState.viewState.projection;
    const multiWorld = vectorSource.getWrapX() && projection.canWrapX();
    const projectionExtent = projection.getExtent();
    const extent = frameState.extent;
    // when wrapping, render the batches once per visible world copy, each
    // shifted by one world width; otherwise render once with a 0 offset
    // (world is 0 and worldWidth is null, so world * worldWidth is 0)
    const worldWidth = multiWorld ? getWidth(projectionExtent) : null;
    const endWorld = multiWorld
      ? Math.ceil((extent[2] - projectionExtent[2]) / worldWidth) + 1
      : 1;
    let world = multiWorld
      ? Math.floor((extent[0] - projectionExtent[0]) / worldWidth)
      : 0;
    do {
      this.polygonRenderer_.render(
        this.batch_.polygonBatch,
        this.currentTransform_,
        frameState,
        world * worldWidth
      );
      this.lineStringRenderer_.render(
        this.batch_.lineStringBatch,
        this.currentTransform_,
        frameState,
        world * worldWidth
      );
      this.pointRenderer_.render(
        this.batch_.pointBatch,
        this.currentTransform_,
        frameState,
        world * worldWidth
      );
    } while (++world < endWorld);

    this.helper.finalizeDraw(frameState);

    // sync the canvas opacity with the layer opacity
    const canvas = this.helper.getCanvas();
    const layerState = frameState.layerStatesArray[frameState.layerIndex];
    const opacity = layerState.opacity;
    if (opacity !== parseFloat(canvas.style.opacity)) {
      canvas.style.opacity = String(opacity);
    }

    this.postRender(gl, frameState);
    return canvas;
  }

  /**
   * Determine whether renderFrame should be called.
   * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
   * @return {boolean} Layer is ready to be rendered.
   */
  prepareFrameInternal(frameState) {
    const layer = this.getLayer();
    const vectorSource = layer.getSource();
    const viewState = frameState.viewState;
    const viewNotMoving =
      !frameState.viewHints[ViewHint.ANIMATING] &&
      !frameState.viewHints[ViewHint.INTERACTING];
    const extentChanged = !equals(this.previousExtent_, frameState.extent);
    const sourceChanged = this.sourceRevision_ < vectorSource.getRevision();
    if (sourceChanged) {
      this.sourceRevision_ = vectorSource.getRevision();
    }

    // only rebuild the buffers when the view is at rest and either the extent
    // or the source content changed since the last rebuild
    if (viewNotMoving && (extentChanged || sourceChanged)) {
      const projection = viewState.projection;
      const resolution = viewState.resolution;

      const renderBuffer =
        layer instanceof BaseVector ? layer.getRenderBuffer() : 0;
      const extent = buffer(frameState.extent, renderBuffer * resolution);
      vectorSource.loadFeatures(extent, resolution, projection);

      // the layer becomes ready again once all three renderers have rebuilt
      this.ready = false;
      let remaining = 3;
      const rebuildCb = () => {
        remaining--;
        this.ready = remaining <= 0;
        this.getLayer().changed();
      };

      this.polygonRenderer_.rebuild(
        this.batch_.polygonBatch,
        frameState,
        'Polygon',
        rebuildCb
      );
      this.lineStringRenderer_.rebuild(
        this.batch_.lineStringBatch,
        frameState,
        'LineString',
        rebuildCb
      );
      this.pointRenderer_.rebuild(
        this.batch_.pointBatch,
        frameState,
        'Point',
        rebuildCb
      );
      this.previousExtent_ = frameState.extent.slice();
    }

    this.helper.makeProjectionTransform(frameState, this.currentTransform_);
    this.helper.prepareDraw(frameState);

    return true;
  }

  /**
   * Hit detection is not implemented by this renderer yet; always returns undefined.
   * @param {import("../../coordinate.js").Coordinate} coordinate Coordinate.
   * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
   * @param {number} hitTolerance Hit tolerance in pixels.
   * @param {import("../vector.js").FeatureCallback<T>} callback Feature callback.
   * @param {Array<import("../Map.js").HitMatch<T>>} matches The hit detected matches with tolerance.
   * @return {T|undefined} Callback result.
   * @template T
   */
  forEachFeatureAtCoordinate(
    coordinate,
    frameState,
    hitTolerance,
    callback,
    matches
  ) {
    return undefined;
  }

  /**
   * Clean up.
   */
  disposeInternal() {
    this.worker_.terminate();
    // NOTE(review): layer_ is owned by the base LayerRenderer; confirm that
    // clearing it here (in addition to the base class disposal) is intended
    this.layer_ = null;
    this.sourceListenKeys_.forEach(function (key) {
      unlistenByKey(key);
    });
    this.sourceListenKeys_ = null;
    super.disposeInternal();
  }
}
export default WebGLVectorLayerRenderer;

View File

@@ -0,0 +1,198 @@
/**
* @module ol/renderer/webgl/shaders
*/
import {asArray} from '../../color.js';
/** @typedef {'color'|'opacity'|'width'} DefaultAttributes */
/**
 * Packs the red/green/blue channels of a color into one float value; the
 * alpha channel is discarded. Shaders are expected to decode the color
 * from this packed representation.
 * @param {import("../../color.js").Color|string} color Color as array of numbers or string
 * @return {number} Float value containing the color
 */
export function packColor(color) {
  const [red, green, blue] = asArray(color);
  return red * 65536 + green * 256 + blue;
}
// GLSL expression that inverts packColor(): extracts the normalized
// (0..1) red/green/blue channels from the float-packed `a_color`
// attribute. The trailing semicolon is part of the expression, so
// interpolation sites must not add their own.
const DECODE_COLOR_EXPRESSION = `vec3(
fract(floor(a_color / 256.0 / 256.0) / 256.0),
fract(floor(a_color / 256.0) / 256.0),
fract(a_color / 256.0)
);`;
/**
 * Default polygon vertex shader.
 * Relies on the color and opacity attributes: projects `a_position`
 * through the projection matrix and forwards the unpacked color (see
 * packColor) and opacity to the fragment shader as varyings.
 * @type {string}
 */
export const FILL_VERTEX_SHADER = `
precision mediump float;
uniform mat4 u_projectionMatrix;
attribute vec2 a_position;
attribute float a_color;
attribute float a_opacity;
varying vec3 v_color;
varying float v_opacity;
void main(void) {
gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0);
v_color = ${DECODE_COLOR_EXPRESSION}
v_opacity = a_opacity;
}`;
/**
 * Default polygon fragment shader.
 * Outputs the interpolated color with all four components multiplied by
 * opacity, i.e. premultiplied alpha.
 * @type {string}
 */
export const FILL_FRAGMENT_SHADER = `
precision mediump float;
varying vec3 v_color;
varying float v_opacity;
void main(void) {
gl_FragColor = vec4(v_color, 1.0) * v_opacity;
}`;
/**
 * Default linestring vertex shader.
 * Relies on color, opacity and width attributes.
 *
 * Each line segment is expanded into a quad of four vertices. The
 * `a_parameters` attribute packs three values per vertex (decoded in
 * main() with `paramShift`/`anglePrecision`): the join angles at the
 * segment start and end (each quantized by the 1500 precision factor and
 * stored as a base-10000 "digit"), and the vertex number (0..3) within
 * the quad stored above them. Vertices are pushed sideways by half the
 * line width along the join-angle bisector (getOffsetDirection) so
 * consecutive segments meet without gaps; near-straight joins
 * (cos(angle) > 0.93) take a simpler miter-like shortcut.
 *
 * NOTE(review): pxToScreen scales by 4.0 / u_sizePx where a plain
 * pixel-to-NDC conversion would use 2.0 — presumably this compensates
 * for another factor elsewhere (pixel ratio?); TODO confirm.
 * @type {string}
 */
export const STROKE_VERTEX_SHADER = `
precision mediump float;
uniform mat4 u_projectionMatrix;
uniform vec2 u_sizePx;
attribute vec2 a_segmentStart;
attribute vec2 a_segmentEnd;
attribute float a_parameters;
attribute float a_color;
attribute float a_opacity;
attribute float a_width;
varying vec2 v_segmentStart;
varying vec2 v_segmentEnd;
varying float v_angleStart;
varying float v_angleEnd;
varying vec3 v_color;
varying float v_opacity;
varying float v_width;
vec2 worldToPx(vec2 worldPos) {
vec4 screenPos = u_projectionMatrix * vec4(worldPos, 0.0, 1.0);
return (0.5 * screenPos.xy + 0.5) * u_sizePx;
}
vec4 pxToScreen(vec2 pxPos) {
vec2 screenPos = pxPos * 4.0 / u_sizePx;
return vec4(screenPos.xy, 0.0, 0.0);
}
vec2 getOffsetDirection(vec2 normalPx, vec2 tangentPx, float joinAngle) {
if (cos(joinAngle) > 0.93) return normalPx - tangentPx;
float halfAngle = joinAngle / 2.0;
vec2 angleBisectorNormal = vec2(
sin(halfAngle) * normalPx.x + cos(halfAngle) * normalPx.y,
-cos(halfAngle) * normalPx.x + sin(halfAngle) * normalPx.y
);
float length = 1.0 / sin(halfAngle);
return angleBisectorNormal * length;
}
void main(void) {
float anglePrecision = 1500.0;
float paramShift = 10000.0;
v_angleStart = fract(a_parameters / paramShift) * paramShift / anglePrecision;
v_angleEnd = fract(floor(a_parameters / paramShift + 0.5) / paramShift) * paramShift / anglePrecision;
float vertexNumber = floor(a_parameters / paramShift / paramShift + 0.0001);
vec2 tangentPx = worldToPx(a_segmentEnd) - worldToPx(a_segmentStart);
tangentPx = normalize(tangentPx);
vec2 normalPx = vec2(-tangentPx.y, tangentPx.x);
float normalDir = vertexNumber < 0.5 || (vertexNumber > 1.5 && vertexNumber < 2.5) ? 1.0 : -1.0;
float tangentDir = vertexNumber < 1.5 ? 1.0 : -1.0;
float angle = vertexNumber < 1.5 ? v_angleStart : v_angleEnd;
vec2 offsetPx = getOffsetDirection(normalPx * normalDir, tangentDir * tangentPx, angle) * a_width * 0.5;
vec2 position = vertexNumber < 1.5 ? a_segmentStart : a_segmentEnd;
gl_Position = u_projectionMatrix * vec4(position, 0.0, 1.0) + pxToScreen(offsetPx);
v_segmentStart = worldToPx(a_segmentStart);
v_segmentEnd = worldToPx(a_segmentEnd);
v_color = ${DECODE_COLOR_EXPRESSION}
v_opacity = a_opacity;
v_width = a_width;
}`;
/**
 * Default linestring fragment shader.
 * Uses a distance field of the segment (distance from the fragment to
 * the closest point on the segment, clamped to the segment extent) to
 * produce a coverage factor with a one-pixel smoothstep falloff at the
 * stroke edge; the premultiplied color is scaled by that coverage.
 * gl_FragCoord is divided by the pixel ratio so distances are computed
 * in the same CSS-pixel space as the varyings from the vertex shader.
 * @type {string}
 */
export const STROKE_FRAGMENT_SHADER = `
precision mediump float;
uniform float u_pixelRatio;
varying vec2 v_segmentStart;
varying vec2 v_segmentEnd;
varying float v_angleStart;
varying float v_angleEnd;
varying vec3 v_color;
varying float v_opacity;
varying float v_width;
float segmentDistanceField(vec2 point, vec2 start, vec2 end, float radius) {
vec2 startToPoint = point - start;
vec2 startToEnd = end - start;
float ratio = clamp(dot(startToPoint, startToEnd) / dot(startToEnd, startToEnd), 0.0, 1.0);
float dist = length(startToPoint - ratio * startToEnd);
return 1.0 - smoothstep(radius - 1.0, radius, dist);
}
void main(void) {
vec2 v_currentPoint = gl_FragCoord.xy / u_pixelRatio;
gl_FragColor = vec4(v_color, 1.0) * v_opacity;
gl_FragColor *= segmentDistanceField(v_currentPoint, v_segmentStart, v_segmentEnd, v_width);
}`;
/**
 * Default point vertex shader.
 * Relies on color and opacity attributes.
 * Expands each point into a quad of fixed size (hard-coded 6 px);
 * `a_index` (0..3) selects the quad corner, determining both the pixel
 * offset applied through the offset/scale matrix and the texture
 * coordinate emitted for that corner.
 * @type {string}
 */
export const POINT_VERTEX_SHADER = `
precision mediump float;
uniform mat4 u_projectionMatrix;
uniform mat4 u_offsetScaleMatrix;
attribute vec2 a_position;
attribute float a_index;
attribute float a_color;
attribute float a_opacity;
varying vec2 v_texCoord;
varying vec3 v_color;
varying float v_opacity;
void main(void) {
mat4 offsetMatrix = u_offsetScaleMatrix;
float size = 6.0;
float offsetX = a_index == 0.0 || a_index == 3.0 ? -size / 2.0 : size / 2.0;
float offsetY = a_index == 0.0 || a_index == 1.0 ? -size / 2.0 : size / 2.0;
vec4 offsets = offsetMatrix * vec4(offsetX, offsetY, 0.0, 0.0);
gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets;
float u = a_index == 0.0 || a_index == 3.0 ? 0.0 : 1.0;
float v = a_index == 0.0 || a_index == 1.0 ? 0.0 : 1.0;
v_texCoord = vec2(u, v);
v_color = ${DECODE_COLOR_EXPRESSION}
v_opacity = a_opacity;
}`;
/**
 * Default point fragment shader.
 * Outputs the interpolated color with all four components multiplied by
 * opacity (premultiplied alpha); the whole quad is filled uniformly.
 * @type {string}
 */
export const POINT_FRAGMENT_SHADER = `
precision mediump float;
varying vec3 v_color;
varying float v_opacity;
void main(void) {
gl_FragColor = vec4(v_color, 1.0) * v_opacity;
}`;

View File

@@ -38,8 +38,8 @@ export const ShaderType = {
};
/**
* Uniform names used in the default shaders: `PROJECTION_MATRIX`, `OFFSET_SCALE_MATRIX`.
* and `OFFSET_ROTATION_MATRIX`.
* Names of uniforms made available to all shaders.
* Please note: changing these *will* break custom shaders!
* @enum {string}
*/
export const DefaultUniform = {
@@ -49,6 +49,8 @@ export const DefaultUniform = {
TIME: 'u_time',
ZOOM: 'u_zoom',
RESOLUTION: 'u_resolution',
SIZE_PX: 'u_sizePx',
PIXEL_RATIO: 'u_pixelRatio',
};
/**
@@ -206,11 +208,12 @@ function releaseCanvas(key) {
* Shaders must be compiled and assembled into a program like so:
* ```js
* // here we simply create two shaders and assemble them in a program which is then used
* // for subsequent rendering calls
* // for subsequent rendering calls; note how a frameState is required to set up a program,
* // as several default uniforms are computed from it (projection matrix, zoom level, etc.)
* const vertexShader = new WebGLVertex(VERTEX_SHADER);
* const fragmentShader = new WebGLFragment(FRAGMENT_SHADER);
* const program = this.context.getProgram(fragmentShader, vertexShader);
* helper.useProgram(this.program);
* helper.useProgram(this.program, frameState);
* ```
*
* Uniforms are defined using the `uniforms` option and can either be explicit values or callbacks taking the frame state as argument.
@@ -302,8 +305,6 @@ function releaseCanvas(key) {
* ```
*
* For an example usage of this class, refer to {@link module:ol/renderer/webgl/PointsLayer~WebGLPointsLayerRenderer}.
*
* @api
*/
class WebGLHelper extends Disposable {
/**
@@ -484,7 +485,6 @@ class WebGLHelper extends Disposable {
* the WebGL buffer, bind it, populate it, and add an entry to
* the cache.
* @param {import("./Buffer").default} buffer Buffer.
* @api
*/
bindBuffer(buffer) {
const gl = this.getGL();
@@ -505,7 +505,6 @@ class WebGLHelper extends Disposable {
* Update the data contained in the buffer array; this is required for the
* new data to be rendered
* @param {import("./Buffer").default} buffer Buffer.
* @api
*/
flushBufferData(buffer) {
const gl = this.getGL();
@@ -551,7 +550,6 @@ class WebGLHelper extends Disposable {
* subsequent draw calls.
* @param {import("../PluggableMap.js").FrameState} frameState current frame state
* @param {boolean} [opt_disableAlphaBlend] If true, no alpha blending will happen.
* @api
*/
prepareDraw(frameState, opt_disableAlphaBlend) {
const gl = this.getGL();
@@ -564,8 +562,6 @@ class WebGLHelper extends Disposable {
canvas.style.width = size[0] + 'px';
canvas.style.height = size[1] + 'px';
gl.useProgram(this.currentProgram_);
// loop backwards in post processes list
for (let i = this.postProcessPasses_.length - 1; i >= 0; i--) {
this.postProcessPasses_[i].init(frameState);
@@ -581,10 +577,6 @@ class WebGLHelper extends Disposable {
gl.ONE,
opt_disableAlphaBlend ? gl.ZERO : gl.ONE_MINUS_SRC_ALPHA
);
gl.useProgram(this.currentProgram_);
this.applyFrameState(frameState);
this.applyUniforms(frameState);
}
/**
@@ -609,17 +601,12 @@ class WebGLHelper extends Disposable {
gl.ONE,
opt_disableAlphaBlend ? gl.ZERO : gl.ONE_MINUS_SRC_ALPHA
);
gl.useProgram(this.currentProgram_);
this.applyFrameState(frameState);
this.applyUniforms(frameState);
}
/**
* Execute a draw call based on the currently bound program, texture, buffers, attributes.
* @param {number} start Start index.
* @param {number} end End index.
* @api
*/
drawElements(start, end) {
const gl = this.getGL();
@@ -660,7 +647,6 @@ class WebGLHelper extends Disposable {
/**
* @return {HTMLCanvasElement} Canvas.
* @api
*/
getCanvas() {
return this.canvas_;
@@ -669,7 +655,6 @@ class WebGLHelper extends Disposable {
/**
* Get the WebGL rendering context
* @return {WebGLRenderingContext} The rendering context.
* @api
*/
getGL() {
return this.gl_;
@@ -682,6 +667,7 @@ class WebGLHelper extends Disposable {
applyFrameState(frameState) {
const size = frameState.size;
const rotation = frameState.viewState.rotation;
const pixelRatio = frameState.pixelRatio;
const offsetScaleMatrix = resetTransform(this.offsetScaleMatrix_);
scaleTransform(offsetScaleMatrix, 2 / size[0], 2 / size[1]);
@@ -709,6 +695,8 @@ class WebGLHelper extends Disposable {
DefaultUniform.RESOLUTION,
frameState.viewState.resolution
);
this.setUniformFloatValue(DefaultUniform.PIXEL_RATIO, pixelRatio);
this.setUniformFloatVec2(DefaultUniform.SIZE_PX, [size[0], size[1]]);
}
/**
@@ -803,22 +791,19 @@ class WebGLHelper extends Disposable {
}
/**
* Use a program. If the program is already in use, this will return `false`.
* Set up a program for use. The program will be set as the current one. Then, the uniforms used
* in the program will be set based on the current frame state and the helper configuration.
* @param {WebGLProgram} program Program.
* @return {boolean} Changed.
* @api
* @param {import("../PluggableMap.js").FrameState} frameState Frame state.
*/
useProgram(program) {
if (program == this.currentProgram_) {
return false;
} else {
const gl = this.getGL();
gl.useProgram(program);
this.currentProgram_ = program;
this.uniformLocations_ = {};
this.attribLocations_ = {};
return true;
}
useProgram(program, frameState) {
const gl = this.getGL();
gl.useProgram(program);
this.currentProgram_ = program;
this.uniformLocations_ = {};
this.attribLocations_ = {};
this.applyFrameState(frameState);
this.applyUniforms(frameState);
}
/**
@@ -843,7 +828,6 @@ class WebGLHelper extends Disposable {
* @param {string} fragmentShaderSource Fragment shader source.
* @param {string} vertexShaderSource Vertex shader source.
* @return {WebGLProgram} Program
* @api
*/
getProgram(fragmentShaderSource, vertexShaderSource) {
const gl = this.getGL();
@@ -893,7 +877,6 @@ class WebGLHelper extends Disposable {
* Will get the location from the shader or the cache
* @param {string} name Uniform name
* @return {WebGLUniformLocation} uniformLocation
* @api
*/
getUniformLocation(name) {
if (this.uniformLocations_[name] === undefined) {
@@ -909,7 +892,6 @@ class WebGLHelper extends Disposable {
* Will get the location from the shader or the cache
* @param {string} name Attribute name
* @return {number} attribLocation
* @api
*/
getAttributeLocation(name) {
if (this.attribLocations_[name] === undefined) {
@@ -927,7 +909,6 @@ class WebGLHelper extends Disposable {
* @param {import("../PluggableMap.js").FrameState} frameState Frame state.
* @param {import("../transform").Transform} transform Transform to update.
* @return {import("../transform").Transform} The updated transform object.
* @api
*/
makeProjectionTransform(frameState, transform) {
const size = frameState.size;
@@ -953,12 +934,20 @@ class WebGLHelper extends Disposable {
* Give a value for a standard float uniform
* @param {string} uniform Uniform name
* @param {number} value Value
* @api
*/
setUniformFloatValue(uniform, value) {
this.getGL().uniform1f(this.getUniformLocation(uniform), value);
}
/**
* Give a value for a vec2 uniform
* @param {string} uniform Uniform name
* @param {Array<number>} value Array of length 2.
*/
setUniformFloatVec2(uniform, value) {
this.getGL().uniform2fv(this.getUniformLocation(uniform), value);
}
/**
* Give a value for a vec4 uniform
* @param {string} uniform Uniform name
@@ -972,7 +961,6 @@ class WebGLHelper extends Disposable {
* Give a value for a standard matrix4 uniform
* @param {string} uniform Uniform name
* @param {Array<number>} value Matrix value
* @api
*/
setUniformMatrixValue(uniform, value) {
this.getGL().uniformMatrix4fv(
@@ -1014,7 +1002,6 @@ class WebGLHelper extends Disposable {
* i.e. tell the GPU where to read the different attributes in the buffer. An error in the
* size/type/order of attributes will most likely break the rendering and throw a WebGL exception.
* @param {Array<AttributeDescription>} attributes Ordered list of attributes to read from the buffer
* @api
*/
enableAttributes(attributes) {
const stride = computeAttributesStride(attributes);
@@ -1056,7 +1043,6 @@ class WebGLHelper extends Disposable {
* @param {ImageData|HTMLImageElement|HTMLCanvasElement} [opt_data] Image data/object to bind to the texture
* @param {WebGLTexture} [opt_texture] Existing texture to reuse
* @return {WebGLTexture} The generated texture
* @api
*/
createTexture(size, opt_data, opt_texture) {
const gl = this.getGL();
@@ -1103,7 +1089,6 @@ class WebGLHelper extends Disposable {
* Compute a stride in bytes based on a list of attributes
* @param {Array<AttributeDescription>} attributes Ordered list of attributes
* @return {number} Stride, ie amount of values for each vertex in the vertex buffer
* @api
*/
export function computeAttributesStride(attributes) {
let stride = 0;

View File

@@ -2,60 +2,176 @@
* A worker that does cpu-heavy tasks related to webgl rendering.
* @module ol/worker/webgl
*/
import {
WebGLWorkerMessageType,
writePointFeatureToBuffers,
} from '../renderer/webgl/Layer.js';
import {WebGLWorkerMessageType} from '../render/webgl/constants.js';
import {assign} from '../obj.js';
import {
create as createTransform,
makeInverse as makeInverseTransform,
} from '../transform.js';
import {
writeLineSegmentToBuffers,
writePointFeatureToBuffers,
writePolygonTrianglesToBuffers,
} from '../render/webgl/utils.js';
/** @type {any} */
const worker = self;
worker.onmessage = (event) => {
const received = event.data;
if (received.type === WebGLWorkerMessageType.GENERATE_BUFFERS) {
// This is specific to point features (x, y, index)
const baseVertexAttrsCount = 3;
const baseInstructionsCount = 2;
switch (received.type) {
case WebGLWorkerMessageType.GENERATE_POINT_BUFFERS: {
// This is specific to point features (x, y, index)
const baseVertexAttrsCount = 3;
const baseInstructionsCount = 2;
const customAttrsCount = received.customAttributesCount;
const instructionsCount = baseInstructionsCount + customAttrsCount;
const renderInstructions = new Float32Array(received.renderInstructions);
const customAttrsCount = received.customAttributesCount;
const instructionsCount = baseInstructionsCount + customAttrsCount;
const renderInstructions = new Float32Array(received.renderInstructions);
const elementsCount = renderInstructions.length / instructionsCount;
const indicesCount = elementsCount * 6;
const verticesCount =
elementsCount * 4 * (customAttrsCount + baseVertexAttrsCount);
const indexBuffer = new Uint32Array(indicesCount);
const vertexBuffer = new Float32Array(verticesCount);
const elementsCount = renderInstructions.length / instructionsCount;
const indicesCount = elementsCount * 6;
const verticesCount =
elementsCount * 4 * (customAttrsCount + baseVertexAttrsCount);
const indexBuffer = new Uint32Array(indicesCount);
const vertexBuffer = new Float32Array(verticesCount);
let bufferPositions;
for (let i = 0; i < renderInstructions.length; i += instructionsCount) {
bufferPositions = writePointFeatureToBuffers(
renderInstructions,
i,
vertexBuffer,
indexBuffer,
customAttrsCount,
bufferPositions
let bufferPositions;
for (let i = 0; i < renderInstructions.length; i += instructionsCount) {
bufferPositions = writePointFeatureToBuffers(
renderInstructions,
i,
vertexBuffer,
indexBuffer,
customAttrsCount,
bufferPositions
);
}
/** @type {import('../render/webgl/constants.js').WebGLWorkerGenerateBuffersMessage} */
const message = assign(
{
vertexBuffer: vertexBuffer.buffer,
indexBuffer: indexBuffer.buffer,
renderInstructions: renderInstructions.buffer,
},
received
);
worker.postMessage(message, [
vertexBuffer.buffer,
indexBuffer.buffer,
renderInstructions.buffer,
]);
break;
}
case WebGLWorkerMessageType.GENERATE_LINE_STRING_BUFFERS: {
const vertices = [];
const indices = [];
/** @type {import('../renderer/webgl/Layer').WebGLWorkerGenerateBuffersMessage} */
const message = assign(
{
vertexBuffer: vertexBuffer.buffer,
indexBuffer: indexBuffer.buffer,
renderInstructions: renderInstructions.buffer,
},
received
);
const customAttrsCount = received.customAttributesCount;
const instructionsPerVertex = 2;
worker.postMessage(message, [
vertexBuffer.buffer,
indexBuffer.buffer,
renderInstructions.buffer,
]);
const renderInstructions = new Float32Array(received.renderInstructions);
let currentInstructionsIndex = 0;
const transform = received.renderInstructionsTransform;
const invertTransform = createTransform();
makeInverseTransform(invertTransform, transform);
let verticesCount, customAttributes;
while (currentInstructionsIndex < renderInstructions.length) {
customAttributes = Array.from(
renderInstructions.slice(
currentInstructionsIndex,
currentInstructionsIndex + customAttrsCount
)
);
currentInstructionsIndex += customAttrsCount;
verticesCount = renderInstructions[currentInstructionsIndex++];
// last point is only a segment end, do not loop over it
for (let i = 0; i < verticesCount - 1; i++) {
writeLineSegmentToBuffers(
renderInstructions,
currentInstructionsIndex + i * instructionsPerVertex,
currentInstructionsIndex + (i + 1) * instructionsPerVertex,
i > 0
? currentInstructionsIndex + (i - 1) * instructionsPerVertex
: null,
i < verticesCount - 2
? currentInstructionsIndex + (i + 2) * instructionsPerVertex
: null,
vertices,
indices,
customAttributes,
transform,
invertTransform
);
}
currentInstructionsIndex += verticesCount * instructionsPerVertex;
}
const indexBuffer = Uint32Array.from(indices);
const vertexBuffer = Float32Array.from(vertices);
/** @type {import('../render/webgl/constants.js').WebGLWorkerGenerateBuffersMessage} */
const message = assign(
{
vertexBuffer: vertexBuffer.buffer,
indexBuffer: indexBuffer.buffer,
renderInstructions: renderInstructions.buffer,
},
received
);
worker.postMessage(message, [
vertexBuffer.buffer,
indexBuffer.buffer,
renderInstructions.buffer,
]);
break;
}
case WebGLWorkerMessageType.GENERATE_POLYGON_BUFFERS: {
const vertices = [];
const indices = [];
const customAttrsCount = received.customAttributesCount;
const renderInstructions = new Float32Array(received.renderInstructions);
let currentInstructionsIndex = 0;
while (currentInstructionsIndex < renderInstructions.length) {
currentInstructionsIndex = writePolygonTrianglesToBuffers(
renderInstructions,
currentInstructionsIndex,
vertices,
indices,
customAttrsCount
);
}
const indexBuffer = Uint32Array.from(indices);
const vertexBuffer = Float32Array.from(vertices);
/** @type {import('../render/webgl/constants.js').WebGLWorkerGenerateBuffersMessage} */
const message = assign(
{
vertexBuffer: vertexBuffer.buffer,
indexBuffer: indexBuffer.buffer,
renderInstructions: renderInstructions.buffer,
},
received
);
worker.postMessage(message, [
vertexBuffer.buffer,
indexBuffer.buffer,
renderInstructions.buffer,
]);
break;
}
default:
// pass
}
};