Merge pull request #9994 from jahow/add-webgl-layer-type

Add a new layer type: WebGLPointsLayer
Olivier Guyot authored on 2019-09-26 14:55:09 +02:00, committed by GitHub
20 changed files with 1366 additions and 633 deletions

@@ -2,121 +2,42 @@
* @module ol/renderer/webgl/PointsLayer
*/
import WebGLArrayBuffer from '../../webgl/Buffer.js';
import {DYNAMIC_DRAW, ARRAY_BUFFER, ELEMENT_ARRAY_BUFFER, FLOAT} from '../../webgl.js';
import {DefaultAttrib, DefaultUniform} from '../../webgl/Helper.js';
import {ARRAY_BUFFER, DYNAMIC_DRAW, ELEMENT_ARRAY_BUFFER} from '../../webgl.js';
import {AttributeType, DefaultUniform} from '../../webgl/Helper.js';
import GeometryType from '../../geom/GeometryType.js';
import WebGLLayerRenderer, {
colorDecodeId,
colorEncodeId,
getBlankImageData,
POINT_INSTRUCTIONS_COUNT, POINT_VERTEX_STRIDE, WebGLWorkerMessageType,
writePointFeatureInstructions
} from './Layer.js';
import WebGLLayerRenderer, {colorDecodeId, colorEncodeId, WebGLWorkerMessageType} from './Layer.js';
import ViewHint from '../../ViewHint.js';
import {createEmpty, equals} from '../../extent.js';
import {
apply as applyTransform,
create as createTransform,
makeInverse as makeInverseTransform,
multiply as multiplyTransform,
apply as applyTransform
multiply as multiplyTransform
} from '../../transform.js';
import {create as createWebGLWorker} from '../../worker/webgl.js';
import {getUid} from '../../util.js';
import WebGLRenderTarget from '../../webgl/RenderTarget.js';
import {assert} from '../../asserts.js';
const VERTEX_SHADER = `
precision mediump float;
attribute vec2 a_position;
attribute vec2 a_texCoord;
attribute float a_rotateWithView;
attribute vec2 a_offsets;
attribute float a_opacity;
attribute vec4 a_color;
uniform mat4 u_projectionMatrix;
uniform mat4 u_offsetScaleMatrix;
uniform mat4 u_offsetRotateMatrix;
varying vec2 v_texCoord;
varying float v_opacity;
varying vec4 v_color;
void main(void) {
mat4 offsetMatrix = u_offsetScaleMatrix;
if (a_rotateWithView == 1.0) {
offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;
}
vec4 offsets = offsetMatrix * vec4(a_offsets, 0.0, 0.0);
gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets;
v_texCoord = a_texCoord;
v_opacity = a_opacity;
v_color = a_color;
}`;
const FRAGMENT_SHADER = `
precision mediump float;
uniform sampler2D u_texture;
varying vec2 v_texCoord;
varying float v_opacity;
varying vec4 v_color;
void main(void) {
if (v_opacity == 0.0) {
discard;
}
vec4 textureColor = texture2D(u_texture, v_texCoord);
gl_FragColor = v_color * textureColor;
gl_FragColor.a *= v_opacity;
gl_FragColor.rgb *= gl_FragColor.a;
}`;
const HIT_FRAGMENT_SHADER = `
precision mediump float;
uniform sampler2D u_texture;
varying vec2 v_texCoord;
varying float v_opacity;
varying vec4 v_color;
void main(void) {
if (v_opacity == 0.0) {
discard;
}
vec4 textureColor = texture2D(u_texture, v_texCoord);
if (textureColor.a < 0.1) {
discard;
}
gl_FragColor = v_color;
}`;
/**
* @typedef {Object} CustomAttribute A description of a custom attribute to be passed on to the GPU, with a different value
* for each feature.
* @property {string} name Attribute name.
* @property {function(import("../../Feature").default):number} callback This callback computes the numerical value of the
* attribute for a given feature.
*/
/**
* @typedef {Object} Options
* @property {function(import("../../Feature").default):number} [sizeCallback] Will be called on every feature in the
* source to compute the size of the quad on screen (in pixels). This is only done on source change.
* @property {function(import("../../Feature").default, number):number} [coordCallback] Will be called on every feature in the
* source to compute the coordinate of the quad center on screen (in pixels). This is only done on source change.
* The second argument is 0 for `x` component and 1 for `y`.
* @property {function(import("../../Feature").default, number):number} [texCoordCallback] Will be called on every feature in the
* source to compute the texture coordinates of each corner of the quad (without effect if no `texture` option defined). This is only done on source change.
* The second argument is 0 for `u0` component, 1 for `v0`, 2 for `u1`, and 3 for `v1`.
* @property {function(import("../../Feature").default, Array<number>=):Array<number>} [colorCallback] Will be called on every feature in the
* source to compute the color for use in the fragment shader (available as the `v_color` varying). This is only done on source change.
* The return value should be an array of R, G, B, A values between 0 and 1. To reduce unnecessary
* allocation, the function is called with a reusable array that can serve as the return value after updating
* the R, G, B, and A values.
* @property {function(import("../../Feature").default):number} [opacityCallback] Will be called on every feature in the
* source to compute the opacity of the quad on screen (from 0 to 1). This is only done on source change.
* Note: this is multiplied with the color of the point which can also have an alpha value < 1.
* @property {function(import("../../Feature").default):boolean} [rotateWithViewCallback] Will be called on every feature in the
* source to compute whether the quad on screen must stay upright (`false`) or follow the view rotation (`true`). Default is `false`.
* This is only done on source change.
* @property {HTMLCanvasElement|HTMLImageElement|ImageData} [texture] Texture to use on points. `texCoordCallback` and `sizeCallback`
* must be defined for this to have any effect.
* @property {string} [vertexShader] Vertex shader source
* @property {string} [fragmentShader] Fragment shader source
* @property {Array<CustomAttribute>} [attributes] These attributes will be read from the features in the source and then
* passed to the GPU. The `name` property of each attribute will serve as its identifier:
* * In the vertex shader as an `attribute` by prefixing it with `a_`
* * In the fragment shader as a `varying` by prefixing it with `v_`
* Please note that these can only be numerical values.
* @property {string} vertexShader Vertex shader source, mandatory.
* @property {string} fragmentShader Fragment shader source, mandatory.
* @property {string} [hitVertexShader] Vertex shader source for hit detection rendering.
* @property {string} [hitFragmentShader] Fragment shader source for hit detection rendering.
* @property {Object.<string,import("../../webgl/Helper").UniformValue>} [uniforms] Uniform definitions for the post process steps.
* Please note that `u_texture` is reserved for the main texture slot.
* @property {Array<import("./Layer").PostProcessesOptions>} [postProcesses] Post-processes definitions
@@ -128,13 +49,38 @@ const HIT_FRAGMENT_SHADER = `
* All features will be rendered as quads (two triangles forming a square). New data will be flushed to the GPU
* every time the vector source changes.
*
* Use shaders to customize the final output. The following attributes are available:
* * `vec2 a_position`
* * `vec2 a_texCoord`
* * `vec2 a_offsets`
* * `float a_rotateWithView`
* * `float a_opacity`
* `vec4 a_color`
* You need to provide vertex and fragment shaders for rendering. This can be done using
* {@link module:ol/webgl/ShaderBuilder} utilities. These shaders shall expect an `a_position` attribute
* containing the screen-space projected center of the quad, as well as an `a_index` attribute
* whose value (0, 1, 2 or 3) indicates which quad vertex is currently being processed (see structure below).
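*
* As an illustration only, a minimal hand-written vertex shader expanding the quad from `a_index`
* could look like the sketch below. It uses the same `u_projectionMatrix` and `u_offsetScaleMatrix`
* uniforms as the previous default shaders; the fixed 12 pixel size and the corner ordering are
* assumptions of this sketch, not requirements of the renderer:
* ```
* precision mediump float;
* attribute vec2 a_position;
* attribute float a_index;
* uniform mat4 u_projectionMatrix;
* uniform mat4 u_offsetScaleMatrix;
*
* void main(void) {
*   float size = 12.0;
*   float offsetX = a_index == 0.0 || a_index == 2.0 ? -size * 0.5 : size * 0.5;
*   float offsetY = a_index == 0.0 || a_index == 1.0 ? -size * 0.5 : size * 0.5;
*   vec4 offsets = u_offsetScaleMatrix * vec4(offsetX, offsetY, 0.0, 0.0);
*   gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets;
* }
* ```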
*
* To include variable attributes in the shaders, you need to declare them using the `attributes` property of
* the options object like so:
* ```js
* new WebGLPointsLayerRenderer(layer, {
*   attributes: [
*     {
*       name: 'size',
*       callback: function(feature) {
*         // compute something with the feature
*       }
*     },
*     {
*       name: 'weight',
*       callback: function(feature) {
*         // compute something with the feature
*       }
*     },
*   ],
*   // placeholder source strings; the vertex shader can use the a_weight and a_size attributes,
*   // and the fragment shader the v_weight and v_size varyings
*   vertexShader: vertexShaderSource,
*   fragmentShader: fragmentShaderSource
* });
* ```
*
* To enable hit detection, you must also provide dedicated shaders using the `hitVertexShader`
* and `hitFragmentShader` properties. These shall expect the `a_hitColor` attribute to contain
* the final color that must be output for hit detection to work.
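*
* A sketch only (`mainVertexSource`, `mainFragmentSource` and `hitVertexSource` are placeholder
* names for shader sources you provide, and the hit vertex shader is assumed to forward
* `a_hitColor` as a `v_hitColor` varying):
* ```js
* new WebGLPointsLayerRenderer(layer, {
*   vertexShader: mainVertexSource,
*   fragmentShader: mainFragmentSource,
*   hitVertexShader: hitVertexSource,
*   hitFragmentShader: `
*     precision mediump float;
*     varying vec4 v_hitColor;
*     void main(void) {
*       gl_FragColor = v_hitColor;
*     }`
* });
* ```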
*
* The following uniform is used for the main texture: `u_texture`.
*
@@ -157,74 +103,16 @@ const HIT_FRAGMENT_SHADER = `
*
* This uses {@link module:ol/webgl/Helper~WebGLHelper} internally.
*
* Default shaders are shown hereafter:
*
* * Vertex shader:
* ```
* precision mediump float;
*
* attribute vec2 a_position;
* attribute vec2 a_texCoord;
* attribute float a_rotateWithView;
* attribute vec2 a_offsets;
* attribute float a_opacity;
* attribute vec4 a_color;
*
* uniform mat4 u_projectionMatrix;
* uniform mat4 u_offsetScaleMatrix;
* uniform mat4 u_offsetRotateMatrix;
*
* varying vec2 v_texCoord;
* varying float v_opacity;
* varying vec4 v_color;
*
* void main(void) {
* mat4 offsetMatrix = u_offsetScaleMatrix;
* if (a_rotateWithView == 1.0) {
* offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;
* }
* vec4 offsets = offsetMatrix * vec4(a_offsets, 0.0, 0.0);
* gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets;
* v_texCoord = a_texCoord;
* v_opacity = a_opacity;
* v_color = a_color;
* }
* ```
*
* * Fragment shader:
* ```
* precision mediump float;
*
* uniform sampler2D u_texture;
*
* varying vec2 v_texCoord;
* varying float v_opacity;
* varying vec4 v_color;
*
* void main(void) {
* if (v_opacity == 0.0) {
* discard;
* }
* vec4 textureColor = texture2D(u_texture, v_texCoord);
* gl_FragColor = v_color * textureColor;
* gl_FragColor.a *= v_opacity;
* gl_FragColor.rgb *= gl_FragColor.a;
* }
* ```
*
* @api
*/
class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
/**
* @param {import("../../layer/Vector.js").default} vectorLayer Vector layer.
* @param {Options=} [opt_options] Options.
* @param {Options} options Options.
*/
constructor(vectorLayer, opt_options) {
const options = opt_options || {};
constructor(vectorLayer, options) {
const uniforms = options.uniforms || {};
uniforms.u_texture = options.texture || getBlankImageData();
const projectionMatrixTransform = createTransform();
uniforms[DefaultUniform.PROJECTION_MATRIX] = projectionMatrixTransform;
@@ -240,36 +128,68 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
this.indicesBuffer_ = new WebGLArrayBuffer(ELEMENT_ARRAY_BUFFER, DYNAMIC_DRAW);
this.program_ = this.helper.getProgram(
options.fragmentShader || FRAGMENT_SHADER,
options.vertexShader || VERTEX_SHADER
);
this.hitProgram_ = this.helper.getProgram(
HIT_FRAGMENT_SHADER,
VERTEX_SHADER
options.fragmentShader,
options.vertexShader
);
this.sizeCallback_ = options.sizeCallback || function() {
return 1;
};
this.coordCallback_ = options.coordCallback || function(feature, index) {
const geom = feature.getGeometry();
return geom.getCoordinates()[index];
};
this.opacityCallback_ = options.opacityCallback || function() {
return 1;
};
this.texCoordCallback_ = options.texCoordCallback || function(feature, index) {
return index < 2 ? 0 : 1;
};
/**
* @type {boolean}
* @private
*/
this.hitDetectionEnabled_ = options.hitFragmentShader && options.hitVertexShader ? true : false;
this.colorArray_ = [1, 1, 1, 1];
this.colorCallback_ = options.colorCallback || function(feature, color) {
return this.colorArray_;
};
this.hitProgram_ = this.hitDetectionEnabled_ && this.helper.getProgram(
options.hitFragmentShader,
options.hitVertexShader
);
this.rotateWithViewCallback_ = options.rotateWithViewCallback || function() {
return false;
};
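// each custom attribute is described to the WebGL helper as a single float value, prefixed with a_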
const customAttributes = options.attributes ?
options.attributes.map(function(attribute) {
return {
name: 'a_' + attribute.name,
size: 1,
type: AttributeType.FLOAT
};
}) : [];
/**
* A list of attributes used by the renderer. By default only the position and
* index of the vertex (0 to 3) are required.
* @type {Array<import('../../webgl/Helper.js').AttributeDescription>}
*/
this.attributes = [{
name: 'a_position',
size: 2,
type: AttributeType.FLOAT
}, {
name: 'a_index',
size: 1,
type: AttributeType.FLOAT
}].concat(customAttributes);
/**
* A list of attributes used for hit detection.
* @type {Array<import('../../webgl/Helper.js').AttributeDescription>}
*/
this.hitDetectionAttributes = [{
name: 'a_position',
size: 2,
type: AttributeType.FLOAT
}, {
name: 'a_index',
size: 1,
type: AttributeType.FLOAT
}, {
name: 'a_hitColor',
size: 4,
type: AttributeType.FLOAT
}, {
name: 'a_featureUid',
size: 1,
type: AttributeType.FLOAT
}].concat(customAttributes);
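// keep the original attribute definitions (name + callback) around; the callbacks are invoked
// for each feature when generating the render instructions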
this.customAttributes = options.attributes ? options.attributes : [];
this.previousExtent_ = createEmpty();
@@ -312,7 +232,7 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
* @type {WebGLRenderTarget}
* @private
*/
this.hitRenderTarget_ = new WebGLRenderTarget(this.helper);
this.hitRenderTarget_ = this.hitDetectionEnabled_ && new WebGLRenderTarget(this.helper);
this.worker_ = createWebGLWorker();
this.worker_.addEventListener('message', function(event) {
@@ -357,8 +277,10 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
canvas.style.opacity = opacity;
}
this.renderHitDetection(frameState);
this.hitRenderTarget_.clearCachedData();
if (this.hitDetectionEnabled_) {
this.renderHitDetection(frameState);
this.hitRenderTarget_.clearCachedData();
}
return canvas;
}
@@ -371,8 +293,6 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
const vectorSource = vectorLayer.getSource();
const viewState = frameState.viewState;
const stride = POINT_VERTEX_STRIDE;
// the source has changed: clear the feature cache & reload features
const sourceChanged = this.sourceRevision_ < vectorSource.getRevision();
if (sourceChanged) {
@@ -401,13 +321,7 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
this.helper.bindBuffer(this.verticesBuffer_);
this.helper.bindBuffer(this.indicesBuffer_);
const bytesPerFloat = Float32Array.BYTES_PER_ELEMENT;
this.helper.enableAttributeArray(DefaultAttrib.POSITION, 2, FLOAT, bytesPerFloat * stride, 0);
this.helper.enableAttributeArray(DefaultAttrib.OFFSETS, 2, FLOAT, bytesPerFloat * stride, bytesPerFloat * 2);
this.helper.enableAttributeArray(DefaultAttrib.TEX_COORD, 2, FLOAT, bytesPerFloat * stride, bytesPerFloat * 4);
this.helper.enableAttributeArray(DefaultAttrib.OPACITY, 1, FLOAT, bytesPerFloat * stride, bytesPerFloat * 6);
this.helper.enableAttributeArray(DefaultAttrib.ROTATE_WITH_VIEW, 1, FLOAT, bytesPerFloat * stride, bytesPerFloat * 7);
this.helper.enableAttributeArray(DefaultAttrib.COLOR, 4, FLOAT, bytesPerFloat * stride, bytesPerFloat * 8);
this.helper.enableAttributes(this.attributes);
return true;
}
@@ -426,76 +340,72 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
this.helper.makeProjectionTransform(frameState, projectionTransform);
const features = vectorSource.getFeatures();
const totalInstructionsCount = POINT_INSTRUCTIONS_COUNT * features.length;
// here we anticipate the number of render instructions that we will generate
// this can be done since we know that for a normal render we only have x and y as base instructions,
// and x, y, r, g, b, a and featureUid for hit render instructions,
// and we also know the number of custom attributes to append to these
const totalInstructionsCount = (2 + this.customAttributes.length) * features.length;
if (!this.renderInstructions_ || this.renderInstructions_.length !== totalInstructionsCount) {
this.renderInstructions_ = new Float32Array(totalInstructionsCount);
}
if (!this.hitRenderInstructions_ || this.hitRenderInstructions_.length !== totalInstructionsCount) {
this.hitRenderInstructions_ = new Float32Array(totalInstructionsCount);
if (this.hitDetectionEnabled_) {
const totalHitInstructionsCount = (7 + this.customAttributes.length) * features.length;
if (!this.hitRenderInstructions_ || this.hitRenderInstructions_.length !== totalHitInstructionsCount) {
this.hitRenderInstructions_ = new Float32Array(totalHitInstructionsCount);
}
}
// loop on features to fill the buffer
let feature;
const tmpCoords = [];
const tmpColor = [];
let elementIndex = 0;
let u0, v0, u1, v1, size, opacity, rotateWithView, color;
let renderIndex = 0;
let hitIndex = 0;
let hitColor;
for (let i = 0; i < features.length; i++) {
feature = features[i];
if (!feature.getGeometry() || feature.getGeometry().getType() !== GeometryType.POINT) {
continue;
}
tmpCoords[0] = this.coordCallback_(feature, 0);
tmpCoords[1] = this.coordCallback_(feature, 1);
tmpCoords[0] = feature.getGeometry().getFlatCoordinates()[0];
tmpCoords[1] = feature.getGeometry().getFlatCoordinates()[1];
applyTransform(projectionTransform, tmpCoords);
u0 = this.texCoordCallback_(feature, 0);
v0 = this.texCoordCallback_(feature, 1);
u1 = this.texCoordCallback_(feature, 2);
v1 = this.texCoordCallback_(feature, 3);
size = this.sizeCallback_(feature);
opacity = this.opacityCallback_(feature);
rotateWithView = this.rotateWithViewCallback_(feature);
color = this.colorCallback_(feature, this.colorArray_);
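// encode, as a color, the index at which the feature uid will be written in the hit instructions (hitIndex + 6)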
hitColor = colorEncodeId(hitIndex + 6, tmpColor);
writePointFeatureInstructions(
this.renderInstructions_,
elementIndex,
tmpCoords[0],
tmpCoords[1],
u0,
v0,
u1,
v1,
size,
opacity,
rotateWithView,
color
);
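// base instructions for the normal render: projected x and y of the point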
this.renderInstructions_[renderIndex++] = tmpCoords[0];
this.renderInstructions_[renderIndex++] = tmpCoords[1];
// for hit detection, the feature uid is saved in the opacity value
// and the index of the opacity value is encoded in the color values
elementIndex = writePointFeatureInstructions(
this.hitRenderInstructions_,
elementIndex,
tmpCoords[0],
tmpCoords[1],
u0,
v0,
u1,
v1,
size,
opacity > 0 ? Number(getUid(feature)) : 0,
rotateWithView,
colorEncodeId(elementIndex + 7, tmpColor)
);
if (this.hitDetectionEnabled_) {
this.hitRenderInstructions_[hitIndex++] = tmpCoords[0];
this.hitRenderInstructions_[hitIndex++] = tmpCoords[1];
this.hitRenderInstructions_[hitIndex++] = hitColor[0];
this.hitRenderInstructions_[hitIndex++] = hitColor[1];
this.hitRenderInstructions_[hitIndex++] = hitColor[2];
this.hitRenderInstructions_[hitIndex++] = hitColor[3];
this.hitRenderInstructions_[hitIndex++] = Number(getUid(feature));
}
// pushing custom attributes
let value;
for (let j = 0; j < this.customAttributes.length; j++) {
value = this.customAttributes[j].callback(feature);
this.renderInstructions_[renderIndex++] = value;
if (this.hitDetectionEnabled_) {
this.hitRenderInstructions_[hitIndex++] = value;
}
}
}
/** @type import('./Layer').WebGLWorkerGenerateBuffersMessage */
const message = {
type: WebGLWorkerMessageType.GENERATE_BUFFERS,
renderInstructions: this.renderInstructions_.buffer
renderInstructions: this.renderInstructions_.buffer,
customAttributesCount: this.customAttributes.length
};
// additional properties will be sent back as-is by the worker
message['projectionTransform'] = projectionTransform;
@@ -503,20 +413,24 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
this.renderInstructions_ = null;
/** @type import('./Layer').WebGLWorkerGenerateBuffersMessage */
const hitMessage = {
type: WebGLWorkerMessageType.GENERATE_BUFFERS,
renderInstructions: this.hitRenderInstructions_.buffer
};
hitMessage['projectionTransform'] = projectionTransform;
hitMessage['hitDetection'] = true;
this.worker_.postMessage(hitMessage, [this.hitRenderInstructions_.buffer]);
this.hitRenderInstructions_ = null;
if (this.hitDetectionEnabled_) {
const hitMessage = {
type: WebGLWorkerMessageType.GENERATE_BUFFERS,
renderInstructions: this.hitRenderInstructions_.buffer,
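// besides x and y, each hit instruction holds 5 additional values per feature: the hit color (RGBA) and the feature uid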
customAttributesCount: 5 + this.customAttributes.length
};
hitMessage['projectionTransform'] = projectionTransform;
hitMessage['hitDetection'] = true;
this.worker_.postMessage(hitMessage, [this.hitRenderInstructions_.buffer]);
this.hitRenderInstructions_ = null;
}
}
/**
* @inheritDoc
*/
forEachFeatureAtCoordinate(coordinate, frameState, hitTolerance, callback, declutteredFeatures) {
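// hit detection requires the dedicated hit shaders to have been provided in the options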
assert(this.hitDetectionEnabled_, 66);
if (!this.hitRenderInstructions_) {
return;
}
@@ -546,9 +460,8 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
* @param {import("../../PluggableMap.js").FrameState} frameState current frame state
*/
renderHitDetection(frameState) {
// skip render entirely if vertices buffers for display & hit detection have different sizes
// this typically means both buffers are temporarily out of sync
if (this.hitVerticesBuffer_.getSize() !== this.verticesBuffer_.getSize()) {
// skip render entirely if vertex buffers not ready/generated yet
if (!this.hitVerticesBuffer_.getSize()) {
return;
}
@@ -560,18 +473,19 @@ class WebGLPointsLayerRenderer extends WebGLLayerRenderer {
this.helper.bindBuffer(this.hitVerticesBuffer_);
this.helper.bindBuffer(this.indicesBuffer_);
const stride = POINT_VERTEX_STRIDE;
const bytesPerFloat = Float32Array.BYTES_PER_ELEMENT;
this.helper.enableAttributeArray(DefaultAttrib.POSITION, 2, FLOAT, bytesPerFloat * stride, 0);
this.helper.enableAttributeArray(DefaultAttrib.OFFSETS, 2, FLOAT, bytesPerFloat * stride, bytesPerFloat * 2);
this.helper.enableAttributeArray(DefaultAttrib.TEX_COORD, 2, FLOAT, bytesPerFloat * stride, bytesPerFloat * 4);
this.helper.enableAttributeArray(DefaultAttrib.OPACITY, 1, FLOAT, bytesPerFloat * stride, bytesPerFloat * 6);
this.helper.enableAttributeArray(DefaultAttrib.ROTATE_WITH_VIEW, 1, FLOAT, bytesPerFloat * stride, bytesPerFloat * 7);
this.helper.enableAttributeArray(DefaultAttrib.COLOR, 4, FLOAT, bytesPerFloat * stride, bytesPerFloat * 8);
this.helper.enableAttributes(this.hitDetectionAttributes);
const renderCount = this.indicesBuffer_.getSize();
this.helper.drawElements(0, renderCount);
}
/**
* @inheritDoc
*/
disposeInternal() {
this.worker_.terminate();
super.disposeInternal();
}
}
export default WebGLPointsLayerRenderer;