Merge pull request #8951 from jahow/remove-webgl

Remove legacy WebGL code & implement a lightweight API for point rendering
Tim Schaub, 2018-11-16 14:55:00 +01:00, committed by GitHub
101 changed files with 1753 additions and 10729 deletions

View File

@@ -1,13 +0,0 @@
SRC_GLSL := $(shell find src -type f -name '*.glsl')
SRC_SHADER_JS := $(patsubst %shader.glsl,%shader.js,$(SRC_GLSL))
SRC_SHADERLOCATIONS_JS := $(patsubst %shader.glsl,%shader/Locations.js,$(SRC_GLSL))
.PHONY: shaders
shaders: $(SRC_SHADER_JS) $(SRC_SHADERLOCATIONS_JS)
%shader.js: %shader.glsl src/ol/webgl/shader.mustache tasks/glslunit.js
@node tasks/glslunit.js --input $< | ./node_modules/.bin/mustache - src/ol/webgl/shader.mustache > $@
%shader/Locations.js: %shader.glsl src/ol/webgl/shaderlocations.mustache tasks/glslunit.js
@mkdir -p $(@D)
@node tasks/glslunit.js --input $< | ./node_modules/.bin/mustache - src/ol/webgl/shaderlocations.mustache > $@

View File

@@ -10,6 +10,28 @@ Breaking change: layers can no longer be shared between several `Map` objects.
Breaking change: the `Graticule` control has been replaced by a layer also called `Graticule`, found in `ol/layer/Graticule`.
The API remains similar.
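As a hedged illustration of the migration, creating the graticule as a layer might look like the sketch below; the `strokeStyle` and `showLabels` option names are assumed to carry over from the old control, per "the API remains similar" above.

```js
import Map from 'ol/Map';
import View from 'ol/View';
import Graticule from 'ol/layer/Graticule';
import TileLayer from 'ol/layer/Tile';
import OSM from 'ol/source/OSM';
import Stroke from 'ol/style/Stroke';

// The graticule is now a regular layer, so it is added alongside other layers
// instead of being attached to the map as a control.
const map = new Map({
  target: 'map',
  layers: [
    new TileLayer({source: new OSM()}),
    new Graticule({
      // option names assumed to carry over from the old Graticule control
      strokeStyle: new Stroke({color: 'rgba(255, 120, 0, 0.9)', width: 2}),
      showLabels: true
    })
  ],
  view: new View({center: [0, 0], zoom: 2})
});
```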
#### Breaking change: dropped support for most WebGL features
The WebGL map and layer renderers are gone, replaced by `WebGLHelper`, which provides lightweight,
low-level access to the WebGL API. This is used by a new `WebGLPointsLayer`, which does simple rendering of
large numbers of points with custom shaders.
This is now used in the `Heatmap` layer; a usage sketch follows the list of removed components below.
The removed classes and components are:
* `WebGLMap` and `WebGLMapRenderer`
* `WebGLLayerRenderer`
* `WebGLImageLayer` and `WebGLImageLayerRenderer`
* `WebGLTileLayer` and `WebGLTileLayerRenderer`
* `WebGLVectorLayer` and `WebGLVectorLayerRenderer`
* `WebGLReplay` and derived classes, along with associated shaders
* `WebGLReplayGroup`
* `WebGLImmediateRenderer`
* The shader build process using `mustache` and the `Makefile` at the root
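Below is a minimal, hedged sketch of the new point-rendering path, modeled on the `Heatmap` changes further down in this diff. The renderer import path, the vertex shader, and the option names (`vertexShader`, `fragmentShader`, `uniforms`, `sizeCallback`) are taken from that diff; the `CustomPointsLayer` class name, the constant size, and the solid-color fragment shader are illustrative assumptions, not part of this commit.

```js
import VectorLayer from 'ol/layer/Vector';
import WebGLPointsLayerRenderer from 'ol/renderer/webgl/PointsLayer';

// Illustrative layer subclass: draws every point of its vector source as a
// solid orange quad. The vertex shader is copied from the Heatmap renderer
// shown later in this diff; everything else here is a sketch.
class CustomPointsLayer extends VectorLayer {
  createRenderer() {
    return new WebGLPointsLayerRenderer(this, {
      vertexShader: `
        precision mediump float;
        attribute vec2 a_position;
        attribute vec2 a_texCoord;
        attribute float a_rotateWithView;
        attribute vec2 a_offsets;
        uniform mat4 u_projectionMatrix;
        uniform mat4 u_offsetScaleMatrix;
        uniform mat4 u_offsetRotateMatrix;
        uniform float u_size;
        varying vec2 v_texCoord;
        void main(void) {
          mat4 offsetMatrix = u_offsetScaleMatrix;
          if (a_rotateWithView == 1.0) {
            offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;
          }
          vec4 offsets = offsetMatrix * vec4(a_offsets, 0.0, 0.0);
          gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets * u_size;
          v_texCoord = a_texCoord;
        }`,
      fragmentShader: `
        precision mediump float;
        varying vec2 v_texCoord;
        void main(void) {
          gl_FragColor = vec4(1.0, 0.5, 0.0, 1.0);
        }`,
      uniforms: {
        u_size: function() {
          return 16; // assumed: quad size factor, analogous to radius * 10 in Heatmap
        }
      },
      sizeCallback: function(feature) {
        return 1; // per-feature size/weight, as the Heatmap uses its weight function
      }
    });
  }
}

// Usage: new CustomPointsLayer({source: someVectorSource}) added to a map.
```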
### v5.3.0
#### The `getUid` function returns string

View File

@@ -17,7 +17,7 @@ OpenLayers is available as [`ol` npm package](https://npmjs.com/package/ol), whi
## Renderers and Browser Support
By default, OpenLayers uses a performance optimized Canvas renderer. An experimental WebGL renderer (without text rendering support) is also available.
By default, OpenLayers uses a performance optimized Canvas renderer.
OpenLayers runs on all modern browsers that support [HTML5](https://html.spec.whatwg.org/multipage/) and [ECMAScript 5](http://www.ecma-international.org/ecma-262/5.1/). This includes Chrome, Firefox, Safari and Edge. For older browsers and platforms like Internet Explorer (down to version 9) and Android 4.x, the application bundle needs to be transpiled (e.g. using [Babel](https://babeljs.io)) and bundled with [polyfills](http://polyfill.io) for `requestAnimationFrame`, `Element.prototype.classList` and `URL`.

View File

@@ -1,4 +1,4 @@
import Map from '../src/ol/Map.js';
import Map from '../src/ol/CompositeMap.js';
import View from '../src/ol/View.js';
import KML from '../src/ol/format/KML.js';
import {Heatmap as HeatmapLayer, Tile as TileLayer} from '../src/ol/layer.js';

View File

@@ -1,10 +0,0 @@
---
layout: example.html
title: Icon Sprites with WebGL
shortdesc: Icon sprite with WebGL
docs: >
<p>In this example a sprite image is used for the icon styles. Using a sprite is required to get good performance with WebGL.</p>
tags: "webgl, icon, sprite, vector, point"
---
<div id="map" class="map"></div>
<div id="info">&nbsp;</div>

View File

@@ -1,136 +0,0 @@
import Feature from '../src/ol/Feature.js';
import Map from '../src/ol/WebGLMap.js';
import View from '../src/ol/View.js';
import Point from '../src/ol/geom/Point.js';
import VectorLayer from '../src/ol/layer/WebGLVector.js';
import VectorSource from '../src/ol/source/Vector.js';
import {Icon, Style} from '../src/ol/style.js';
const iconInfo = [{
offset: [0, 0],
opacity: 1.0,
rotateWithView: true,
rotation: 0.0,
scale: 1.0,
size: [55, 55]
}, {
offset: [110, 86],
opacity: 0.75,
rotateWithView: false,
rotation: Math.PI / 2.0,
scale: 1.25,
size: [55, 55]
}, {
offset: [55, 0],
opacity: 0.5,
rotateWithView: true,
rotation: Math.PI / 3.0,
scale: 1.5,
size: [55, 86]
}, {
offset: [212, 0],
opacity: 1.0,
rotateWithView: true,
rotation: 0.0,
scale: 1.0,
size: [44, 44]
}];
let i;
const iconCount = iconInfo.length;
const icons = new Array(iconCount);
for (i = 0; i < iconCount; ++i) {
const info = iconInfo[i];
icons[i] = new Icon({
offset: info.offset,
opacity: info.opacity,
rotateWithView: info.rotateWithView,
rotation: info.rotation,
scale: info.scale,
size: info.size,
crossOrigin: 'anonymous',
src: 'data/Butterfly.png'
});
}
const featureCount = 50000;
const features = new Array(featureCount);
let feature, geometry;
const e = 25000000;
for (i = 0; i < featureCount; ++i) {
geometry = new Point(
[2 * e * Math.random() - e, 2 * e * Math.random() - e]);
feature = new Feature(geometry);
feature.setStyle(
new Style({
image: icons[i % (iconCount - 1)]
})
);
features[i] = feature;
}
const vectorSource = new VectorSource({
features: features
});
const vector = new VectorLayer({
source: vectorSource
});
const map = new Map({
layers: [vector],
target: document.getElementById('map'),
view: new View({
center: [0, 0],
zoom: 5
})
});
const overlayFeatures = [];
for (i = 0; i < featureCount; i += 30) {
const clone = features[i].clone();
clone.setStyle(null);
overlayFeatures.push(clone);
}
new VectorLayer({
map: map,
source: new VectorSource({
features: overlayFeatures
}),
style: new Style({
image: icons[iconCount - 1]
})
});
map.on('click', function(evt) {
const info = document.getElementById('info');
info.innerHTML =
'Hold on a second, while I catch those butterflies for you ...';
window.setTimeout(function() {
const features = [];
map.forEachFeatureAtPixel(evt.pixel, function(feature) {
features.push(feature);
return false;
});
if (features.length === 1) {
info.innerHTML = 'Got one butterfly';
} else if (features.length > 1) {
info.innerHTML = 'Got ' + features.length + ' butterflies';
} else {
info.innerHTML = 'Couldn\'t catch a single butterfly';
}
}, 1);
});
map.on('pointermove', function(evt) {
if (evt.dragging) {
return;
}
const pixel = map.getEventPixel(evt.originalEvent);
const hit = map.hasFeatureAtPixel(pixel);
map.getTarget().style.cursor = hit ? 'pointer' : '';
});

View File

@@ -1,12 +0,0 @@
---
layout: example.html
title: Layer Clipping with WebGL
shortdesc: Layer WebGL clipping example.
docs: >
This example shows how to use the <code>precompose</code> and <code>postcompose</code> rendering hooks to clip layers using WebGL.
tags: "clipping, webgl, openstreetmap"
---
<div id="map" class="map"></div>
<div id="no-webgl" class="alert alert-danger" style="display: none">
This example requires a browser that supports <a href="http://get.webgl.org/">WebGL</a>.
</div>

View File

@@ -1,101 +0,0 @@
import Map from '../src/ol/WebGLMap.js';
import View from '../src/ol/View.js';
import {WEBGL} from '../src/ol/has.js';
import TileLayer from '../src/ol/layer/WebGLTile.js';
import OSM from '../src/ol/source/OSM.js';
if (!WEBGL) {
const info = document.getElementById('no-webgl');
/**
* display error message
*/
info.style.display = '';
} else {
const osm = new TileLayer({
source: new OSM()
});
const map = new Map({
layers: [osm],
target: 'map',
view: new View({
center: [0, 0],
zoom: 2
})
});
const fragmentShaderSource = [
'precision mediump float;',
'void main() {',
'}'
].join('');
const vertexShaderSource = [
'attribute vec2 a_position;',
'void main() {',
' gl_Position = vec4(a_position, 0, 1);',
'}'
].join('');
osm.on('precompose', function(event) {
const context = event.glContext;
const gl = context.getGL();
const program = gl.createProgram();
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexShaderSource);
gl.compileShader(vertexShader);
gl.attachShader(program, vertexShader);
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentShaderSource);
gl.compileShader(fragmentShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
context.useProgram(program);
const positionLocation = gl.getAttribLocation(program, 'a_position');
gl.enable(gl.STENCIL_TEST);
gl.colorMask(false, false, false, false);
gl.stencilOp(gl.KEEP, gl.KEEP, gl.REPLACE);
gl.stencilFunc(gl.ALWAYS, 1, 0xff);
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
// first band
-1.0, -1.0, -0.75, -1.0, -1.0, 1.0,
-1.0, 1.0, -0.75, -1.0, -0.75, 1.0,
// second band
-0.5, -1.0, -0.25, -1.0, -0.5, 1.0,
-0.5, 1.0, -0.25, -1.0, -0.25, 1.0,
// third band
0.0, -1.0, 0.25, -1.0, 0.0, 1.0,
0.0, 1.0, 0.25, -1.0, 0.25, 1.0,
// fourth band
0.5, -1.0, 0.75, -1.0, 0.5, 1.0,
0.5, 1.0, 0.75, -1.0, 0.75, 1.0
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLES, 0, 24);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.deleteBuffer(buffer);
gl.colorMask(true, true, true, true);
gl.stencilFunc(gl.NOTEQUAL, 0, 0xff);
gl.stencilOp(gl.KEEP, gl.KEEP, gl.KEEP);
});
osm.on('postcompose', function(event) {
const context = event.glContext;
const gl = context.getGL();
gl.disable(gl.STENCIL_TEST);
});
}

View File

@@ -1,7 +0,0 @@
@media (min-width: 800px) {
.half {
padding: 0 10px;
width: 50%;
float: left;
}
}

View File

@@ -1,16 +0,0 @@
---
layout: example.html
title: Shared Views
shortdesc: Two maps with different renderers share view properties
docs: >
Two maps (one with the Canvas renderer, one with the WebGL renderer) share the same center, resolution, rotation and layers.
tags: "side-by-side, canvas, webgl"
---
<div class="half">
<h4>Canvas</h4>
<div id="canvasMap" class="map"></div>
</div>
<div class="half">
<h4>WebGL</h4>
<div id="webglMap" class="map"></div>
</div>

View File

@@ -1,31 +0,0 @@
import Map from '../src/ol/Map.js';
import WebGLMap from '../src/ol/WebGLMap.js';
import View from '../src/ol/View.js';
import TileLayer from '../src/ol/layer/Tile.js';
import WebGLTileLayer from '../src/ol/layer/WebGLTile.js';
import OSM from '../src/ol/source/OSM.js';
const layer = new TileLayer({
source: new OSM()
});
const webGLLayer = new WebGLTileLayer({
source: new OSM()
});
const view = new View({
center: [0, 0],
zoom: 1
});
const map1 = new Map({
target: 'canvasMap',
layers: [layer],
view: view
});
const map2 = new WebGLMap({
target: 'webglMap',
layers: [webGLLayer],
view: view
});

View File

@@ -1,14 +0,0 @@
---
layout: example.html
title: Symbols with WebGL
shortdesc: Using symbols in an atlas with WebGL.
docs: >
<p>When using symbol styles with WebGL, OpenLayers renders each symbol
to a temporary image and creates a WebGL texture for each image. For
better performance, it is recommended to use atlas images (similar to
image sprites with CSS) so that the number of textures is reduced. OpenLayers
provides an <code>AtlasManager</code> which, when passed to the constructor
of a symbol style, will create atlases for the symbols.</p>
tags: "webgl, symbol, atlas, vector, point"
---
<div id="map" class="map"></div>

View File

@@ -1,111 +0,0 @@
import Feature from '../src/ol/Feature.js';
import Map from '../src/ol/WebGLMap.js';
import View from '../src/ol/View.js';
import Point from '../src/ol/geom/Point.js';
import VectorLayer from '../src/ol/layer/WebGLVector.js';
import VectorSource from '../src/ol/source/Vector.js';
import {AtlasManager, Circle as CircleStyle, Fill, RegularShape, Stroke, Style} from '../src/ol/style.js';
const atlasManager = new AtlasManager({
// we increase the initial size so that all symbols fit into
// a single atlas image
initialSize: 512
});
const symbolInfo = [{
opacity: 1.0,
scale: 1.0,
fillColor: 'rgba(255, 153, 0, 0.4)',
strokeColor: 'rgba(255, 204, 0, 0.2)'
}, {
opacity: 0.75,
scale: 1.25,
fillColor: 'rgba(70, 80, 224, 0.4)',
strokeColor: 'rgba(12, 21, 138, 0.2)'
}, {
opacity: 0.5,
scale: 1.5,
fillColor: 'rgba(66, 150, 79, 0.4)',
strokeColor: 'rgba(20, 99, 32, 0.2)'
}, {
opacity: 1.0,
scale: 1.0,
fillColor: 'rgba(176, 61, 35, 0.4)',
strokeColor: 'rgba(145, 43, 20, 0.2)'
}];
const radiuses = [3, 6, 9, 15, 19, 25];
const symbolCount = symbolInfo.length * radiuses.length * 2;
const symbols = [];
let i, j;
for (i = 0; i < symbolInfo.length; ++i) {
const info = symbolInfo[i];
for (j = 0; j < radiuses.length; ++j) {
// circle symbol
symbols.push(new CircleStyle({
opacity: info.opacity,
scale: info.scale,
radius: radiuses[j],
fill: new Fill({
color: info.fillColor
}),
stroke: new Stroke({
color: info.strokeColor,
width: 1
}),
// by passing the atlas manager to the symbol,
// the symbol will be added to an atlas
atlasManager: atlasManager
}));
// star symbol
symbols.push(new RegularShape({
points: 8,
opacity: info.opacity,
scale: info.scale,
radius: radiuses[j],
radius2: radiuses[j] * 0.7,
angle: 1.4,
fill: new Fill({
color: info.fillColor
}),
stroke: new Stroke({
color: info.strokeColor,
width: 1
}),
atlasManager: atlasManager
}));
}
}
const featureCount = 50000;
const features = new Array(featureCount);
let feature, geometry;
const e = 25000000;
for (i = 0; i < featureCount; ++i) {
geometry = new Point(
[2 * e * Math.random() - e, 2 * e * Math.random() - e]);
feature = new Feature(geometry);
feature.setStyle(
new Style({
image: symbols[i % symbolCount]
})
);
features[i] = feature;
}
const vectorSource = new VectorSource({
features: features
});
const vector = new VectorLayer({
source: vectorSource
});
const map = new Map({
layers: [vector],
target: document.getElementById('map'),
view: new View({
center: [0, 0],
zoom: 4
})
});

View File

@@ -75,7 +75,6 @@
"loglevelnext": "^3.0.0",
"marked": "0.5.1",
"mocha": "5.2.0",
"mustache": "^3.0.0",
"ol-mapbox-style": "^3.3.0",
"pixelmatch": "^4.0.2",
"pngjs": "^3.3.3",

View File

@@ -68,8 +68,7 @@ import {create as createTransform, apply as applyTransform} from './transform.js
* Only layers which are visible and for which this function returns `true`
* will be tested for features. By default, all visible layers will be tested.
* @property {number} [hitTolerance=0] Hit-detection tolerance in pixels. Pixels
* inside the radius around the given position will be checked for features. This only
* works for the canvas renderer and not for WebGL.
* inside the radius around the given position will be checked for features.
*/
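For context, these are the options passed to `forEachFeatureAtPixel`; a brief sketch follows (it assumes an existing `map` instance, and the 5-pixel tolerance and logging are arbitrary).

```js
// Report features within 5 pixels of the click position.
map.on('click', function(evt) {
  map.forEachFeatureAtPixel(evt.pixel, function(feature, layer) {
    console.log('hit feature', feature.getId());
  }, {
    hitTolerance: 5
  });
});
```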

View File

@@ -1,85 +0,0 @@
/**
* @module ol/WebGLMap
*/
import PluggableMap from './PluggableMap.js';
import {defaults as defaultControls} from './control.js';
import {defaults as defaultInteractions} from './interaction.js';
import {assign} from './obj.js';
import WebGLMapRenderer from './renderer/webgl/Map.js';
/**
* @classdesc
* The WebGLMap uses WebGL for rendering map layers. This renderer has limited
* support for vector data and no support for vector tiles.
*
* import WebGLMap from 'ol/WebGLMap';
* import TileLayer from 'ol/layer/Tile';
* import OSM from 'ol/source/OSM';
* import View from 'ol/View';
*
* var map = new WebGLMap({
* view: new View({
* center: [0, 0],
* zoom: 1
* }),
* layers: [
* new TileLayer({
* source: new OSM()
* })
* ],
* target: 'map'
* });
*
* The above snippet creates a map using a {@link module:ol/layer/Tile~Tile} to
* display {@link module:ol/source/OSM~OSM} OSM data and render it to a DOM
* element with the id `map`.
*
* The constructor places a viewport container (with CSS class name
* `ol-viewport`) in the target element (see `getViewport()`), and then two
* further elements within the viewport: one with CSS class name
* `ol-overlaycontainer-stopevent` for controls and some overlays, and one with
* CSS class name `ol-overlaycontainer` for other overlays (see the `stopEvent`
* option of {@link module:ol/Overlay~Overlay} for the difference). The map
* itself is placed in a further element within the viewport.
*
* Layers are stored as a {@link module:ol/Collection~Collection} in
* layerGroups. A top-level group is provided by the library. This is what is
* accessed by `getLayerGroup` and `setLayerGroup`. Layers entered in the
* options are added to this group, and `addLayer` and `removeLayer` change the
* layer collection in the group. `getLayers` is a convenience function for
* `getLayerGroup().getLayers()`.
* Note that {@link module:ol/layer/Group~Group} is a subclass of
* {@link module:ol/layer/Base}, so layers entered in the options or added
* with `addLayer` can be groups, which can contain further groups, and so on.
*
* @fires import("./MapBrowserEvent.js").MapBrowserEvent
* @fires import("./MapEvent.js").MapEvent
* @fires module:ol/render/Event~RenderEvent#postcompose
* @fires module:ol/render/Event~RenderEvent#precompose
* @api
*/
class WebGLMap extends PluggableMap {
/**
* @param {import("./PluggableMap.js").MapOptions} options Map options.
*/
constructor(options) {
options = assign({}, options);
if (!options.controls) {
options.controls = defaultControls();
}
if (!options.interactions) {
options.interactions = defaultInteractions();
}
super(options);
}
createRenderer() {
return new WebGLMapRenderer(this);
}
}
export default WebGLMap;

View File

@@ -29,7 +29,6 @@ export {default as TileRange} from './TileRange.js';
export {default as VectorImageTile} from './VectorImageTile.js';
export {default as VectorTile} from './VectorTile.js';
export {default as View} from './View.js';
export {default as WebGLMap} from './WebGLMap.js';
export {getUid, inherits, VERSION} from './util.js';

View File

@@ -91,8 +91,7 @@ const SelectEventType = {
* @property {boolean} [wrapX=true] Wrap the world horizontally on the selection
* overlay.
* @property {number} [hitTolerance=0] Hit-detection tolerance. Pixels inside
* the radius around the given position will be checked for features. This only
* works for the canvas renderer and not for WebGL.
* the radius around the given position will be checked for features.
*/
@@ -314,8 +313,7 @@ class Select extends Interaction {
/**
* Hit-detection tolerance. Pixels inside the radius around the given position
* will be checked for features. This only works for the canvas renderer and
* not for WebGL.
* will be checked for features.
* @param {number} hitTolerance Hit tolerance in pixels.
* @api
*/
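A short sketch of how the tolerance is used on a `Select` interaction, via the `hitTolerance` option and the `setHitTolerance` method documented above (assumes an existing `map`; the pixel values are arbitrary).

```js
import Select from 'ol/interaction/Select';

// Hit tolerance now behaves the same regardless of the renderer.
const select = new Select({
  hitTolerance: 4
});
map.addInteraction(select);

// The tolerance can also be changed later, e.g. to make touch selection easier.
select.setHitTolerance(10);
```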

View File

@@ -46,8 +46,7 @@ const TranslateEventType = {
* `true` for layers that you want to be translatable. If the option is
* absent, all visible layers will be considered translatable.
* @property {number} [hitTolerance=0] Hit-detection tolerance. Pixels inside the radius around the given position
* will be checked for features. This only works for the canvas renderer and
* not for WebGL.
* will be checked for features.
*/
@@ -267,8 +266,7 @@ class Translate extends PointerInteraction {
/**
* Hit-detection tolerance. Pixels inside the radius around the given position
* will be checked for features. This only works for the canvas renderer and
* not for WebGL.
* will be checked for features.
* @param {number} hitTolerance Hit tolerance in pixels.
* @api
*/

View File

@@ -5,11 +5,8 @@ import {listen} from '../events.js';
import {getChangeEventType} from '../Object.js';
import {createCanvasContext2D} from '../dom.js';
import VectorLayer from './Vector.js';
import {clamp} from '../math.js';
import {assign} from '../obj.js';
import RenderEventType from '../render/EventType.js';
import Icon from '../style/Icon.js';
import Style from '../style/Style.js';
import WebGLPointsLayerRenderer from '../renderer/webgl/PointsLayer';
/**
@@ -84,7 +81,7 @@ class Heatmap extends VectorLayer {
/**
* @private
* @type {Uint8ClampedArray}
* @type {HTMLCanvasElement}
*/
this.gradient_ = null;
@@ -116,70 +113,18 @@ class Heatmap extends VectorLayer {
this.setRadius(options.radius !== undefined ? options.radius : 8);
listen(this,
getChangeEventType(Property.BLUR),
this.handleStyleChanged_, this);
listen(this,
getChangeEventType(Property.RADIUS),
this.handleStyleChanged_, this);
this.handleStyleChanged_();
const weight = options.weight ? options.weight : 'weight';
let weightFunction;
if (typeof weight === 'string') {
weightFunction = function(feature) {
this.weightFunction_ = function(feature) {
return feature.get(weight);
};
} else {
weightFunction = weight;
this.weightFunction_ = weight;
}
this.setStyle(function(feature, resolution) {
const weight = weightFunction(feature);
const opacity = weight !== undefined ? clamp(weight, 0, 1) : 1;
// cast to 8 bits
const index = (255 * opacity) | 0;
let style = this.styleCache_[index];
if (!style) {
style = [
new Style({
image: new Icon({
opacity: opacity,
src: this.circleImage_
})
})
];
this.styleCache_[index] = style;
}
return style;
}.bind(this));
// For performance reasons, don't sort the features before rendering.
// The render order is not relevant for a heatmap representation.
this.setRenderOrder(null);
listen(this, RenderEventType.RENDER, this.handleRender_, this);
}
/**
* @return {string} Data URL for a circle.
* @private
*/
createCircle_() {
const radius = this.getRadius();
const blur = this.getBlur();
const halfSize = radius + blur + 1;
const size = 2 * halfSize;
const context = createCanvasContext2D(size, size);
context.shadowOffsetX = context.shadowOffsetY = this.shadow_;
context.shadowBlur = blur;
context.shadowColor = '#000';
context.beginPath();
const center = halfSize - this.shadow_;
context.arc(center, center, radius, 0, Math.PI * 2, true);
context.fill();
return context.canvas.toDataURL();
}
/**
@@ -219,35 +164,6 @@ class Heatmap extends VectorLayer {
this.gradient_ = createGradient(this.getGradient());
}
/**
* @private
*/
handleStyleChanged_() {
this.circleImage_ = this.createCircle_();
this.styleCache_ = new Array(256);
this.changed();
}
/**
* @param {import("../render/Event.js").default} event Post compose event
* @private
*/
handleRender_(event) {
const context = event.context;
const canvas = context.canvas;
const image = context.getImageData(0, 0, canvas.width, canvas.height);
const view8 = image.data;
for (let i = 0, ii = view8.length; i < ii; i += 4) {
const alpha = view8[i + 3] * 4;
if (alpha) {
view8[i] = this.gradient_[alpha];
view8[i + 1] = this.gradient_[alpha + 1];
view8[i + 2] = this.gradient_[alpha + 2];
}
}
context.putImageData(image, 0, 0);
}
/**
* Set the blur size in pixels.
* @param {number} blur Blur size in pixels.
@@ -277,12 +193,167 @@ class Heatmap extends VectorLayer {
setRadius(radius) {
this.set(Property.RADIUS, radius);
}
/**
* @inheritDoc
*/
createRenderer() {
return new WebGLPointsLayerRenderer(this, {
vertexShader: `
precision mediump float;
attribute vec2 a_position;
attribute vec2 a_texCoord;
attribute float a_rotateWithView;
attribute vec2 a_offsets;
uniform mat4 u_projectionMatrix;
uniform mat4 u_offsetScaleMatrix;
uniform mat4 u_offsetRotateMatrix;
uniform float u_size;
varying vec2 v_texCoord;
void main(void) {
mat4 offsetMatrix = u_offsetScaleMatrix;
if (a_rotateWithView == 1.0) {
offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;
}
vec4 offsets = offsetMatrix * vec4(a_offsets, 0.0, 0.0);
gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets * u_size;
v_texCoord = a_texCoord;
}`,
fragmentShader: `
precision mediump float;
uniform float u_opacity;
uniform float u_resolution;
uniform float u_blur;
varying vec2 v_texCoord;
void main(void) {
gl_FragColor.rgb = vec3(1.0, 1.0, 1.0);
vec2 texCoord = v_texCoord * 2.0 - vec2(1.0, 1.0);
float sqRadius = texCoord.x * texCoord.x + texCoord.y * texCoord.y;
float alpha = 1.0 - sqRadius * sqRadius;
if (alpha <= 0.0) {
discard;
}
gl_FragColor.a = alpha * 0.30 + 1.0 / u_resolution;
}`,
uniforms: {
u_size: function() {
return this.get(Property.RADIUS) * 10;
}.bind(this),
u_resolution: function(frameState) {
return frameState.viewState.resolution;
}
},
postProcesses: [
{
fragmentShader: `
precision mediump float;
uniform sampler2D u_image;
uniform sampler2D u_gradientTexture;
uniform vec2 u_blurSize;
varying vec2 v_texCoord;
varying vec2 v_screenCoord;
void main() {
float weights[9];
weights[0] = weights[8] = 0.05;
weights[1] = weights[7] = 0.09;
weights[2] = weights[6] = 0.12;
weights[3] = weights[5] = 0.15;
weights[4] = 0.18;
vec4 sum = vec4(0.0);
vec2 offset;
vec4 center = texture2D(u_image, v_texCoord);
// vertical blur
offset = vec2(0.0, u_blurSize.y * 1.0);
sum += texture2D(u_image, v_texCoord + offset) * weights[0];
offset = vec2(0.0, u_blurSize.y * 0.75);
sum += texture2D(u_image, v_texCoord + offset) * weights[1];
offset = vec2(0.0, u_blurSize.y * 0.5);
sum += texture2D(u_image, v_texCoord + offset) * weights[2];
offset = vec2(0.0, u_blurSize.y * 0.25);
sum += texture2D(u_image, v_texCoord + offset) * weights[3];
offset = vec2(0.0, u_blurSize.y * 0.0);
sum += texture2D(u_image, v_texCoord + offset) * weights[4];
offset = vec2(0.0, u_blurSize.y * -0.25);
sum += texture2D(u_image, v_texCoord + offset) * weights[5];
offset = vec2(0.0, u_blurSize.y * -0.5);
sum += texture2D(u_image, v_texCoord + offset) * weights[6];
offset = vec2(0.0, u_blurSize.y * -0.75);
sum += texture2D(u_image, v_texCoord + offset) * weights[7];
offset = vec2(0.0, u_blurSize.y * -1.0);
sum += center * weights[8];
// horizontal blur
offset = vec2(u_blurSize.x * 1.0, 0.0);
sum += texture2D(u_image, v_texCoord + offset) * weights[0];
offset = vec2(u_blurSize.x * 0.75, 0.0);
sum += texture2D(u_image, v_texCoord + offset) * weights[1];
offset = vec2(u_blurSize.x * 0.5, 0.0);
sum += texture2D(u_image, v_texCoord + offset) * weights[2];
offset = vec2(u_blurSize.x * 0.25, 0.0);
sum += texture2D(u_image, v_texCoord + offset) * weights[3];
offset = vec2(u_blurSize.x * 0.0, 0.0);
sum += texture2D(u_image, v_texCoord + offset) * weights[4];
offset = vec2(u_blurSize.x * -0.25, 0.0);
sum += texture2D(u_image, v_texCoord + offset) * weights[5];
offset = vec2(u_blurSize.x * -0.5, 0.0);
sum += texture2D(u_image, v_texCoord + offset) * weights[6];
offset = vec2(u_blurSize.x * -0.75, 0.0);
sum += texture2D(u_image, v_texCoord + offset) * weights[7];
offset = vec2(u_blurSize.x * -1.0, 0.0);
sum += center * weights[8];
gl_FragColor = sum * 0.5;
}`,
scaleRatio: 0.5,
uniforms: {
u_blurSize: function(frameState) {
return [
this.get(Property.BLUR) / frameState.size[0],
this.get(Property.BLUR) / frameState.size[1]
];
}.bind(this)
}
},
{
fragmentShader: `
precision mediump float;
uniform sampler2D u_image;
uniform sampler2D u_gradientTexture;
varying vec2 v_texCoord;
varying vec2 v_screenCoord;
void main() {
vec4 color = texture2D(u_image, v_texCoord);
gl_FragColor.rgb = texture2D(u_gradientTexture, vec2(0.5, color.a)).rgb;
gl_FragColor.a = color.a;
}`,
uniforms: {
u_gradientTexture: this.gradient_
}
}
],
sizeCallback: function(feature) {
return this.weightFunction_(feature);
}.bind(this)
});
}
}
/**
 * @param {Array<string>} colors A list of colors.
* @return {Uint8ClampedArray} An array.
* @return {HTMLCanvasElement} canvas with gradient texture.
*/
function createGradient(colors) {
const width = 1;
@@ -298,7 +369,7 @@ function createGradient(colors) {
context.fillStyle = gradient;
context.fillRect(0, 0, width, height);
return context.getImageData(0, 0, width, height).data;
return context.canvas;
}
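For reference, the public `Heatmap` API is unchanged by this rewrite; a typical usage sketch is shown below (the KML URL and the `magnitude` weight property are placeholders, not taken from this commit).

```js
import Map from 'ol/Map';
import View from 'ol/View';
import Heatmap from 'ol/layer/Heatmap';
import KML from 'ol/format/KML';
import VectorSource from 'ol/source/Vector';

// Layer options are unchanged; only the rendering path behind them is new.
const heatmap = new Heatmap({
  source: new VectorSource({
    url: 'data/earthquakes.kml', // placeholder URL
    format: new KML({extractStyles: false})
  }),
  radius: 8,
  blur: 15,
  weight: 'magnitude' // placeholder feature property used as weight
});

const map = new Map({
  target: 'map',
  layers: [heatmap],
  view: new View({center: [0, 0], zoom: 2})
});

// Radius and blur remain observable properties that can be updated at runtime.
heatmap.setRadius(12);
heatmap.setBlur(20);
```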

View File

@@ -1,45 +0,0 @@
/**
* @module ol/layer/WebGLImage
*/
import BaseImageLayer from './BaseImage.js';
import WebGLImageLayerRenderer from '../renderer/webgl/ImageLayer.js';
/**
* @typedef {import("./BaseImage.js").Options} Options
*/
/**
* @classdesc
* Server-rendered images that are available for arbitrary extents and
* resolutions.
* Note that any property set in the options is set as a {@link module:ol/Object~BaseObject}
* property on the layer object; for example, setting `title: 'My Title'` in the
* options means that `title` is observable, and has get/set accessors.
*
* @fires import("../render/Event.js").RenderEvent
* @api
*/
class WebGLImageLayer extends BaseImageLayer {
/**
* @param {Options=} opt_options Layer options.
*/
constructor(opt_options) {
super(opt_options);
}
/**
* Create a renderer for this layer.
* @param {import("../renderer/webgl/Map.js").default} mapRenderer The map renderer.
* @return {import("../renderer/Layer.js").default} A layer renderer.
* @protected
*/
createRenderer(mapRenderer) {
return new WebGLImageLayerRenderer(mapRenderer, this);
}
}
export default WebGLImageLayer;

View File

@@ -1,43 +0,0 @@
/**
* @module ol/layer/WebGLTile
*/
import BaseTileLayer from './BaseTile.js';
import WebGLTileLayerRenderer from '../renderer/webgl/TileLayer.js';
/**
* @typedef {import("./BaseTile.js").Options} Options
*/
/**
* @classdesc
* For layer sources that provide pre-rendered, tiled images in grids that are
* organized by zoom levels for specific resolutions.
* Note that any property set in the options is set as a {@link module:ol/Object~BaseObject}
* property on the layer object; for example, setting `title: 'My Title'` in the
* options means that `title` is observable, and has get/set accessors.
*
* @api
*/
class WebGLTileLayer extends BaseTileLayer {
/**
* @param {Options=} opt_options Tile layer options.
*/
constructor(opt_options) {
super(opt_options);
}
/**
* Create a renderer for this layer.
* @param {import("../renderer/webgl/Map.js").default} mapRenderer The map renderer.
* @return {import("../renderer/Layer.js").default} A layer renderer.
* @protected
*/
createRenderer(mapRenderer) {
return new WebGLTileLayerRenderer(mapRenderer, this);
}
}
export default WebGLTileLayer;

View File

@@ -1,42 +0,0 @@
/**
* @module ol/layer/WebGLVector
*/
import BaseVectorLayer from './BaseVector.js';
import WebGLVectorLayerRenderer from '../renderer/webgl/VectorLayer.js';
/**
* @typedef {import("./BaseVector.js").Options} Options
*/
/**
* @classdesc
* Vector data that is rendered client-side.
* Note that any property set in the options is set as a {@link module:ol/Object~BaseObject}
* property on the layer object; for example, setting `title: 'My Title'` in the
* options means that `title` is observable, and has get/set accessors.
*
* @api
*/
class WebGLVectorLayer extends BaseVectorLayer {
/**
* @param {Options=} opt_options Options.
*/
constructor(opt_options) {
super(opt_options);
}
/**
* Create a renderer for this layer.
* @param {import("../renderer/webgl/Map.js").default} mapRenderer The map renderer.
* @return {import("../renderer/Layer.js").default} A layer renderer.
* @protected
*/
createRenderer(mapRenderer) {
return new WebGLVectorLayerRenderer(mapRenderer, this);
}
}
export default WebGLVectorLayer;

View File

@@ -11,7 +11,7 @@ class RenderEvent extends Event {
* @param {import("./VectorContext.js").default=} opt_vectorContext Vector context.
* @param {import("../PluggableMap.js").FrameState=} opt_frameState Frame state.
* @param {?CanvasRenderingContext2D=} opt_context Context.
* @param {?import("../webgl/Context.js").default=} opt_glContext WebGL Context.
* @param {?import("../webgl/Helper.js").default=} opt_glContext WebGL Context.
*/
constructor(type, opt_vectorContext, opt_frameState, opt_context, opt_glContext) {
@@ -42,7 +42,7 @@ class RenderEvent extends Event {
/**
* WebGL context. Only available when a WebGL renderer is used, null
* otherwise.
* @type {import("../webgl/Context.js").default|null|undefined}
* @type {import("../webgl/Helper.js").default|null|undefined}
* @api
*/
this.glContext = opt_glContext;

View File

@@ -1,403 +0,0 @@
/**
* @module ol/render/webgl/CircleReplay
*/
import {getUid} from '../../util.js';
import {equals} from '../../array.js';
import {asArray} from '../../color.js';
import {intersects} from '../../extent.js';
import {isEmpty} from '../../obj.js';
import {translate} from '../../geom/flat/transform.js';
import {fragment, vertex} from './circlereplay/defaultshader.js';
import Locations from './circlereplay/defaultshader/Locations.js';
import WebGLReplay from './Replay.js';
import {DEFAULT_LINEDASH, DEFAULT_LINEDASHOFFSET, DEFAULT_STROKESTYLE,
DEFAULT_FILLSTYLE, DEFAULT_LINEWIDTH} from '../webgl.js';
import {FLOAT} from '../../webgl.js';
import WebGLBuffer from '../../webgl/Buffer.js';
class WebGLCircleReplay extends WebGLReplay {
/**
* @param {number} tolerance Tolerance.
* @param {import("../../extent.js").Extent} maxExtent Max extent.
*/
constructor(tolerance, maxExtent) {
super(tolerance, maxExtent);
/**
* @private
* @type {import("./circlereplay/defaultshader/Locations.js").default}
*/
this.defaultLocations_ = null;
/**
* @private
* @type {Array<Array<Array<number>|number>>}
*/
this.styles_ = [];
/**
* @private
* @type {Array<number>}
*/
this.styleIndices_ = [];
/**
* @private
* @type {number}
*/
this.radius_ = 0;
/**
* @private
* @type {{fillColor: (Array<number>|null),
* strokeColor: (Array<number>|null),
* lineDash: Array<number>,
* lineDashOffset: (number|undefined),
* lineWidth: (number|undefined),
* changed: boolean}|null}
*/
this.state_ = {
fillColor: null,
strokeColor: null,
lineDash: null,
lineDashOffset: undefined,
lineWidth: undefined,
changed: false
};
}
/**
* @private
* @param {Array<number>} flatCoordinates Flat coordinates.
* @param {number} offset Offset.
* @param {number} end End.
* @param {number} stride Stride.
*/
drawCoordinates_(flatCoordinates, offset, end, stride) {
let numVertices = this.vertices.length;
let numIndices = this.indices.length;
let n = numVertices / 4;
let i, ii;
for (i = offset, ii = end; i < ii; i += stride) {
this.vertices[numVertices++] = flatCoordinates[i];
this.vertices[numVertices++] = flatCoordinates[i + 1];
this.vertices[numVertices++] = 0;
this.vertices[numVertices++] = this.radius_;
this.vertices[numVertices++] = flatCoordinates[i];
this.vertices[numVertices++] = flatCoordinates[i + 1];
this.vertices[numVertices++] = 1;
this.vertices[numVertices++] = this.radius_;
this.vertices[numVertices++] = flatCoordinates[i];
this.vertices[numVertices++] = flatCoordinates[i + 1];
this.vertices[numVertices++] = 2;
this.vertices[numVertices++] = this.radius_;
this.vertices[numVertices++] = flatCoordinates[i];
this.vertices[numVertices++] = flatCoordinates[i + 1];
this.vertices[numVertices++] = 3;
this.vertices[numVertices++] = this.radius_;
this.indices[numIndices++] = n;
this.indices[numIndices++] = n + 1;
this.indices[numIndices++] = n + 2;
this.indices[numIndices++] = n + 2;
this.indices[numIndices++] = n + 3;
this.indices[numIndices++] = n;
n += 4;
}
}
/**
* @inheritDoc
*/
drawCircle(circleGeometry, feature) {
const radius = circleGeometry.getRadius();
const stride = circleGeometry.getStride();
if (radius) {
this.startIndices.push(this.indices.length);
this.startIndicesFeature.push(feature);
if (this.state_.changed) {
this.styleIndices_.push(this.indices.length);
this.state_.changed = false;
}
this.radius_ = radius;
let flatCoordinates = circleGeometry.getFlatCoordinates();
flatCoordinates = translate(flatCoordinates, 0, 2,
stride, -this.origin[0], -this.origin[1]);
this.drawCoordinates_(flatCoordinates, 0, 2, stride);
} else {
if (this.state_.changed) {
this.styles_.pop();
if (this.styles_.length) {
const lastState = this.styles_[this.styles_.length - 1];
this.state_.fillColor = /** @type {Array<number>} */ (lastState[0]);
this.state_.strokeColor = /** @type {Array<number>} */ (lastState[1]);
this.state_.lineWidth = /** @type {number} */ (lastState[2]);
this.state_.changed = false;
}
}
}
}
/**
* @inheritDoc
**/
finish(context) {
// create, bind, and populate the vertices buffer
this.verticesBuffer = new WebGLBuffer(this.vertices);
// create, bind, and populate the indices buffer
this.indicesBuffer = new WebGLBuffer(this.indices);
this.startIndices.push(this.indices.length);
//Clean up, if there is nothing to draw
if (this.styleIndices_.length === 0 && this.styles_.length > 0) {
this.styles_ = [];
}
this.vertices = null;
this.indices = null;
}
/**
* @inheritDoc
*/
getDeleteResourcesFunction(context) {
// We only delete our stuff here. The shaders and the program may
// be used by other CircleReplay instances (for other layers). And
// they will be deleted when disposing of the import("../../webgl/Context.js").WebGLContext
// object.
const verticesBuffer = this.verticesBuffer;
const indicesBuffer = this.indicesBuffer;
return function() {
context.deleteBuffer(verticesBuffer);
context.deleteBuffer(indicesBuffer);
};
}
/**
* @inheritDoc
*/
setUpProgram(gl, context, size, pixelRatio) {
// get the program
const program = context.getProgram(fragment, vertex);
// get the locations
let locations;
if (!this.defaultLocations_) {
locations = new Locations(gl, program);
this.defaultLocations_ = locations;
} else {
locations = this.defaultLocations_;
}
context.useProgram(program);
// enable the vertex attrib arrays
gl.enableVertexAttribArray(locations.a_position);
gl.vertexAttribPointer(locations.a_position, 2, FLOAT,
false, 16, 0);
gl.enableVertexAttribArray(locations.a_instruction);
gl.vertexAttribPointer(locations.a_instruction, 1, FLOAT,
false, 16, 8);
gl.enableVertexAttribArray(locations.a_radius);
gl.vertexAttribPointer(locations.a_radius, 1, FLOAT,
false, 16, 12);
// Enable renderer specific uniforms.
gl.uniform2fv(locations.u_size, size);
gl.uniform1f(locations.u_pixelRatio, pixelRatio);
return locations;
}
/**
* @inheritDoc
*/
shutDownProgram(gl, locations) {
gl.disableVertexAttribArray(locations.a_position);
gl.disableVertexAttribArray(locations.a_instruction);
gl.disableVertexAttribArray(locations.a_radius);
}
/**
* @inheritDoc
*/
drawReplay(gl, context, skippedFeaturesHash, hitDetection) {
if (!isEmpty(skippedFeaturesHash)) {
this.drawReplaySkipping_(gl, context, skippedFeaturesHash);
} else {
//Draw by style groups to minimize drawElements() calls.
let i, start, end, nextStyle;
end = this.startIndices[this.startIndices.length - 1];
for (i = this.styleIndices_.length - 1; i >= 0; --i) {
start = this.styleIndices_[i];
nextStyle = this.styles_[i];
this.setFillStyle_(gl, /** @type {Array<number>} */ (nextStyle[0]));
this.setStrokeStyle_(gl, /** @type {Array<number>} */ (nextStyle[1]),
/** @type {number} */ (nextStyle[2]));
this.drawElements(gl, context, start, end);
end = start;
}
}
}
/**
* @inheritDoc
*/
drawHitDetectionReplayOneByOne(gl, context, skippedFeaturesHash, featureCallback, opt_hitExtent) {
let i, start, end, nextStyle, groupStart, feature, featureIndex;
featureIndex = this.startIndices.length - 2;
end = this.startIndices[featureIndex + 1];
for (i = this.styleIndices_.length - 1; i >= 0; --i) {
nextStyle = this.styles_[i];
this.setFillStyle_(gl, /** @type {Array<number>} */ (nextStyle[0]));
this.setStrokeStyle_(gl, /** @type {Array<number>} */ (nextStyle[1]),
/** @type {number} */ (nextStyle[2]));
groupStart = this.styleIndices_[i];
while (featureIndex >= 0 &&
this.startIndices[featureIndex] >= groupStart) {
start = this.startIndices[featureIndex];
feature = this.startIndicesFeature[featureIndex];
if (skippedFeaturesHash[getUid(feature)] === undefined &&
feature.getGeometry() &&
(opt_hitExtent === undefined || intersects(
/** @type {Array<number>} */ (opt_hitExtent),
feature.getGeometry().getExtent()))) {
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
this.drawElements(gl, context, start, end);
const result = featureCallback(feature);
if (result) {
return result;
}
}
featureIndex--;
end = start;
}
}
return undefined;
}
/**
* @private
* @param {WebGLRenderingContext} gl gl.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {Object} skippedFeaturesHash Ids of features to skip.
*/
drawReplaySkipping_(gl, context, skippedFeaturesHash) {
let i, start, end, nextStyle, groupStart, feature, featureIndex, featureStart;
featureIndex = this.startIndices.length - 2;
end = start = this.startIndices[featureIndex + 1];
for (i = this.styleIndices_.length - 1; i >= 0; --i) {
nextStyle = this.styles_[i];
this.setFillStyle_(gl, /** @type {Array<number>} */ (nextStyle[0]));
this.setStrokeStyle_(gl, /** @type {Array<number>} */ (nextStyle[1]),
/** @type {number} */ (nextStyle[2]));
groupStart = this.styleIndices_[i];
while (featureIndex >= 0 &&
this.startIndices[featureIndex] >= groupStart) {
featureStart = this.startIndices[featureIndex];
feature = this.startIndicesFeature[featureIndex];
if (skippedFeaturesHash[getUid(feature)]) {
if (start !== end) {
this.drawElements(gl, context, start, end);
}
end = featureStart;
}
featureIndex--;
start = featureStart;
}
if (start !== end) {
this.drawElements(gl, context, start, end);
}
start = end = groupStart;
}
}
/**
* @private
* @param {WebGLRenderingContext} gl gl.
* @param {Array<number>} color Color.
*/
setFillStyle_(gl, color) {
gl.uniform4fv(this.defaultLocations_.u_fillColor, color);
}
/**
* @private
* @param {WebGLRenderingContext} gl gl.
* @param {Array<number>} color Color.
* @param {number} lineWidth Line width.
*/
setStrokeStyle_(gl, color, lineWidth) {
gl.uniform4fv(this.defaultLocations_.u_strokeColor, color);
gl.uniform1f(this.defaultLocations_.u_lineWidth, lineWidth);
}
/**
* @inheritDoc
*/
setFillStrokeStyle(fillStyle, strokeStyle) {
let strokeStyleColor, strokeStyleWidth;
if (strokeStyle) {
const strokeStyleLineDash = strokeStyle.getLineDash();
this.state_.lineDash = strokeStyleLineDash ?
strokeStyleLineDash : DEFAULT_LINEDASH;
const strokeStyleLineDashOffset = strokeStyle.getLineDashOffset();
this.state_.lineDashOffset = strokeStyleLineDashOffset ?
strokeStyleLineDashOffset : DEFAULT_LINEDASHOFFSET;
strokeStyleColor = strokeStyle.getColor();
if (!(strokeStyleColor instanceof CanvasGradient) &&
!(strokeStyleColor instanceof CanvasPattern)) {
strokeStyleColor = asArray(strokeStyleColor).map(function(c, i) {
return i != 3 ? c / 255 : c;
}) || DEFAULT_STROKESTYLE;
} else {
strokeStyleColor = DEFAULT_STROKESTYLE;
}
strokeStyleWidth = strokeStyle.getWidth();
strokeStyleWidth = strokeStyleWidth !== undefined ?
strokeStyleWidth : DEFAULT_LINEWIDTH;
} else {
strokeStyleColor = [0, 0, 0, 0];
strokeStyleWidth = 0;
}
let fillStyleColor = fillStyle ? fillStyle.getColor() : [0, 0, 0, 0];
if (!(fillStyleColor instanceof CanvasGradient) &&
!(fillStyleColor instanceof CanvasPattern)) {
fillStyleColor = asArray(fillStyleColor).map(function(c, i) {
return i != 3 ? c / 255 : c;
}) || DEFAULT_FILLSTYLE;
} else {
fillStyleColor = DEFAULT_FILLSTYLE;
}
if (!this.state_.strokeColor || !equals(this.state_.strokeColor, strokeStyleColor) ||
!this.state_.fillColor || !equals(this.state_.fillColor, fillStyleColor) ||
this.state_.lineWidth !== strokeStyleWidth) {
this.state_.changed = true;
this.state_.fillColor = fillStyleColor;
this.state_.strokeColor = strokeStyleColor;
this.state_.lineWidth = strokeStyleWidth;
this.styles_.push([fillStyleColor, strokeStyleColor, strokeStyleWidth]);
}
}
}
export default WebGLCircleReplay;

View File

@@ -1,164 +0,0 @@
/**
* @module ol/render/webgl/ImageReplay
*/
import {getUid} from '../../util.js';
import WebGLTextureReplay from './TextureReplay.js';
import WebGLBuffer from '../../webgl/Buffer.js';
class WebGLImageReplay extends WebGLTextureReplay {
/**
* @param {number} tolerance Tolerance.
* @param {import("../../extent.js").Extent} maxExtent Max extent.
*/
constructor(tolerance, maxExtent) {
super(tolerance, maxExtent);
/**
* @type {Array<HTMLCanvasElement|HTMLImageElement|HTMLVideoElement>}
* @protected
*/
this.images_ = [];
/**
* @type {Array<HTMLCanvasElement|HTMLImageElement|HTMLVideoElement>}
* @protected
*/
this.hitDetectionImages_ = [];
/**
* @type {Array<WebGLTexture>}
* @private
*/
this.textures_ = [];
/**
* @type {Array<WebGLTexture>}
* @private
*/
this.hitDetectionTextures_ = [];
}
/**
* @inheritDoc
*/
drawMultiPoint(multiPointGeometry, feature) {
this.startIndices.push(this.indices.length);
this.startIndicesFeature.push(feature);
const flatCoordinates = multiPointGeometry.getFlatCoordinates();
const stride = multiPointGeometry.getStride();
this.drawCoordinates(
flatCoordinates, 0, flatCoordinates.length, stride);
}
/**
* @inheritDoc
*/
drawPoint(pointGeometry, feature) {
this.startIndices.push(this.indices.length);
this.startIndicesFeature.push(feature);
const flatCoordinates = pointGeometry.getFlatCoordinates();
const stride = pointGeometry.getStride();
this.drawCoordinates(
flatCoordinates, 0, flatCoordinates.length, stride);
}
/**
* @inheritDoc
*/
finish(context) {
const gl = context.getGL();
this.groupIndices.push(this.indices.length);
this.hitDetectionGroupIndices.push(this.indices.length);
// create, bind, and populate the vertices buffer
this.verticesBuffer = new WebGLBuffer(this.vertices);
const indices = this.indices;
// create, bind, and populate the indices buffer
this.indicesBuffer = new WebGLBuffer(indices);
// create textures
/** @type {Object<string, WebGLTexture>} */
const texturePerImage = {};
this.createTextures(this.textures_, this.images_, texturePerImage, gl);
this.createTextures(this.hitDetectionTextures_, this.hitDetectionImages_,
texturePerImage, gl);
this.images_ = null;
this.hitDetectionImages_ = null;
super.finish(context);
}
/**
* @inheritDoc
*/
setImageStyle(imageStyle) {
const anchor = imageStyle.getAnchor();
const image = imageStyle.getImage(1);
const imageSize = imageStyle.getImageSize();
const hitDetectionImage = imageStyle.getHitDetectionImage(1);
const opacity = imageStyle.getOpacity();
const origin = imageStyle.getOrigin();
const rotateWithView = imageStyle.getRotateWithView();
const rotation = imageStyle.getRotation();
const size = imageStyle.getSize();
const scale = imageStyle.getScale();
let currentImage;
if (this.images_.length === 0) {
this.images_.push(image);
} else {
currentImage = this.images_[this.images_.length - 1];
if (getUid(currentImage) != getUid(image)) {
this.groupIndices.push(this.indices.length);
this.images_.push(image);
}
}
if (this.hitDetectionImages_.length === 0) {
this.hitDetectionImages_.push(hitDetectionImage);
} else {
currentImage =
this.hitDetectionImages_[this.hitDetectionImages_.length - 1];
if (getUid(currentImage) != getUid(hitDetectionImage)) {
this.hitDetectionGroupIndices.push(this.indices.length);
this.hitDetectionImages_.push(hitDetectionImage);
}
}
this.anchorX = anchor[0];
this.anchorY = anchor[1];
this.height = size[1];
this.imageHeight = imageSize[1];
this.imageWidth = imageSize[0];
this.opacity = opacity;
this.originX = origin[0];
this.originY = origin[1];
this.rotation = rotation;
this.rotateWithView = rotateWithView;
this.scale = scale;
this.width = size[0];
}
/**
* @inheritDoc
*/
getTextures(opt_all) {
return opt_all ? this.textures_.concat(this.hitDetectionTextures_) : this.textures_;
}
/**
* @inheritDoc
*/
getHitDetectionTextures() {
return this.hitDetectionTextures_;
}
}
export default WebGLImageReplay;

View File

@@ -1,394 +0,0 @@
/**
* @module ol/render/webgl/Immediate
*/
import {intersects} from '../../extent.js';
import GeometryType from '../../geom/GeometryType.js';
import ReplayType from '../ReplayType.js';
import VectorContext from '../VectorContext.js';
import WebGLReplayGroup from './ReplayGroup.js';
class WebGLImmediateRenderer extends VectorContext {
/**
* @param {import("../../webgl/Context.js").default} context Context.
* @param {import("../../coordinate.js").Coordinate} center Center.
* @param {number} resolution Resolution.
* @param {number} rotation Rotation.
* @param {import("../../size.js").Size} size Size.
* @param {import("../../extent.js").Extent} extent Extent.
* @param {number} pixelRatio Pixel ratio.
*/
constructor(context, center, resolution, rotation, size, extent, pixelRatio) {
super();
/**
* @private
*/
this.context_ = context;
/**
* @private
*/
this.center_ = center;
/**
* @private
*/
this.extent_ = extent;
/**
* @private
*/
this.pixelRatio_ = pixelRatio;
/**
* @private
*/
this.size_ = size;
/**
* @private
*/
this.rotation_ = rotation;
/**
* @private
*/
this.resolution_ = resolution;
/**
* @private
* @type {import("../../style/Image.js").default}
*/
this.imageStyle_ = null;
/**
* @private
* @type {import("../../style/Fill.js").default}
*/
this.fillStyle_ = null;
/**
* @private
* @type {import("../../style/Stroke.js").default}
*/
this.strokeStyle_ = null;
/**
* @private
* @type {import("../../style/Text.js").default}
*/
this.textStyle_ = null;
}
/**
* @param {import("./ReplayGroup.js").default} replayGroup Replay group.
* @param {import("../../geom/Geometry.js").default|import("../Feature.js").default} geometry Geometry.
* @private
*/
drawText_(replayGroup, geometry) {
const context = this.context_;
const replay = /** @type {import("./TextReplay.js").default} */ (
replayGroup.getBuilder(0, ReplayType.TEXT));
replay.setTextStyle(this.textStyle_);
replay.drawText(geometry, null);
replay.finish(context);
// default colors
const opacity = 1;
/** @type {Object<string, boolean>} */
const skippedFeatures = {};
let featureCallback;
const oneByOne = false;
replay.replay(this.context_, this.center_, this.resolution_, this.rotation_,
this.size_, this.pixelRatio_, opacity, skippedFeatures, featureCallback,
oneByOne);
replay.getDeleteResourcesFunction(context)();
}
/**
* Set the rendering style. Note that since this is an immediate rendering API,
* any `zIndex` on the provided style will be ignored.
*
* @param {import("../../style/Style.js").default} style The rendering style.
* @override
* @api
*/
setStyle(style) {
this.setFillStrokeStyle(style.getFill(), style.getStroke());
this.setImageStyle(style.getImage());
this.setTextStyle(style.getText());
}
/**
* Render a geometry into the canvas. Call
* {@link ol/render/webgl/Immediate#setStyle} first to set the rendering style.
*
* @param {import("../../geom/Geometry.js").default|import("../Feature.js").default} geometry The geometry to render.
* @override
* @api
*/
drawGeometry(geometry) {
const type = geometry.getType();
switch (type) {
case GeometryType.POINT:
this.drawPoint(/** @type {import("../../geom/Point.js").default} */ (geometry), null);
break;
case GeometryType.LINE_STRING:
this.drawLineString(/** @type {import("../../geom/LineString.js").default} */ (geometry), null);
break;
case GeometryType.POLYGON:
this.drawPolygon(/** @type {import("../../geom/Polygon.js").default} */ (geometry), null);
break;
case GeometryType.MULTI_POINT:
this.drawMultiPoint(/** @type {import("../../geom/MultiPoint.js").default} */ (geometry), null);
break;
case GeometryType.MULTI_LINE_STRING:
this.drawMultiLineString(/** @type {import("../../geom/MultiLineString.js").default} */ (geometry), null);
break;
case GeometryType.MULTI_POLYGON:
this.drawMultiPolygon(/** @type {import("../../geom/MultiPolygon.js").default} */ (geometry), null);
break;
case GeometryType.GEOMETRY_COLLECTION:
this.drawGeometryCollection(/** @type {import("../../geom/GeometryCollection.js").default} */ (geometry), null);
break;
case GeometryType.CIRCLE:
this.drawCircle(/** @type {import("../../geom/Circle.js").default} */ (geometry), null);
break;
default:
// pass
}
}
/**
* @inheritDoc
* @api
*/
drawFeature(feature, style) {
const geometry = style.getGeometryFunction()(feature);
if (!geometry || !intersects(this.extent_, geometry.getExtent())) {
return;
}
this.setStyle(style);
this.drawGeometry(geometry);
}
/**
* @inheritDoc
*/
drawGeometryCollection(geometry, data) {
const geometries = geometry.getGeometriesArray();
let i, ii;
for (i = 0, ii = geometries.length; i < ii; ++i) {
this.drawGeometry(geometries[i]);
}
}
/**
* @inheritDoc
*/
drawPoint(geometry, data) {
const context = this.context_;
const replayGroup = new WebGLReplayGroup(1, this.extent_);
const replay = /** @type {import("./ImageReplay.js").default} */ (
replayGroup.getBuilder(0, ReplayType.IMAGE));
replay.setImageStyle(this.imageStyle_);
replay.drawPoint(geometry, data);
replay.finish(context);
// default colors
const opacity = 1;
/** @type {Object<string, boolean>} */
const skippedFeatures = {};
let featureCallback;
const oneByOne = false;
replay.replay(this.context_, this.center_, this.resolution_, this.rotation_,
this.size_, this.pixelRatio_, opacity, skippedFeatures, featureCallback,
oneByOne);
replay.getDeleteResourcesFunction(context)();
if (this.textStyle_) {
this.drawText_(replayGroup, geometry);
}
}
/**
* @inheritDoc
*/
drawMultiPoint(geometry, data) {
const context = this.context_;
const replayGroup = new WebGLReplayGroup(1, this.extent_);
const replay = /** @type {import("./ImageReplay.js").default} */ (
replayGroup.getBuilder(0, ReplayType.IMAGE));
replay.setImageStyle(this.imageStyle_);
replay.drawMultiPoint(geometry, data);
replay.finish(context);
const opacity = 1;
/** @type {Object<string, boolean>} */
const skippedFeatures = {};
let featureCallback;
const oneByOne = false;
replay.replay(this.context_, this.center_, this.resolution_, this.rotation_,
this.size_, this.pixelRatio_, opacity, skippedFeatures, featureCallback,
oneByOne);
replay.getDeleteResourcesFunction(context)();
if (this.textStyle_) {
this.drawText_(replayGroup, geometry);
}
}
/**
* @inheritDoc
*/
drawLineString(geometry, data) {
const context = this.context_;
const replayGroup = new WebGLReplayGroup(1, this.extent_);
const replay = /** @type {import("./LineStringReplay.js").default} */ (
replayGroup.getBuilder(0, ReplayType.LINE_STRING));
replay.setFillStrokeStyle(null, this.strokeStyle_);
replay.drawLineString(geometry, data);
replay.finish(context);
const opacity = 1;
/** @type {Object<string, boolean>} */
const skippedFeatures = {};
let featureCallback;
const oneByOne = false;
replay.replay(this.context_, this.center_, this.resolution_, this.rotation_,
this.size_, this.pixelRatio_, opacity, skippedFeatures, featureCallback,
oneByOne);
replay.getDeleteResourcesFunction(context)();
if (this.textStyle_) {
this.drawText_(replayGroup, geometry);
}
}
/**
* @inheritDoc
*/
drawMultiLineString(geometry, data) {
const context = this.context_;
const replayGroup = new WebGLReplayGroup(1, this.extent_);
const replay = /** @type {import("./LineStringReplay.js").default} */ (
replayGroup.getBuilder(0, ReplayType.LINE_STRING));
replay.setFillStrokeStyle(null, this.strokeStyle_);
replay.drawMultiLineString(geometry, data);
replay.finish(context);
const opacity = 1;
/** @type {Object<string, boolean>} */
const skippedFeatures = {};
let featureCallback;
const oneByOne = false;
replay.replay(this.context_, this.center_, this.resolution_, this.rotation_,
this.size_, this.pixelRatio_, opacity, skippedFeatures, featureCallback,
oneByOne);
replay.getDeleteResourcesFunction(context)();
if (this.textStyle_) {
this.drawText_(replayGroup, geometry);
}
}
/**
* @inheritDoc
*/
drawPolygon(geometry, data) {
const context = this.context_;
const replayGroup = new WebGLReplayGroup(1, this.extent_);
const replay = /** @type {import("./PolygonReplay.js").default} */ (
replayGroup.getBuilder(0, ReplayType.POLYGON));
replay.setFillStrokeStyle(this.fillStyle_, this.strokeStyle_);
replay.drawPolygon(geometry, data);
replay.finish(context);
const opacity = 1;
/** @type {Object<string, boolean>} */
const skippedFeatures = {};
let featureCallback;
const oneByOne = false;
replay.replay(this.context_, this.center_, this.resolution_, this.rotation_,
this.size_, this.pixelRatio_, opacity, skippedFeatures, featureCallback,
oneByOne);
replay.getDeleteResourcesFunction(context)();
if (this.textStyle_) {
this.drawText_(replayGroup, geometry);
}
}
/**
* @inheritDoc
*/
drawMultiPolygon(geometry, data) {
const context = this.context_;
const replayGroup = new WebGLReplayGroup(1, this.extent_);
const replay = /** @type {import("./PolygonReplay.js").default} */ (
replayGroup.getBuilder(0, ReplayType.POLYGON));
replay.setFillStrokeStyle(this.fillStyle_, this.strokeStyle_);
replay.drawMultiPolygon(geometry, data);
replay.finish(context);
const opacity = 1;
/** @type {Object<string, boolean>} */
const skippedFeatures = {};
let featureCallback;
const oneByOne = false;
replay.replay(this.context_, this.center_, this.resolution_, this.rotation_,
this.size_, this.pixelRatio_, opacity, skippedFeatures, featureCallback,
oneByOne);
replay.getDeleteResourcesFunction(context)();
if (this.textStyle_) {
this.drawText_(replayGroup, geometry);
}
}
/**
* @inheritDoc
*/
drawCircle(geometry, data) {
const context = this.context_;
const replayGroup = new WebGLReplayGroup(1, this.extent_);
const replay = /** @type {import("./CircleReplay.js").default} */ (
replayGroup.getBuilder(0, ReplayType.CIRCLE));
replay.setFillStrokeStyle(this.fillStyle_, this.strokeStyle_);
replay.drawCircle(geometry, data);
replay.finish(context);
const opacity = 1;
/** @type {Object<string, boolean>} */
const skippedFeatures = {};
let featureCallback;
const oneByOne = false;
replay.replay(this.context_, this.center_, this.resolution_, this.rotation_,
this.size_, this.pixelRatio_, opacity, skippedFeatures, featureCallback,
oneByOne);
replay.getDeleteResourcesFunction(context)();
if (this.textStyle_) {
this.drawText_(replayGroup, geometry);
}
}
/**
* @inheritDoc
*/
setImageStyle(imageStyle) {
this.imageStyle_ = imageStyle;
}
/**
* @inheritDoc
*/
setFillStrokeStyle(fillStyle, strokeStyle) {
this.fillStyle_ = fillStyle;
this.strokeStyle_ = strokeStyle;
}
/**
* @inheritDoc
*/
setTextStyle(textStyle) {
this.textStyle_ = textStyle;
}
}
export default WebGLImmediateRenderer;

View File

@@ -1,665 +0,0 @@
/**
* @module ol/render/webgl/LineStringReplay
*/
import {getUid} from '../../util.js';
import {equals} from '../../array.js';
import {asArray} from '../../color.js';
import {intersects} from '../../extent.js';
import {linearRingIsClockwise} from '../../geom/flat/orient.js';
import {translate} from '../../geom/flat/transform.js';
import {lineStringIsClosed} from '../../geom/flat/topology.js';
import {isEmpty} from '../../obj.js';
import {DEFAULT_LINECAP, DEFAULT_LINEDASH, DEFAULT_LINEDASHOFFSET,
DEFAULT_LINEJOIN, DEFAULT_LINEWIDTH, DEFAULT_MITERLIMIT, DEFAULT_STROKESTYLE,
triangleIsCounterClockwise} from '../webgl.js';
import WebGLReplay from './Replay.js';
import {fragment, vertex} from './linestringreplay/defaultshader.js';
import Locations from './linestringreplay/defaultshader/Locations.js';
import {FLOAT} from '../../webgl.js';
import WebGLBuffer from '../../webgl/Buffer.js';
/**
* @enum {number}
*/
const Instruction = {
ROUND: 2,
BEGIN_LINE: 3,
END_LINE: 5,
BEGIN_LINE_CAP: 7,
END_LINE_CAP: 11,
BEVEL_FIRST: 13,
BEVEL_SECOND: 17,
MITER_BOTTOM: 19,
MITER_TOP: 23
};
class WebGLLineStringReplay extends WebGLReplay {
/**
* @param {number} tolerance Tolerance.
* @param {import("../../extent.js").Extent} maxExtent Max extent.
*/
constructor(tolerance, maxExtent) {
super(tolerance, maxExtent);
/**
* @private
* @type {import("./linestringreplay/defaultshader/Locations.js").default}
*/
this.defaultLocations_ = null;
/**
* @private
* @type {Array<Array<?>>}
*/
this.styles_ = [];
/**
* @private
* @type {Array<number>}
*/
this.styleIndices_ = [];
/**
* @private
* @type {{strokeColor: (Array<number>|null),
* lineCap: (string|undefined),
* lineDash: Array<number>,
* lineDashOffset: (number|undefined),
* lineJoin: (string|undefined),
* lineWidth: (number|undefined),
* miterLimit: (number|undefined),
* changed: boolean}|null}
*/
this.state_ = {
strokeColor: null,
lineCap: undefined,
lineDash: null,
lineDashOffset: undefined,
lineJoin: undefined,
lineWidth: undefined,
miterLimit: undefined,
changed: false
};
}
/**
   * Draw the coordinates of one line string or ring.
* @private
* @param {Array<number>} flatCoordinates Flat coordinates.
* @param {number} offset Offset.
* @param {number} end End.
* @param {number} stride Stride.
*/
drawCoordinates_(flatCoordinates, offset, end, stride) {
let i, ii;
let numVertices = this.vertices.length;
let numIndices = this.indices.length;
    //To save a vertex attribute, the direction of a point is encoded as the product of a
    //sign (1 or -1), a prime from Instruction, and a rounding factor (1 or 2). If the
    //product is even, the join or cap is rounded; if it is odd, it is not.
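    //For example, with a round join (factor 2) and sign -1, BEVEL_FIRST yields
    //-1 * 13 * 2 = -26 (even, so the join is rounded); with a miter join (factor 1)
    //the same vertex gets -1 * 13 * 1 = -13 (odd, so it is not).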
const lineJoin = this.state_.lineJoin === 'bevel' ? 0 :
this.state_.lineJoin === 'miter' ? 1 : 2;
const lineCap = this.state_.lineCap === 'butt' ? 0 :
this.state_.lineCap === 'square' ? 1 : 2;
const closed = lineStringIsClosed(flatCoordinates, offset, end, stride);
let startCoords, sign, n;
let lastIndex = numIndices;
let lastSign = 1;
//We need the adjacent vertices to define normals in joins. p0 = last, p1 = current, p2 = next.
let p0, p1, p2;
for (i = offset, ii = end; i < ii; i += stride) {
n = numVertices / 7;
p0 = p1;
p1 = p2 || [flatCoordinates[i], flatCoordinates[i + 1]];
//First vertex.
if (i === offset) {
p2 = [flatCoordinates[i + stride], flatCoordinates[i + stride + 1]];
if (end - offset === stride * 2 && equals(p1, p2)) {
break;
}
if (closed) {
//A closed line! Complete the circle.
p0 = [flatCoordinates[end - stride * 2],
flatCoordinates[end - stride * 2 + 1]];
startCoords = p2;
} else {
//Add the first two/four vertices.
if (lineCap) {
numVertices = this.addVertices_([0, 0], p1, p2,
lastSign * Instruction.BEGIN_LINE_CAP * lineCap, numVertices);
numVertices = this.addVertices_([0, 0], p1, p2,
-lastSign * Instruction.BEGIN_LINE_CAP * lineCap, numVertices);
this.indices[numIndices++] = n + 2;
this.indices[numIndices++] = n;
this.indices[numIndices++] = n + 1;
this.indices[numIndices++] = n + 1;
this.indices[numIndices++] = n + 3;
this.indices[numIndices++] = n + 2;
}
numVertices = this.addVertices_([0, 0], p1, p2,
lastSign * Instruction.BEGIN_LINE * (lineCap || 1), numVertices);
numVertices = this.addVertices_([0, 0], p1, p2,
-lastSign * Instruction.BEGIN_LINE * (lineCap || 1), numVertices);
lastIndex = numVertices / 7 - 1;
continue;
}
} else if (i === end - stride) {
//Last vertex.
if (closed) {
//Same as the first vertex.
p2 = startCoords;
break;
} else {
p0 = p0 || [0, 0];
numVertices = this.addVertices_(p0, p1, [0, 0],
lastSign * Instruction.END_LINE * (lineCap || 1), numVertices);
numVertices = this.addVertices_(p0, p1, [0, 0],
-lastSign * Instruction.END_LINE * (lineCap || 1), numVertices);
this.indices[numIndices++] = n;
this.indices[numIndices++] = lastIndex - 1;
this.indices[numIndices++] = lastIndex;
this.indices[numIndices++] = lastIndex;
this.indices[numIndices++] = n + 1;
this.indices[numIndices++] = n;
if (lineCap) {
numVertices = this.addVertices_(p0, p1, [0, 0],
lastSign * Instruction.END_LINE_CAP * lineCap, numVertices);
numVertices = this.addVertices_(p0, p1, [0, 0],
-lastSign * Instruction.END_LINE_CAP * lineCap, numVertices);
this.indices[numIndices++] = n + 2;
this.indices[numIndices++] = n;
this.indices[numIndices++] = n + 1;
this.indices[numIndices++] = n + 1;
this.indices[numIndices++] = n + 3;
this.indices[numIndices++] = n + 2;
}
break;
}
} else {
p2 = [flatCoordinates[i + stride], flatCoordinates[i + stride + 1]];
}
      // We group CW and straight lines, hence the not-so-intuitive CCW checking function.
sign = triangleIsCounterClockwise(p0[0], p0[1], p1[0], p1[1], p2[0], p2[1])
? -1 : 1;
numVertices = this.addVertices_(p0, p1, p2,
sign * Instruction.BEVEL_FIRST * (lineJoin || 1), numVertices);
numVertices = this.addVertices_(p0, p1, p2,
sign * Instruction.BEVEL_SECOND * (lineJoin || 1), numVertices);
numVertices = this.addVertices_(p0, p1, p2,
-sign * Instruction.MITER_BOTTOM * (lineJoin || 1), numVertices);
if (i > offset) {
this.indices[numIndices++] = n;
this.indices[numIndices++] = lastIndex - 1;
this.indices[numIndices++] = lastIndex;
this.indices[numIndices++] = n + 2;
this.indices[numIndices++] = n;
this.indices[numIndices++] = lastSign * sign > 0 ? lastIndex : lastIndex - 1;
}
this.indices[numIndices++] = n;
this.indices[numIndices++] = n + 2;
this.indices[numIndices++] = n + 1;
lastIndex = n + 2;
lastSign = sign;
//Add miter
if (lineJoin) {
numVertices = this.addVertices_(p0, p1, p2,
sign * Instruction.MITER_TOP * lineJoin, numVertices);
this.indices[numIndices++] = n + 1;
this.indices[numIndices++] = n + 3;
this.indices[numIndices++] = n;
}
}
if (closed) {
n = n || numVertices / 7;
sign = linearRingIsClockwise([p0[0], p0[1], p1[0], p1[1], p2[0], p2[1]], 0, 6, 2)
? 1 : -1;
numVertices = this.addVertices_(p0, p1, p2,
sign * Instruction.BEVEL_FIRST * (lineJoin || 1), numVertices);
numVertices = this.addVertices_(p0, p1, p2,
-sign * Instruction.MITER_BOTTOM * (lineJoin || 1), numVertices);
this.indices[numIndices++] = n;
this.indices[numIndices++] = lastIndex - 1;
this.indices[numIndices++] = lastIndex;
this.indices[numIndices++] = n + 1;
this.indices[numIndices++] = n;
this.indices[numIndices++] = lastSign * sign > 0 ? lastIndex : lastIndex - 1;
}
}
/**
* @param {Array<number>} p0 Last coordinates.
* @param {Array<number>} p1 Current coordinates.
* @param {Array<number>} p2 Next coordinates.
* @param {number} product Sign, instruction, and rounding product.
* @param {number} numVertices Vertex counter.
* @return {number} Vertex counter.
* @private
*/
addVertices_(p0, p1, p2, product, numVertices) {
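    //Each vertex is 7 floats: p0 (2), p1 (2), p2 (2) and the packed direction product,
    //matching the 28-byte stride configured in setUpProgram() below.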
this.vertices[numVertices++] = p0[0];
this.vertices[numVertices++] = p0[1];
this.vertices[numVertices++] = p1[0];
this.vertices[numVertices++] = p1[1];
this.vertices[numVertices++] = p2[0];
this.vertices[numVertices++] = p2[1];
this.vertices[numVertices++] = product;
return numVertices;
}
/**
   * Check if the linestring can be drawn (i.e. is valid).
* @param {Array<number>} flatCoordinates Flat coordinates.
* @param {number} offset Offset.
* @param {number} end End.
* @param {number} stride Stride.
* @return {boolean} The linestring can be drawn.
* @private
*/
isValid_(flatCoordinates, offset, end, stride) {
const range = end - offset;
if (range < stride * 2) {
return false;
} else if (range === stride * 2) {
const firstP = [flatCoordinates[offset], flatCoordinates[offset + 1]];
const lastP = [flatCoordinates[offset + stride], flatCoordinates[offset + stride + 1]];
return !equals(firstP, lastP);
}
return true;
}
/**
* @inheritDoc
*/
drawLineString(lineStringGeometry, feature) {
let flatCoordinates = lineStringGeometry.getFlatCoordinates();
const stride = lineStringGeometry.getStride();
if (this.isValid_(flatCoordinates, 0, flatCoordinates.length, stride)) {
flatCoordinates = translate(flatCoordinates, 0, flatCoordinates.length,
stride, -this.origin[0], -this.origin[1]);
if (this.state_.changed) {
this.styleIndices_.push(this.indices.length);
this.state_.changed = false;
}
this.startIndices.push(this.indices.length);
this.startIndicesFeature.push(feature);
this.drawCoordinates_(
flatCoordinates, 0, flatCoordinates.length, stride);
}
}
/**
* @inheritDoc
*/
drawMultiLineString(multiLineStringGeometry, feature) {
const indexCount = this.indices.length;
const ends = multiLineStringGeometry.getEnds();
ends.unshift(0);
const flatCoordinates = multiLineStringGeometry.getFlatCoordinates();
const stride = multiLineStringGeometry.getStride();
let i, ii;
if (ends.length > 1) {
for (i = 1, ii = ends.length; i < ii; ++i) {
if (this.isValid_(flatCoordinates, ends[i - 1], ends[i], stride)) {
const lineString = translate(flatCoordinates, ends[i - 1], ends[i],
stride, -this.origin[0], -this.origin[1]);
this.drawCoordinates_(
lineString, 0, lineString.length, stride);
}
}
}
if (this.indices.length > indexCount) {
this.startIndices.push(indexCount);
this.startIndicesFeature.push(feature);
if (this.state_.changed) {
this.styleIndices_.push(indexCount);
this.state_.changed = false;
}
}
}
/**
* @param {Array<number>} flatCoordinates Flat coordinates.
* @param {Array<Array<number>>} holeFlatCoordinates Hole flat coordinates.
* @param {number} stride Stride.
*/
drawPolygonCoordinates(flatCoordinates, holeFlatCoordinates, stride) {
if (!lineStringIsClosed(flatCoordinates, 0, flatCoordinates.length, stride)) {
flatCoordinates.push(flatCoordinates[0]);
flatCoordinates.push(flatCoordinates[1]);
}
this.drawCoordinates_(flatCoordinates, 0, flatCoordinates.length, stride);
if (holeFlatCoordinates.length) {
let i, ii;
for (i = 0, ii = holeFlatCoordinates.length; i < ii; ++i) {
if (!lineStringIsClosed(holeFlatCoordinates[i], 0, holeFlatCoordinates[i].length, stride)) {
holeFlatCoordinates[i].push(holeFlatCoordinates[i][0]);
holeFlatCoordinates[i].push(holeFlatCoordinates[i][1]);
}
this.drawCoordinates_(holeFlatCoordinates[i], 0,
holeFlatCoordinates[i].length, stride);
}
}
}
/**
* @param {import("../../Feature.js").default|import("../Feature.js").default} feature Feature.
* @param {number=} opt_index Index count.
*/
setPolygonStyle(feature, opt_index) {
const index = opt_index === undefined ? this.indices.length : opt_index;
this.startIndices.push(index);
this.startIndicesFeature.push(feature);
if (this.state_.changed) {
this.styleIndices_.push(index);
this.state_.changed = false;
}
}
/**
* @return {number} Current index.
*/
getCurrentIndex() {
return this.indices.length;
}
/**
* @inheritDoc
   */
finish(context) {
// create, bind, and populate the vertices buffer
this.verticesBuffer = new WebGLBuffer(this.vertices);
// create, bind, and populate the indices buffer
this.indicesBuffer = new WebGLBuffer(this.indices);
this.startIndices.push(this.indices.length);
//Clean up, if there is nothing to draw
if (this.styleIndices_.length === 0 && this.styles_.length > 0) {
this.styles_ = [];
}
this.vertices = null;
this.indices = null;
}
/**
* @inheritDoc
*/
getDeleteResourcesFunction(context) {
const verticesBuffer = this.verticesBuffer;
const indicesBuffer = this.indicesBuffer;
return function() {
context.deleteBuffer(verticesBuffer);
context.deleteBuffer(indicesBuffer);
};
}
/**
* @inheritDoc
*/
setUpProgram(gl, context, size, pixelRatio) {
// get the program
const program = context.getProgram(fragment, vertex);
// get the locations
let locations;
if (!this.defaultLocations_) {
locations = new Locations(gl, program);
this.defaultLocations_ = locations;
} else {
locations = this.defaultLocations_;
}
context.useProgram(program);
// enable the vertex attrib arrays
gl.enableVertexAttribArray(locations.a_lastPos);
gl.vertexAttribPointer(locations.a_lastPos, 2, FLOAT,
false, 28, 0);
gl.enableVertexAttribArray(locations.a_position);
gl.vertexAttribPointer(locations.a_position, 2, FLOAT,
false, 28, 8);
gl.enableVertexAttribArray(locations.a_nextPos);
gl.vertexAttribPointer(locations.a_nextPos, 2, FLOAT,
false, 28, 16);
gl.enableVertexAttribArray(locations.a_direction);
gl.vertexAttribPointer(locations.a_direction, 1, FLOAT,
false, 28, 24);
// Enable renderer specific uniforms.
gl.uniform2fv(locations.u_size, size);
gl.uniform1f(locations.u_pixelRatio, pixelRatio);
return locations;
}
/**
* @inheritDoc
*/
shutDownProgram(gl, locations) {
gl.disableVertexAttribArray(locations.a_lastPos);
gl.disableVertexAttribArray(locations.a_position);
gl.disableVertexAttribArray(locations.a_nextPos);
gl.disableVertexAttribArray(locations.a_direction);
}
/**
* @inheritDoc
*/
drawReplay(gl, context, skippedFeaturesHash, hitDetection) {
//Save GL parameters.
const tmpDepthFunc = /** @type {number} */ (gl.getParameter(gl.DEPTH_FUNC));
const tmpDepthMask = /** @type {boolean} */ (gl.getParameter(gl.DEPTH_WRITEMASK));
if (!hitDetection) {
gl.enable(gl.DEPTH_TEST);
gl.depthMask(true);
gl.depthFunc(gl.NOTEQUAL);
}
if (!isEmpty(skippedFeaturesHash)) {
this.drawReplaySkipping_(gl, context, skippedFeaturesHash);
} else {
//Draw by style groups to minimize drawElements() calls.
let i, start, end, nextStyle;
end = this.startIndices[this.startIndices.length - 1];
for (i = this.styleIndices_.length - 1; i >= 0; --i) {
start = this.styleIndices_[i];
nextStyle = this.styles_[i];
this.setStrokeStyle_(gl, nextStyle[0], nextStyle[1], nextStyle[2]);
this.drawElements(gl, context, start, end);
gl.clear(gl.DEPTH_BUFFER_BIT);
end = start;
}
}
if (!hitDetection) {
gl.disable(gl.DEPTH_TEST);
gl.clear(gl.DEPTH_BUFFER_BIT);
//Restore GL parameters.
gl.depthMask(tmpDepthMask);
gl.depthFunc(tmpDepthFunc);
}
}
/**
* @private
* @param {WebGLRenderingContext} gl gl.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {Object} skippedFeaturesHash Ids of features to skip.
*/
drawReplaySkipping_(gl, context, skippedFeaturesHash) {
let i, start, end, nextStyle, groupStart, feature, featureIndex, featureStart;
featureIndex = this.startIndices.length - 2;
end = start = this.startIndices[featureIndex + 1];
for (i = this.styleIndices_.length - 1; i >= 0; --i) {
nextStyle = this.styles_[i];
this.setStrokeStyle_(gl, nextStyle[0], nextStyle[1], nextStyle[2]);
groupStart = this.styleIndices_[i];
while (featureIndex >= 0 &&
this.startIndices[featureIndex] >= groupStart) {
featureStart = this.startIndices[featureIndex];
feature = this.startIndicesFeature[featureIndex];
if (skippedFeaturesHash[getUid(feature)]) {
if (start !== end) {
this.drawElements(gl, context, start, end);
gl.clear(gl.DEPTH_BUFFER_BIT);
}
end = featureStart;
}
featureIndex--;
start = featureStart;
}
if (start !== end) {
this.drawElements(gl, context, start, end);
gl.clear(gl.DEPTH_BUFFER_BIT);
}
start = end = groupStart;
}
}
/**
* @inheritDoc
*/
drawHitDetectionReplayOneByOne(gl, context, skippedFeaturesHash, featureCallback, opt_hitExtent) {
let i, start, end, nextStyle, groupStart, feature, featureIndex;
featureIndex = this.startIndices.length - 2;
end = this.startIndices[featureIndex + 1];
for (i = this.styleIndices_.length - 1; i >= 0; --i) {
nextStyle = this.styles_[i];
this.setStrokeStyle_(gl, nextStyle[0], nextStyle[1], nextStyle[2]);
groupStart = this.styleIndices_[i];
while (featureIndex >= 0 &&
this.startIndices[featureIndex] >= groupStart) {
start = this.startIndices[featureIndex];
feature = this.startIndicesFeature[featureIndex];
if (skippedFeaturesHash[getUid(feature)] === undefined &&
feature.getGeometry() &&
(opt_hitExtent === undefined || intersects(
/** @type {Array<number>} */ (opt_hitExtent),
feature.getGeometry().getExtent()))) {
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
this.drawElements(gl, context, start, end);
const result = featureCallback(feature);
if (result) {
return result;
}
}
featureIndex--;
end = start;
}
}
return undefined;
}
/**
* @private
* @param {WebGLRenderingContext} gl gl.
* @param {Array<number>} color Color.
* @param {number} lineWidth Line width.
* @param {number} miterLimit Miter limit.
*/
setStrokeStyle_(gl, color, lineWidth, miterLimit) {
gl.uniform4fv(this.defaultLocations_.u_color, color);
gl.uniform1f(this.defaultLocations_.u_lineWidth, lineWidth);
gl.uniform1f(this.defaultLocations_.u_miterLimit, miterLimit);
}
/**
* @inheritDoc
*/
setFillStrokeStyle(fillStyle, strokeStyle) {
const strokeStyleLineCap = strokeStyle.getLineCap();
this.state_.lineCap = strokeStyleLineCap !== undefined ?
strokeStyleLineCap : DEFAULT_LINECAP;
const strokeStyleLineDash = strokeStyle.getLineDash();
this.state_.lineDash = strokeStyleLineDash ?
strokeStyleLineDash : DEFAULT_LINEDASH;
const strokeStyleLineDashOffset = strokeStyle.getLineDashOffset();
this.state_.lineDashOffset = strokeStyleLineDashOffset ?
strokeStyleLineDashOffset : DEFAULT_LINEDASHOFFSET;
const strokeStyleLineJoin = strokeStyle.getLineJoin();
this.state_.lineJoin = strokeStyleLineJoin !== undefined ?
strokeStyleLineJoin : DEFAULT_LINEJOIN;
let strokeStyleColor = strokeStyle.getColor();
if (!(strokeStyleColor instanceof CanvasGradient) &&
!(strokeStyleColor instanceof CanvasPattern)) {
strokeStyleColor = asArray(strokeStyleColor).map(function(c, i) {
return i != 3 ? c / 255 : c;
}) || DEFAULT_STROKESTYLE;
} else {
strokeStyleColor = DEFAULT_STROKESTYLE;
}
let strokeStyleWidth = strokeStyle.getWidth();
strokeStyleWidth = strokeStyleWidth !== undefined ?
strokeStyleWidth : DEFAULT_LINEWIDTH;
let strokeStyleMiterLimit = strokeStyle.getMiterLimit();
strokeStyleMiterLimit = strokeStyleMiterLimit !== undefined ?
strokeStyleMiterLimit : DEFAULT_MITERLIMIT;
if (!this.state_.strokeColor || !equals(this.state_.strokeColor, strokeStyleColor) ||
this.state_.lineWidth !== strokeStyleWidth || this.state_.miterLimit !== strokeStyleMiterLimit) {
this.state_.changed = true;
this.state_.strokeColor = strokeStyleColor;
this.state_.lineWidth = strokeStyleWidth;
this.state_.miterLimit = strokeStyleMiterLimit;
this.styles_.push([strokeStyleColor, strokeStyleWidth, strokeStyleMiterLimit]);
}
}
}
export default WebGLLineStringReplay;

File diff suppressed because it is too large

View File

@@ -1,369 +0,0 @@
/**
* @module ol/render/webgl/Replay
*/
import {abstract} from '../../util.js';
import {getCenter} from '../../extent.js';
import VectorContext from '../VectorContext.js';
import {
create as createTransform,
reset as resetTransform,
rotate as rotateTransform,
scale as scaleTransform,
translate as translateTransform
} from '../../transform.js';
import {create, fromTransform} from '../../vec/mat4.js';
import {ARRAY_BUFFER, ELEMENT_ARRAY_BUFFER, TRIANGLES,
UNSIGNED_INT, UNSIGNED_SHORT} from '../../webgl.js';
class WebGLReplay extends VectorContext {
/**
* @param {number} tolerance Tolerance.
* @param {import("../../extent.js").Extent} maxExtent Max extent.
*/
constructor(tolerance, maxExtent) {
super();
/**
* @protected
* @type {number}
*/
this.tolerance = tolerance;
/**
* @protected
* @const
* @type {import("../../extent.js").Extent}
*/
this.maxExtent = maxExtent;
/**
* The origin of the coordinate system for the point coordinates sent to
* the GPU. To eliminate jitter caused by precision problems in the GPU
* we use the "Rendering Relative to Eye" technique described in the "3D
* Engine Design for Virtual Globes" book.
* @protected
* @type {import("../../coordinate.js").Coordinate}
*/
this.origin = getCenter(maxExtent);
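    // Float32 only carries about 7 significant decimal digits, so keeping vertex
    // coordinates relative to the extent center keeps their magnitude small enough
    // to avoid visible sub-pixel jitter.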
/**
* @private
* @type {import("../../transform.js").Transform}
*/
this.projectionMatrix_ = createTransform();
/**
* @private
* @type {import("../../transform.js").Transform}
*/
this.offsetRotateMatrix_ = createTransform();
/**
* @private
* @type {import("../../transform.js").Transform}
*/
this.offsetScaleMatrix_ = createTransform();
/**
* @private
* @type {Array<number>}
*/
this.tmpMat4_ = create();
/**
* @protected
* @type {Array<number>}
*/
this.indices = [];
/**
* @protected
* @type {?import("../../webgl/Buffer.js").default}
*/
this.indicesBuffer = null;
/**
* Start index per feature (the index).
* @protected
* @type {Array<number>}
*/
this.startIndices = [];
/**
* Start index per feature (the feature).
* @protected
* @type {Array<import("../../Feature.js").default|import("../Feature.js").default>}
*/
this.startIndicesFeature = [];
/**
* @protected
* @type {Array<number>}
*/
this.vertices = [];
/**
* @protected
* @type {?import("../../webgl/Buffer.js").default}
*/
this.verticesBuffer = null;
/**
* Optional parameter for PolygonReplay instances.
* @protected
* @type {import("./LineStringReplay.js").default|undefined}
*/
this.lineStringReplay = undefined;
}
/**
* @abstract
* @param {import("../../webgl/Context.js").default} context WebGL context.
* @return {function()} Delete resources function.
*/
getDeleteResourcesFunction(context) {
return abstract();
}
/**
* @abstract
* @param {import("../../webgl/Context.js").default} context Context.
*/
finish(context) {
abstract();
}
/**
* @abstract
* @protected
* @param {WebGLRenderingContext} gl gl.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {import("../../size.js").Size} size Size.
* @param {number} pixelRatio Pixel ratio.
* @return {import("./circlereplay/defaultshader/Locations.js").default|
import("./linestringreplay/defaultshader/Locations.js").default|
import("./polygonreplay/defaultshader/Locations.js").default|
import("./texturereplay/defaultshader/Locations.js").default} Locations.
*/
setUpProgram(gl, context, size, pixelRatio) {
return abstract();
}
/**
* @abstract
* @protected
* @param {WebGLRenderingContext} gl gl.
* @param {import("./circlereplay/defaultshader/Locations.js").default|
import("./linestringreplay/defaultshader/Locations.js").default|
import("./polygonreplay/defaultshader/Locations.js").default|
import("./texturereplay/defaultshader/Locations.js").default} locations Locations.
*/
shutDownProgram(gl, locations) {
abstract();
}
/**
* @abstract
* @protected
* @param {WebGLRenderingContext} gl gl.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {Object<string, boolean>} skippedFeaturesHash Ids of features to skip.
* @param {boolean} hitDetection Hit detection mode.
*/
drawReplay(gl, context, skippedFeaturesHash, hitDetection) {
abstract();
}
/**
* @abstract
* @protected
* @param {WebGLRenderingContext} gl gl.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {Object<string, boolean>} skippedFeaturesHash Ids of features to skip.
* @param {function((import("../../Feature.js").default|import("../Feature.js").default)): T|undefined} featureCallback Feature callback.
* @param {import("../../extent.js").Extent=} opt_hitExtent Hit extent: Only features intersecting this extent are checked.
* @return {T|undefined} Callback result.
* @template T
*/
drawHitDetectionReplayOneByOne(gl, context, skippedFeaturesHash, featureCallback, opt_hitExtent) {
return abstract();
}
/**
* @protected
* @param {WebGLRenderingContext} gl gl.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {Object<string, boolean>} skippedFeaturesHash Ids of features to skip.
* @param {function((import("../../Feature.js").default|import("../Feature.js").default)): T|undefined} featureCallback Feature callback.
   * @param {boolean} oneByOne Draw features one-by-one for the hit-detection.
* @param {import("../../extent.js").Extent=} opt_hitExtent Hit extent: Only features intersecting this extent are checked.
* @return {T|undefined} Callback result.
* @template T
*/
drawHitDetectionReplay(gl, context, skippedFeaturesHash, featureCallback, oneByOne, opt_hitExtent) {
if (!oneByOne) {
      // draw all hit-detection features at once (by texture group)
return this.drawHitDetectionReplayAll(gl, context,
skippedFeaturesHash, featureCallback);
} else {
// draw hit-detection features one by one
return this.drawHitDetectionReplayOneByOne(gl, context,
skippedFeaturesHash, featureCallback, opt_hitExtent);
}
}
/**
* @protected
* @param {WebGLRenderingContext} gl gl.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {Object<string, boolean>} skippedFeaturesHash Ids of features to skip.
* @param {function((import("../../Feature.js").default|import("../Feature.js").default)): T|undefined} featureCallback Feature callback.
* @return {T|undefined} Callback result.
* @template T
*/
drawHitDetectionReplayAll(gl, context, skippedFeaturesHash, featureCallback) {
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
this.drawReplay(gl, context, skippedFeaturesHash, true);
const result = featureCallback(null);
if (result) {
return result;
} else {
return undefined;
}
}
/**
* @param {import("../../webgl/Context.js").default} context Context.
* @param {import("../../coordinate.js").Coordinate} center Center.
* @param {number} resolution Resolution.
* @param {number} rotation Rotation.
* @param {import("../../size.js").Size} size Size.
* @param {number} pixelRatio Pixel ratio.
* @param {number} opacity Global opacity.
* @param {Object<string, boolean>} skippedFeaturesHash Ids of features to skip.
* @param {function((import("../../Feature.js").default|import("../Feature.js").default)): T|undefined} featureCallback Feature callback.
   * @param {boolean} oneByOne Draw features one-by-one for the hit-detection.
* @param {import("../../extent.js").Extent=} opt_hitExtent Hit extent: Only features intersecting this extent are checked.
* @return {T|undefined} Callback result.
* @template T
*/
replay(
context,
center,
resolution,
rotation,
size,
pixelRatio,
opacity,
skippedFeaturesHash,
featureCallback,
oneByOne,
opt_hitExtent
) {
const gl = context.getGL();
let tmpStencil, tmpStencilFunc, tmpStencilMaskVal, tmpStencilRef, tmpStencilMask,
tmpStencilOpFail, tmpStencilOpPass, tmpStencilOpZFail;
if (this.lineStringReplay) {
tmpStencil = gl.isEnabled(gl.STENCIL_TEST);
tmpStencilFunc = gl.getParameter(gl.STENCIL_FUNC);
tmpStencilMaskVal = gl.getParameter(gl.STENCIL_VALUE_MASK);
tmpStencilRef = gl.getParameter(gl.STENCIL_REF);
tmpStencilMask = gl.getParameter(gl.STENCIL_WRITEMASK);
tmpStencilOpFail = gl.getParameter(gl.STENCIL_FAIL);
tmpStencilOpPass = gl.getParameter(gl.STENCIL_PASS_DEPTH_PASS);
tmpStencilOpZFail = gl.getParameter(gl.STENCIL_PASS_DEPTH_FAIL);
gl.enable(gl.STENCIL_TEST);
gl.clear(gl.STENCIL_BUFFER_BIT);
gl.stencilMask(255);
gl.stencilFunc(gl.ALWAYS, 1, 255);
gl.stencilOp(gl.KEEP, gl.KEEP, gl.REPLACE);
this.lineStringReplay.replay(context,
center, resolution, rotation, size, pixelRatio,
opacity, skippedFeaturesHash,
featureCallback, oneByOne, opt_hitExtent);
gl.stencilMask(0);
gl.stencilFunc(gl.NOTEQUAL, 1, 255);
}
context.bindBuffer(ARRAY_BUFFER, this.verticesBuffer);
context.bindBuffer(ELEMENT_ARRAY_BUFFER, this.indicesBuffer);
const locations = this.setUpProgram(gl, context, size, pixelRatio);
// set the "uniform" values
const projectionMatrix = resetTransform(this.projectionMatrix_);
scaleTransform(projectionMatrix, 2 / (resolution * size[0]), 2 / (resolution * size[1]));
rotateTransform(projectionMatrix, -rotation);
translateTransform(projectionMatrix, -(center[0] - this.origin[0]), -(center[1] - this.origin[1]));
const offsetScaleMatrix = resetTransform(this.offsetScaleMatrix_);
scaleTransform(offsetScaleMatrix, 2 / size[0], 2 / size[1]);
const offsetRotateMatrix = resetTransform(this.offsetRotateMatrix_);
if (rotation !== 0) {
rotateTransform(offsetRotateMatrix, -rotation);
}
gl.uniformMatrix4fv(locations.u_projectionMatrix, false,
fromTransform(this.tmpMat4_, projectionMatrix));
gl.uniformMatrix4fv(locations.u_offsetScaleMatrix, false,
fromTransform(this.tmpMat4_, offsetScaleMatrix));
gl.uniformMatrix4fv(locations.u_offsetRotateMatrix, false,
fromTransform(this.tmpMat4_, offsetRotateMatrix));
gl.uniform1f(locations.u_opacity, opacity);
// draw!
let result;
if (featureCallback === undefined) {
this.drawReplay(gl, context, skippedFeaturesHash, false);
} else {
// draw feature by feature for the hit-detection
result = this.drawHitDetectionReplay(gl, context, skippedFeaturesHash,
featureCallback, oneByOne, opt_hitExtent);
}
// disable the vertex attrib arrays
this.shutDownProgram(gl, locations);
if (this.lineStringReplay) {
if (!tmpStencil) {
gl.disable(gl.STENCIL_TEST);
}
gl.clear(gl.STENCIL_BUFFER_BIT);
gl.stencilFunc(/** @type {number} */ (tmpStencilFunc),
/** @type {number} */ (tmpStencilRef), /** @type {number} */ (tmpStencilMaskVal));
gl.stencilMask(/** @type {number} */ (tmpStencilMask));
gl.stencilOp(/** @type {number} */ (tmpStencilOpFail),
/** @type {number} */ (tmpStencilOpZFail), /** @type {number} */ (tmpStencilOpPass));
}
return result;
}
/**
* @protected
* @param {WebGLRenderingContext} gl gl.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {number} start Start index.
* @param {number} end End index.
*/
drawElements(gl, context, start, end) {
const elementType = context.hasOESElementIndexUint ?
UNSIGNED_INT : UNSIGNED_SHORT;
const elementSize = context.hasOESElementIndexUint ? 4 : 2;
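    // Without the OES_element_index_uint extension indices are 16-bit, so a single
    // buffer can address at most 65536 vertices.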
const numItems = end - start;
const offsetInBytes = start * elementSize;
gl.drawElements(TRIANGLES, numItems, elementType, offsetInBytes);
}
}
export default WebGLReplay;

View File

@@ -1,338 +0,0 @@
/**
* @module ol/render/webgl/ReplayGroup
*/
import {numberSafeCompareFunction} from '../../array.js';
import {buffer, createOrUpdateFromCoordinate} from '../../extent.js';
import {isEmpty} from '../../obj.js';
import {ORDER} from '../replay.js';
import ReplayGroup from '../BuilderGroup.js';
import WebGLCircleReplay from './CircleReplay.js';
import WebGLImageReplay from './ImageReplay.js';
import WebGLLineStringReplay from './LineStringReplay.js';
import WebGLPolygonReplay from './PolygonReplay.js';
import WebGLTextReplay from './TextReplay.js';
/**
* @type {Array<number>}
*/
const HIT_DETECTION_SIZE = [1, 1];
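// Hit detection renders into a 1x1 pixel framebuffer centered on the queried
// coordinate; a feature counts as hit when readPixels() returns a non-zero alpha.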
/**
* @type {Object<import("../ReplayType.js").default, typeof import("./Replay.js").default>}
*/
const BATCH_CONSTRUCTORS = {
'Circle': WebGLCircleReplay,
'Image': WebGLImageReplay,
'LineString': WebGLLineStringReplay,
'Polygon': WebGLPolygonReplay,
'Text': WebGLTextReplay
};
class WebGLReplayGroup extends ReplayGroup {
/**
* @param {number} tolerance Tolerance.
* @param {import("../../extent.js").Extent} maxExtent Max extent.
* @param {number=} opt_renderBuffer Render buffer.
*/
constructor(tolerance, maxExtent, opt_renderBuffer) {
super();
/**
* @type {import("../../extent.js").Extent}
* @private
*/
this.maxExtent_ = maxExtent;
/**
* @type {number}
* @private
*/
this.tolerance_ = tolerance;
/**
* @type {number|undefined}
* @private
*/
this.renderBuffer_ = opt_renderBuffer;
/**
* @private
* @type {!Object<string,
* Object<import("../ReplayType.js").default, import("./Replay.js").default>>}
*/
this.replaysByZIndex_ = {};
}
/**
* @inheritDoc
*/
addDeclutter(group) {
return [];
}
/**
* @param {import("../../webgl/Context.js").default} context WebGL context.
* @return {function()} Delete resources function.
*/
getDeleteResourcesFunction(context) {
const functions = [];
let zKey;
for (zKey in this.replaysByZIndex_) {
const replays = this.replaysByZIndex_[zKey];
for (const replayKey in replays) {
functions.push(
replays[replayKey].getDeleteResourcesFunction(context));
}
}
return function() {
const length = functions.length;
let result;
for (let i = 0; i < length; i++) {
result = functions[i].apply(this, arguments);
}
return result;
};
}
/**
* @param {import("../../webgl/Context.js").default} context Context.
*/
finish(context) {
let zKey;
for (zKey in this.replaysByZIndex_) {
const replays = this.replaysByZIndex_[zKey];
for (const replayKey in replays) {
replays[replayKey].finish(context);
}
}
}
/**
* @inheritDoc
*/
getBuilder(zIndex, replayType) {
const zIndexKey = zIndex !== undefined ? zIndex.toString() : '0';
let replays = this.replaysByZIndex_[zIndexKey];
if (replays === undefined) {
replays = {};
this.replaysByZIndex_[zIndexKey] = replays;
}
let replay = replays[replayType];
if (replay === undefined) {
const Constructor = BATCH_CONSTRUCTORS[replayType];
replay = new Constructor(this.tolerance_, this.maxExtent_);
replays[replayType] = replay;
}
return replay;
}
/**
* @inheritDoc
*/
isEmpty() {
return isEmpty(this.replaysByZIndex_);
}
/**
* @param {import("../../webgl/Context.js").default} context Context.
* @param {import("../../coordinate.js").Coordinate} center Center.
* @param {number} resolution Resolution.
* @param {number} rotation Rotation.
* @param {import("../../size.js").Size} size Size.
* @param {number} pixelRatio Pixel ratio.
* @param {number} opacity Global opacity.
* @param {Object<string, boolean>} skippedFeaturesHash Ids of features to skip.
*/
replay(
context,
center,
resolution,
rotation,
size,
pixelRatio,
opacity,
skippedFeaturesHash
) {
/** @type {Array<number>} */
const zs = Object.keys(this.replaysByZIndex_).map(Number);
zs.sort(numberSafeCompareFunction);
let i, ii, j, jj, replays, replay;
for (i = 0, ii = zs.length; i < ii; ++i) {
replays = this.replaysByZIndex_[zs[i].toString()];
for (j = 0, jj = ORDER.length; j < jj; ++j) {
replay = replays[ORDER[j]];
if (replay !== undefined) {
replay.replay(context,
center, resolution, rotation, size, pixelRatio,
opacity, skippedFeaturesHash,
undefined, false);
}
}
}
}
/**
* @private
* @param {import("../../webgl/Context.js").default} context Context.
* @param {import("../../coordinate.js").Coordinate} center Center.
* @param {number} resolution Resolution.
* @param {number} rotation Rotation.
* @param {import("../../size.js").Size} size Size.
* @param {number} pixelRatio Pixel ratio.
* @param {number} opacity Global opacity.
* @param {Object<string, boolean>} skippedFeaturesHash Ids of features to skip.
* @param {function((import("../../Feature.js").default|import("../Feature.js").default)): T|undefined} featureCallback Feature callback.
   * @param {boolean} oneByOne Draw features one-by-one for the hit-detection.
* @param {import("../../extent.js").Extent=} opt_hitExtent Hit extent: Only features intersecting
* this extent are checked.
* @return {T|undefined} Callback result.
* @template T
*/
replayHitDetection_(
context,
center,
resolution,
rotation,
size,
pixelRatio,
opacity,
skippedFeaturesHash,
featureCallback,
oneByOne,
opt_hitExtent
) {
/** @type {Array<number>} */
const zs = Object.keys(this.replaysByZIndex_).map(Number);
zs.sort(function(a, b) {
return b - a;
});
let i, ii, j, replays, replay, result;
for (i = 0, ii = zs.length; i < ii; ++i) {
replays = this.replaysByZIndex_[zs[i].toString()];
for (j = ORDER.length - 1; j >= 0; --j) {
replay = replays[ORDER[j]];
if (replay !== undefined) {
result = replay.replay(context,
center, resolution, rotation, size, pixelRatio, opacity,
skippedFeaturesHash, featureCallback, oneByOne, opt_hitExtent);
if (result) {
return result;
}
}
}
}
return undefined;
}
/**
* @param {import("../../coordinate.js").Coordinate} coordinate Coordinate.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {import("../../coordinate.js").Coordinate} center Center.
* @param {number} resolution Resolution.
* @param {number} rotation Rotation.
* @param {import("../../size.js").Size} size Size.
* @param {number} pixelRatio Pixel ratio.
* @param {number} opacity Global opacity.
* @param {Object<string, boolean>} skippedFeaturesHash Ids of features to skip.
* @param {function((import("../../Feature.js").default|import("../Feature.js").default)): T|undefined} callback Feature callback.
* @return {T|undefined} Callback result.
* @template T
*/
forEachFeatureAtCoordinate(
coordinate,
context,
center,
resolution,
rotation,
size,
pixelRatio,
opacity,
skippedFeaturesHash,
callback
) {
const gl = context.getGL();
gl.bindFramebuffer(
gl.FRAMEBUFFER, context.getHitDetectionFramebuffer());
/**
* @type {import("../../extent.js").Extent}
*/
let hitExtent;
if (this.renderBuffer_ !== undefined) {
// build an extent around the coordinate, so that only features that
// intersect this extent are checked
hitExtent = buffer(createOrUpdateFromCoordinate(coordinate), resolution * this.renderBuffer_);
}
return this.replayHitDetection_(context,
coordinate, resolution, rotation, HIT_DETECTION_SIZE,
pixelRatio, opacity, skippedFeaturesHash,
/**
* @param {import("../../Feature.js").default|import("../Feature.js").default} feature Feature.
* @return {?} Callback result.
*/
function(feature) {
const imageData = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, imageData);
if (imageData[3] > 0) {
const result = callback(feature);
if (result) {
return result;
}
}
}, true, hitExtent);
}
/**
* @param {import("../../coordinate.js").Coordinate} coordinate Coordinate.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {import("../../coordinate.js").Coordinate} center Center.
* @param {number} resolution Resolution.
* @param {number} rotation Rotation.
* @param {import("../../size.js").Size} size Size.
* @param {number} pixelRatio Pixel ratio.
* @param {number} opacity Global opacity.
* @param {Object<string, boolean>} skippedFeaturesHash Ids of features to skip.
* @return {boolean} Is there a feature at the given coordinate?
*/
hasFeatureAtCoordinate(
coordinate,
context,
center,
resolution,
rotation,
size,
pixelRatio,
opacity,
skippedFeaturesHash
) {
const gl = context.getGL();
gl.bindFramebuffer(
gl.FRAMEBUFFER, context.getHitDetectionFramebuffer());
const hasFeature = this.replayHitDetection_(context,
coordinate, resolution, rotation, HIT_DETECTION_SIZE,
pixelRatio, opacity, skippedFeaturesHash,
/**
* @param {import("../../Feature.js").default|import("../Feature.js").default} feature Feature.
* @return {boolean} Is there a feature?
*/
function(feature) {
const imageData = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, imageData);
return imageData[3] > 0;
}, false);
return hasFeature !== undefined;
}
}
export default WebGLReplayGroup;

View File

@@ -1,458 +0,0 @@
/**
* @module ol/render/webgl/TextReplay
*/
import {getUid} from '../../util.js';
import {asColorLike} from '../../colorlike.js';
import {createCanvasContext2D} from '../../dom.js';
import GeometryType from '../../geom/GeometryType.js';
import {CANVAS_LINE_DASH} from '../../has.js';
import {TEXT_ALIGN} from '../replay.js';
import {DEFAULT_FILLSTYLE, DEFAULT_FONT, DEFAULT_LINECAP, DEFAULT_LINEDASH,
DEFAULT_LINEDASHOFFSET, DEFAULT_LINEJOIN, DEFAULT_LINEWIDTH, DEFAULT_MITERLIMIT,
DEFAULT_STROKESTYLE, DEFAULT_TEXTALIGN, DEFAULT_TEXTBASELINE} from '../webgl.js';
import WebGLTextureReplay from './TextureReplay.js';
import AtlasManager from '../../style/AtlasManager.js';
import WebGLBuffer from '../../webgl/Buffer.js';
/**
* @typedef {Object} GlyphAtlas
* @property {import("../../style/AtlasManager.js").default} atlas
* @property {Object<string, number>} width
* @property {number} height
*/
class WebGLTextReplay extends WebGLTextureReplay {
/**
* @param {number} tolerance Tolerance.
* @param {import("../../extent.js").Extent} maxExtent Max extent.
*/
constructor(tolerance, maxExtent) {
super(tolerance, maxExtent);
/**
* @private
* @type {Array<HTMLCanvasElement>}
*/
this.images_ = [];
/**
* @private
* @type {Array<WebGLTexture>}
*/
this.textures_ = [];
/**
* @private
* @type {HTMLCanvasElement}
*/
this.measureCanvas_ = createCanvasContext2D(0, 0).canvas;
/**
* @private
* @type {{strokeColor: (import("../../colorlike.js").ColorLike|null),
* lineCap: (string|undefined),
* lineDash: Array<number>,
* lineDashOffset: (number|undefined),
* lineJoin: (string|undefined),
* lineWidth: number,
* miterLimit: (number|undefined),
* fillColor: (import("../../colorlike.js").ColorLike|null),
* font: (string|undefined),
* scale: (number|undefined)}}
*/
this.state_ = {
strokeColor: null,
lineCap: undefined,
lineDash: null,
lineDashOffset: undefined,
lineJoin: undefined,
lineWidth: 0,
miterLimit: undefined,
fillColor: null,
font: undefined,
scale: undefined
};
/**
* @private
* @type {string}
*/
this.text_ = '';
/**
* @private
* @type {number|undefined}
*/
this.textAlign_ = undefined;
/**
* @private
* @type {number|undefined}
*/
this.textBaseline_ = undefined;
/**
* @private
* @type {number|undefined}
*/
this.offsetX_ = undefined;
/**
* @private
* @type {number|undefined}
*/
this.offsetY_ = undefined;
/**
* @private
* @type {Object<string, GlyphAtlas>}
*/
this.atlases_ = {};
/**
* @private
* @type {GlyphAtlas|undefined}
*/
this.currAtlas_ = undefined;
this.scale = 1;
this.opacity = 1;
}
/**
* @inheritDoc
*/
drawText(geometry, feature) {
if (this.text_) {
let flatCoordinates = null;
const offset = 0;
let end = 2;
let stride = 2;
switch (geometry.getType()) {
case GeometryType.POINT:
case GeometryType.MULTI_POINT:
flatCoordinates = geometry.getFlatCoordinates();
end = flatCoordinates.length;
stride = geometry.getStride();
break;
case GeometryType.CIRCLE:
flatCoordinates = /** @type {import("../../geom/Circle.js").default} */ (geometry).getCenter();
break;
case GeometryType.LINE_STRING:
flatCoordinates = /** @type {import("../../geom/LineString.js").default} */ (geometry).getFlatMidpoint();
break;
case GeometryType.MULTI_LINE_STRING:
flatCoordinates = /** @type {import("../../geom/MultiLineString.js").default} */ (geometry).getFlatMidpoints();
end = flatCoordinates.length;
break;
case GeometryType.POLYGON:
flatCoordinates = /** @type {import("../../geom/Polygon.js").default} */ (geometry).getFlatInteriorPoint();
break;
case GeometryType.MULTI_POLYGON:
flatCoordinates = /** @type {import("../../geom/MultiPolygon.js").default} */ (geometry).getFlatInteriorPoints();
end = flatCoordinates.length;
break;
default:
}
this.startIndices.push(this.indices.length);
this.startIndicesFeature.push(feature);
const glyphAtlas = this.currAtlas_;
const lines = this.text_.split('\n');
const textSize = this.getTextSize_(lines);
let i, ii, j, jj, currX, currY, charArr, charInfo;
const anchorX = Math.round(textSize[0] * this.textAlign_ - this.offsetX_);
const anchorY = Math.round(textSize[1] * this.textBaseline_ - this.offsetY_);
const lineWidth = (this.state_.lineWidth / 2) * this.state_.scale;
for (i = 0, ii = lines.length; i < ii; ++i) {
currX = 0;
currY = glyphAtlas.height * i;
charArr = lines[i].split('');
for (j = 0, jj = charArr.length; j < jj; ++j) {
charInfo = glyphAtlas.atlas.getInfo(charArr[j]);
if (charInfo) {
const image = charInfo.image;
this.anchorX = anchorX - currX;
this.anchorY = anchorY - currY;
this.originX = j === 0 ? charInfo.offsetX - lineWidth : charInfo.offsetX;
this.originY = charInfo.offsetY;
this.height = glyphAtlas.height;
this.width = j === 0 || j === charArr.length - 1 ?
glyphAtlas.width[charArr[j]] + lineWidth : glyphAtlas.width[charArr[j]];
this.imageHeight = image.height;
this.imageWidth = image.width;
if (this.images_.length === 0) {
this.images_.push(image);
} else {
const currentImage = this.images_[this.images_.length - 1];
if (getUid(currentImage) != getUid(image)) {
this.groupIndices.push(this.indices.length);
this.images_.push(image);
}
}
this.drawText_(flatCoordinates, offset, end, stride);
}
currX += this.width;
}
}
}
}
/**
* @private
* @param {Array<string>} lines Label to draw split to lines.
* @return {Array<number>} Size of the label in pixels.
*/
getTextSize_(lines) {
const self = this;
const glyphAtlas = this.currAtlas_;
const textHeight = lines.length * glyphAtlas.height;
    //Split every line into an array of chars, sum up their widths, and select the widest.
const textWidth = lines.map(function(str) {
let sum = 0;
for (let i = 0, ii = str.length; i < ii; ++i) {
const curr = str[i];
if (!glyphAtlas.width[curr]) {
self.addCharToAtlas_(curr);
}
sum += glyphAtlas.width[curr] ? glyphAtlas.width[curr] : 0;
}
return sum;
}).reduce(function(max, curr) {
return Math.max(max, curr);
});
return [textWidth, textHeight];
}
/**
* @private
* @param {Array<number>} flatCoordinates Flat coordinates.
* @param {number} offset Offset.
* @param {number} end End.
* @param {number} stride Stride.
*/
drawText_(flatCoordinates, offset, end, stride) {
for (let i = offset, ii = end; i < ii; i += stride) {
this.drawCoordinates(flatCoordinates, offset, end, stride);
}
}
/**
* @private
* @param {string} char Character.
*/
addCharToAtlas_(char) {
if (char.length === 1) {
const glyphAtlas = this.currAtlas_;
const state = this.state_;
const mCtx = this.measureCanvas_.getContext('2d');
mCtx.font = state.font;
const width = Math.ceil(mCtx.measureText(char).width * state.scale);
const info = glyphAtlas.atlas.add(char, width, glyphAtlas.height,
function(ctx, x, y) {
//Parameterize the canvas
ctx.font = /** @type {string} */ (state.font);
ctx.fillStyle = state.fillColor;
ctx.strokeStyle = state.strokeColor;
ctx.lineWidth = state.lineWidth;
ctx.lineCap = /** @type {CanvasLineCap} */ (state.lineCap);
ctx.lineJoin = /** @type {CanvasLineJoin} */ (state.lineJoin);
ctx.miterLimit = /** @type {number} */ (state.miterLimit);
ctx.textAlign = 'left';
ctx.textBaseline = 'top';
if (CANVAS_LINE_DASH && state.lineDash) {
//FIXME: use pixelRatio
ctx.setLineDash(state.lineDash);
ctx.lineDashOffset = /** @type {number} */ (state.lineDashOffset);
}
if (state.scale !== 1) {
//FIXME: use pixelRatio
ctx.setTransform(/** @type {number} */ (state.scale), 0, 0,
/** @type {number} */ (state.scale), 0, 0);
}
//Draw the character on the canvas
if (state.strokeColor) {
ctx.strokeText(char, x, y);
}
if (state.fillColor) {
ctx.fillText(char, x, y);
}
});
if (info) {
glyphAtlas.width[char] = width;
}
}
}
/**
* @inheritDoc
*/
finish(context) {
const gl = context.getGL();
this.groupIndices.push(this.indices.length);
this.hitDetectionGroupIndices = this.groupIndices;
// create, bind, and populate the vertices buffer
this.verticesBuffer = new WebGLBuffer(this.vertices);
// create, bind, and populate the indices buffer
this.indicesBuffer = new WebGLBuffer(this.indices);
// create textures
/** @type {Object<string, WebGLTexture>} */
const texturePerImage = {};
this.createTextures(this.textures_, this.images_, texturePerImage, gl);
this.state_ = {
strokeColor: null,
lineCap: undefined,
lineDash: null,
lineDashOffset: undefined,
lineJoin: undefined,
lineWidth: 0,
miterLimit: undefined,
fillColor: null,
font: undefined,
scale: undefined
};
this.text_ = '';
this.textAlign_ = undefined;
this.textBaseline_ = undefined;
this.offsetX_ = undefined;
this.offsetY_ = undefined;
this.images_ = null;
this.atlases_ = {};
this.currAtlas_ = undefined;
super.finish(context);
}
/**
* @inheritDoc
*/
setTextStyle(textStyle) {
const state = this.state_;
const textFillStyle = textStyle.getFill();
const textStrokeStyle = textStyle.getStroke();
if (!textStyle || !textStyle.getText() || (!textFillStyle && !textStrokeStyle)) {
this.text_ = '';
} else {
if (!textFillStyle) {
state.fillColor = null;
} else {
const textFillStyleColor = textFillStyle.getColor();
state.fillColor = asColorLike(textFillStyleColor ?
textFillStyleColor : DEFAULT_FILLSTYLE);
}
if (!textStrokeStyle) {
state.strokeColor = null;
state.lineWidth = 0;
} else {
const textStrokeStyleColor = textStrokeStyle.getColor();
state.strokeColor = asColorLike(textStrokeStyleColor ?
textStrokeStyleColor : DEFAULT_STROKESTYLE);
state.lineWidth = textStrokeStyle.getWidth() || DEFAULT_LINEWIDTH;
state.lineCap = textStrokeStyle.getLineCap() || DEFAULT_LINECAP;
state.lineDashOffset = textStrokeStyle.getLineDashOffset() || DEFAULT_LINEDASHOFFSET;
state.lineJoin = textStrokeStyle.getLineJoin() || DEFAULT_LINEJOIN;
state.miterLimit = textStrokeStyle.getMiterLimit() || DEFAULT_MITERLIMIT;
const lineDash = textStrokeStyle.getLineDash();
state.lineDash = lineDash ? lineDash.slice() : DEFAULT_LINEDASH;
}
state.font = textStyle.getFont() || DEFAULT_FONT;
state.scale = textStyle.getScale() || 1;
this.text_ = /** @type {string} */ (textStyle.getText());
const textAlign = TEXT_ALIGN[textStyle.getTextAlign()];
const textBaseline = TEXT_ALIGN[textStyle.getTextBaseline()];
this.textAlign_ = textAlign === undefined ?
DEFAULT_TEXTALIGN : textAlign;
this.textBaseline_ = textBaseline === undefined ?
DEFAULT_TEXTBASELINE : textBaseline;
this.offsetX_ = textStyle.getOffsetX() || 0;
this.offsetY_ = textStyle.getOffsetY() || 0;
this.rotateWithView = !!textStyle.getRotateWithView();
this.rotation = textStyle.getRotation() || 0;
this.currAtlas_ = this.getAtlas_(state);
}
}
/**
* @private
* @param {Object} state Font attributes.
* @return {GlyphAtlas} Glyph atlas.
*/
getAtlas_(state) {
let params = [];
for (const i in state) {
if (state[i] || state[i] === 0) {
if (Array.isArray(state[i])) {
params = params.concat(state[i]);
} else {
params.push(state[i]);
}
}
}
const hash = this.calculateHash_(params);
if (!this.atlases_[hash]) {
const mCtx = this.measureCanvas_.getContext('2d');
mCtx.font = state.font;
const height = Math.ceil((mCtx.measureText('M').width * 1.5 +
state.lineWidth / 2) * state.scale);
this.atlases_[hash] = {
atlas: new AtlasManager({
space: state.lineWidth + 1
}),
width: {},
height: height
};
}
return this.atlases_[hash];
}
/**
* @private
* @param {Array<string|number>} params Array of parameters.
* @return {string} Hash string.
*/
calculateHash_(params) {
//TODO: Create a more performant, reliable, general hash function.
let hash = '';
for (let i = 0, ii = params.length; i < ii; ++i) {
hash += params[i];
}
return hash;
}
/**
* @inheritDoc
*/
getTextures(opt_all) {
return this.textures_;
}
/**
* @inheritDoc
*/
getHitDetectionTextures() {
return this.textures_;
}
}
export default WebGLTextReplay;

View File

@@ -1,480 +0,0 @@
/**
* @module ol/render/webgl/TextureReplay
*/
import {abstract, getUid} from '../../util.js';
import {intersects} from '../../extent.js';
import {isEmpty} from '../../obj.js';
import {fragment, vertex} from './texturereplay/defaultshader.js';
import Locations from './texturereplay/defaultshader/Locations.js';
import WebGLReplay from './Replay.js';
import {CLAMP_TO_EDGE, FLOAT, TEXTURE_2D} from '../../webgl.js';
import {createTexture} from '../../webgl/Context.js';
class WebGLTextureReplay extends WebGLReplay {
/**
* @param {number} tolerance Tolerance.
* @param {import("../../extent.js").Extent} maxExtent Max extent.
*/
constructor(tolerance, maxExtent) {
super(tolerance, maxExtent);
/**
* @type {number|undefined}
* @protected
*/
this.anchorX = undefined;
/**
* @type {number|undefined}
* @protected
*/
this.anchorY = undefined;
/**
* @type {Array<number>}
* @protected
*/
this.groupIndices = [];
/**
* @type {Array<number>}
* @protected
*/
this.hitDetectionGroupIndices = [];
/**
* @type {number|undefined}
* @protected
*/
this.height = undefined;
/**
* @type {number|undefined}
* @protected
*/
this.imageHeight = undefined;
/**
* @type {number|undefined}
* @protected
*/
this.imageWidth = undefined;
/**
* @protected
* @type {import("./texturereplay/defaultshader/Locations.js").default}
*/
this.defaultLocations = null;
/**
* @protected
* @type {number|undefined}
*/
this.opacity = undefined;
/**
* @type {number|undefined}
* @protected
*/
this.originX = undefined;
/**
* @type {number|undefined}
* @protected
*/
this.originY = undefined;
/**
* @protected
* @type {boolean|undefined}
*/
this.rotateWithView = undefined;
/**
* @protected
* @type {number|undefined}
*/
this.rotation = undefined;
/**
* @protected
* @type {number|undefined}
*/
this.scale = undefined;
/**
* @type {number|undefined}
* @protected
*/
this.width = undefined;
}
/**
* @inheritDoc
*/
getDeleteResourcesFunction(context) {
const verticesBuffer = this.verticesBuffer;
const indicesBuffer = this.indicesBuffer;
const textures = this.getTextures(true);
const gl = context.getGL();
return function() {
if (!gl.isContextLost()) {
let i, ii;
for (i = 0, ii = textures.length; i < ii; ++i) {
gl.deleteTexture(textures[i]);
}
}
context.deleteBuffer(verticesBuffer);
context.deleteBuffer(indicesBuffer);
};
}
/**
* @param {Array<number>} flatCoordinates Flat coordinates.
* @param {number} offset Offset.
* @param {number} end End.
* @param {number} stride Stride.
   * @return {number} Updated vertex counter.
* @protected
*/
drawCoordinates(flatCoordinates, offset, end, stride) {
const anchorX = /** @type {number} */ (this.anchorX);
const anchorY = /** @type {number} */ (this.anchorY);
const height = /** @type {number} */ (this.height);
const imageHeight = /** @type {number} */ (this.imageHeight);
const imageWidth = /** @type {number} */ (this.imageWidth);
const opacity = /** @type {number} */ (this.opacity);
const originX = /** @type {number} */ (this.originX);
const originY = /** @type {number} */ (this.originY);
const rotateWithView = this.rotateWithView ? 1.0 : 0.0;
    // this.rotation is anti-clockwise, but rotation is clockwise
const rotation = /** @type {number} */ (-this.rotation);
const scale = /** @type {number} */ (this.scale);
const width = /** @type {number} */ (this.width);
const cos = Math.cos(rotation);
const sin = Math.sin(rotation);
let numIndices = this.indices.length;
let numVertices = this.vertices.length;
let i, n, offsetX, offsetY, x, y;
for (i = offset; i < end; i += stride) {
x = flatCoordinates[i] - this.origin[0];
y = flatCoordinates[i + 1] - this.origin[1];
// There are 4 vertices per [x, y] point, one for each corner of the
// rectangle we're going to draw. We'd use 1 vertex per [x, y] point if
// WebGL supported Geometry Shaders (which can emit new vertices), but that
// is not currently the case.
//
// And each vertex includes 8 values: the x and y coordinates, the x and
// y offsets used to calculate the position of the corner, the u and
// v texture coordinates for the corner, the opacity, and whether the
      // image should be rotated with the view (rotateWithView).
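      // In setUpProgram() below this maps to a 32-byte stride: position at byte offset 0,
      // screen-space offsets at 8, texture coordinates at 16, opacity at 24 and
      // rotateWithView at 28.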
n = numVertices / 8;
// bottom-left corner
offsetX = -scale * anchorX;
offsetY = -scale * (height - anchorY);
this.vertices[numVertices++] = x;
this.vertices[numVertices++] = y;
this.vertices[numVertices++] = offsetX * cos - offsetY * sin;
this.vertices[numVertices++] = offsetX * sin + offsetY * cos;
this.vertices[numVertices++] = originX / imageWidth;
this.vertices[numVertices++] = (originY + height) / imageHeight;
this.vertices[numVertices++] = opacity;
this.vertices[numVertices++] = rotateWithView;
// bottom-right corner
offsetX = scale * (width - anchorX);
offsetY = -scale * (height - anchorY);
this.vertices[numVertices++] = x;
this.vertices[numVertices++] = y;
this.vertices[numVertices++] = offsetX * cos - offsetY * sin;
this.vertices[numVertices++] = offsetX * sin + offsetY * cos;
this.vertices[numVertices++] = (originX + width) / imageWidth;
this.vertices[numVertices++] = (originY + height) / imageHeight;
this.vertices[numVertices++] = opacity;
this.vertices[numVertices++] = rotateWithView;
// top-right corner
offsetX = scale * (width - anchorX);
offsetY = scale * anchorY;
this.vertices[numVertices++] = x;
this.vertices[numVertices++] = y;
this.vertices[numVertices++] = offsetX * cos - offsetY * sin;
this.vertices[numVertices++] = offsetX * sin + offsetY * cos;
this.vertices[numVertices++] = (originX + width) / imageWidth;
this.vertices[numVertices++] = originY / imageHeight;
this.vertices[numVertices++] = opacity;
this.vertices[numVertices++] = rotateWithView;
// top-left corner
offsetX = -scale * anchorX;
offsetY = scale * anchorY;
this.vertices[numVertices++] = x;
this.vertices[numVertices++] = y;
this.vertices[numVertices++] = offsetX * cos - offsetY * sin;
this.vertices[numVertices++] = offsetX * sin + offsetY * cos;
this.vertices[numVertices++] = originX / imageWidth;
this.vertices[numVertices++] = originY / imageHeight;
this.vertices[numVertices++] = opacity;
this.vertices[numVertices++] = rotateWithView;
this.indices[numIndices++] = n;
this.indices[numIndices++] = n + 1;
this.indices[numIndices++] = n + 2;
this.indices[numIndices++] = n;
this.indices[numIndices++] = n + 2;
this.indices[numIndices++] = n + 3;
}
return numVertices;
}
/**
* @protected
* @param {Array<WebGLTexture>} textures Textures.
* @param {Array<HTMLCanvasElement|HTMLImageElement|HTMLVideoElement>} images Images.
* @param {!Object<string, WebGLTexture>} texturePerImage Texture cache.
* @param {WebGLRenderingContext} gl Gl.
*/
createTextures(textures, images, texturePerImage, gl) {
let texture, image, uid, i;
const ii = images.length;
for (i = 0; i < ii; ++i) {
image = images[i];
uid = getUid(image);
if (uid in texturePerImage) {
texture = texturePerImage[uid];
} else {
texture = createTexture(
gl, image, CLAMP_TO_EDGE, CLAMP_TO_EDGE);
texturePerImage[uid] = texture;
}
textures[i] = texture;
}
}
/**
* @inheritDoc
*/
setUpProgram(gl, context, size, pixelRatio) {
// get the program
const program = context.getProgram(fragment, vertex);
// get the locations
let locations;
if (!this.defaultLocations) {
locations = new Locations(gl, program);
this.defaultLocations = locations;
} else {
locations = this.defaultLocations;
}
// use the program (FIXME: use the return value)
context.useProgram(program);
// enable the vertex attrib arrays
gl.enableVertexAttribArray(locations.a_position);
gl.vertexAttribPointer(locations.a_position, 2, FLOAT,
false, 32, 0);
gl.enableVertexAttribArray(locations.a_offsets);
gl.vertexAttribPointer(locations.a_offsets, 2, FLOAT,
false, 32, 8);
gl.enableVertexAttribArray(locations.a_texCoord);
gl.vertexAttribPointer(locations.a_texCoord, 2, FLOAT,
false, 32, 16);
gl.enableVertexAttribArray(locations.a_opacity);
gl.vertexAttribPointer(locations.a_opacity, 1, FLOAT,
false, 32, 24);
gl.enableVertexAttribArray(locations.a_rotateWithView);
gl.vertexAttribPointer(locations.a_rotateWithView, 1, FLOAT,
false, 32, 28);
return locations;
}
/**
* @inheritDoc
*/
shutDownProgram(gl, locations) {
gl.disableVertexAttribArray(locations.a_position);
gl.disableVertexAttribArray(locations.a_offsets);
gl.disableVertexAttribArray(locations.a_texCoord);
gl.disableVertexAttribArray(locations.a_opacity);
gl.disableVertexAttribArray(locations.a_rotateWithView);
}
/**
* @inheritDoc
*/
drawReplay(gl, context, skippedFeaturesHash, hitDetection) {
const textures = hitDetection ? this.getHitDetectionTextures() : this.getTextures();
const groupIndices = hitDetection ? this.hitDetectionGroupIndices : this.groupIndices;
if (!isEmpty(skippedFeaturesHash)) {
this.drawReplaySkipping(gl, context, skippedFeaturesHash, textures, groupIndices);
} else {
let i, ii, start;
for (i = 0, ii = textures.length, start = 0; i < ii; ++i) {
gl.bindTexture(TEXTURE_2D, textures[i]);
const end = groupIndices[i];
this.drawElements(gl, context, start, end);
start = end;
}
}
}
/**
* Draw the replay while paying attention to skipped features.
*
   * This function creates groups of features that can be drawn together,
* so that the number of `drawElements` calls is minimized.
*
* For example given the following texture groups:
*
* Group 1: A B C
* Group 2: D [E] F G
*
* If feature E should be skipped, the following `drawElements` calls will be
* made:
*
* drawElements with feature A, B and C
* drawElements with feature D
* drawElements with feature F and G
*
* @protected
* @param {WebGLRenderingContext} gl gl.
* @param {import("../../webgl/Context.js").default} context Context.
* @param {Object<string, boolean>} skippedFeaturesHash Ids of features
* to skip.
* @param {Array<WebGLTexture>} textures Textures.
* @param {Array<number>} groupIndices Texture group indices.
*/
drawReplaySkipping(gl, context, skippedFeaturesHash, textures, groupIndices) {
let featureIndex = 0;
let i, ii;
for (i = 0, ii = textures.length; i < ii; ++i) {
gl.bindTexture(TEXTURE_2D, textures[i]);
const groupStart = (i > 0) ? groupIndices[i - 1] : 0;
const groupEnd = groupIndices[i];
let start = groupStart;
let end = groupStart;
while (featureIndex < this.startIndices.length &&
this.startIndices[featureIndex] <= groupEnd) {
const feature = this.startIndicesFeature[featureIndex];
if (skippedFeaturesHash[getUid(feature)] !== undefined) {
// feature should be skipped
if (start !== end) {
// draw the features so far
this.drawElements(gl, context, start, end);
}
// continue with the next feature
start = (featureIndex === this.startIndices.length - 1) ?
groupEnd : this.startIndices[featureIndex + 1];
end = start;
} else {
// the feature is not skipped, augment the end index
end = (featureIndex === this.startIndices.length - 1) ?
groupEnd : this.startIndices[featureIndex + 1];
}
featureIndex++;
}
if (start !== end) {
// draw the remaining features (in case there was no skipped feature
// in this texture group, all features of a group are drawn together)
this.drawElements(gl, context, start, end);
}
}
}
/**
* @inheritDoc
*/
drawHitDetectionReplayOneByOne(gl, context, skippedFeaturesHash, featureCallback, opt_hitExtent) {
let i, groupStart, start, end, feature;
let featureIndex = this.startIndices.length - 1;
const hitDetectionTextures = this.getHitDetectionTextures();
for (i = hitDetectionTextures.length - 1; i >= 0; --i) {
gl.bindTexture(TEXTURE_2D, hitDetectionTextures[i]);
groupStart = (i > 0) ? this.hitDetectionGroupIndices[i - 1] : 0;
end = this.hitDetectionGroupIndices[i];
// draw all features for this texture group
while (featureIndex >= 0 &&
this.startIndices[featureIndex] >= groupStart) {
start = this.startIndices[featureIndex];
feature = this.startIndicesFeature[featureIndex];
if (skippedFeaturesHash[getUid(feature)] === undefined &&
feature.getGeometry() &&
(opt_hitExtent === undefined || intersects(
/** @type {Array<number>} */ (opt_hitExtent),
feature.getGeometry().getExtent()))) {
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
this.drawElements(gl, context, start, end);
const result = featureCallback(feature);
if (result) {
return result;
}
}
end = start;
featureIndex--;
}
}
return undefined;
}
/**
* @inheritDoc
*/
finish(context) {
this.anchorX = undefined;
this.anchorY = undefined;
this.height = undefined;
this.imageHeight = undefined;
this.imageWidth = undefined;
this.indices = null;
this.opacity = undefined;
this.originX = undefined;
this.originY = undefined;
this.rotateWithView = undefined;
this.rotation = undefined;
this.scale = undefined;
this.vertices = null;
this.width = undefined;
}
/**
* @abstract
* @protected
* @param {boolean=} opt_all Return hit detection textures with regular ones.
* @return {Array<WebGLTexture>} Textures.
*/
getTextures(opt_all) {
return abstract();
}
/**
* @abstract
* @protected
* @return {Array<WebGLTexture>} Textures.
*/
getHitDetectionTextures() {
return abstract();
}
}
export default WebGLTextureReplay;
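The quad construction above writes eight floats per corner, which is why `setUpProgram` binds every attribute with a 32-byte stride (8 floats × 4 bytes) and byte offsets 0, 8, 16, 24 and 28. A minimal sketch of that interleaved layout, with purely illustrative names (`buildCorner` is not part of the removed class):

// One corner of an icon quad: 8 floats = 32 bytes, matching the
// vertexAttribPointer(..., stride = 32, offset = ...) calls in setUpProgram.
function buildCorner(x, y, offsetX, offsetY, u, v, opacity, rotateWithView) {
  return [
    x, y,                    // a_position       (byte offset 0)
    offsetX, offsetY,        // a_offsets        (byte offset 8)
    u, v,                    // a_texCoord       (byte offset 16)
    opacity,                 // a_opacity        (byte offset 24)
    rotateWithView ? 1 : 0   // a_rotateWithView (byte offset 28)
  ];
}

// Four corners per point, two triangles per quad (indices n, n+1, n+2 and n, n+2, n+3).
const vertices = new Float32Array([].concat(
  buildCorner(0, 0, -8, -8, 0, 1, 1, true),  // bottom-left
  buildCorner(0, 0, 8, -8, 1, 1, 1, true),   // bottom-right
  buildCorner(0, 0, 8, 8, 1, 0, 1, true),    // top-right
  buildCorner(0, 0, -8, 8, 0, 0, 1, true)    // top-left
));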

View File

@@ -1,100 +0,0 @@
//! MODULE=ol/render/webgl/circlereplay/defaultshader
//! COMMON
varying vec2 v_center;
varying vec2 v_offset;
varying float v_halfWidth;
varying float v_pixelRatio;
//! VERTEX
attribute vec2 a_position;
attribute float a_instruction;
attribute float a_radius;
uniform mat4 u_projectionMatrix;
uniform mat4 u_offsetScaleMatrix;
uniform mat4 u_offsetRotateMatrix;
uniform float u_lineWidth;
uniform float u_pixelRatio;
void main(void) {
mat4 offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;
v_center = vec4(u_projectionMatrix * vec4(a_position, 0.0, 1.0)).xy;
v_pixelRatio = u_pixelRatio;
float lineWidth = u_lineWidth * u_pixelRatio;
v_halfWidth = lineWidth / 2.0;
if (lineWidth == 0.0) {
lineWidth = 2.0 * u_pixelRatio;
}
vec2 offset;
  // Radius with antialiasing (roughly).
float radius = a_radius + 3.0 * u_pixelRatio;
// Until we get gl_VertexID in WebGL, we store an instruction.
if (a_instruction == 0.0) {
// Offsetting the edges of the triangle by lineWidth / 2 is necessary, however
// we should also leave some space for the antialiasing, thus we offset by lineWidth.
offset = vec2(-1.0, 1.0);
} else if (a_instruction == 1.0) {
offset = vec2(-1.0, -1.0);
} else if (a_instruction == 2.0) {
offset = vec2(1.0, -1.0);
} else {
offset = vec2(1.0, 1.0);
}
gl_Position = u_projectionMatrix * vec4(a_position + offset * radius, 0.0, 1.0) +
offsetMatrix * vec4(offset * lineWidth, 0.0, 0.0);
v_offset = vec4(u_projectionMatrix * vec4(a_position.x + a_radius, a_position.y,
0.0, 1.0)).xy;
if (distance(v_center, v_offset) > 20000.0) {
gl_Position = vec4(v_center, 0.0, 1.0);
}
}
//! FRAGMENT
uniform float u_opacity;
uniform vec4 u_fillColor;
uniform vec4 u_strokeColor;
uniform vec2 u_size;
void main(void) {
vec2 windowCenter = vec2((v_center.x + 1.0) / 2.0 * u_size.x * v_pixelRatio,
(v_center.y + 1.0) / 2.0 * u_size.y * v_pixelRatio);
vec2 windowOffset = vec2((v_offset.x + 1.0) / 2.0 * u_size.x * v_pixelRatio,
(v_offset.y + 1.0) / 2.0 * u_size.y * v_pixelRatio);
float radius = length(windowCenter - windowOffset);
float dist = length(windowCenter - gl_FragCoord.xy);
if (dist > radius + v_halfWidth) {
if (u_strokeColor.a == 0.0) {
gl_FragColor = u_fillColor;
} else {
gl_FragColor = u_strokeColor;
}
gl_FragColor.a = gl_FragColor.a - (dist - (radius + v_halfWidth));
} else if (u_fillColor.a == 0.0) {
// Hooray, no fill, just stroke. We can use real antialiasing.
gl_FragColor = u_strokeColor;
if (dist < radius - v_halfWidth) {
gl_FragColor.a = gl_FragColor.a - (radius - v_halfWidth - dist);
}
} else {
gl_FragColor = u_fillColor;
float strokeDist = radius - v_halfWidth;
float antialias = 2.0 * v_pixelRatio;
if (dist > strokeDist) {
gl_FragColor = u_strokeColor;
} else if (dist >= strokeDist - antialias) {
float step = smoothstep(strokeDist - antialias, strokeDist, dist);
gl_FragColor = mix(u_fillColor, u_strokeColor, step);
}
}
gl_FragColor.a = gl_FragColor.a * u_opacity;
if (gl_FragColor.a <= 0.0) {
discard;
}
}
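WebGL 1 has no `gl_VertexID`, so the vertex shader above reads a per-corner `a_instruction` attribute (0–3) to decide in which direction to expand the quad around the circle centre. On the CPU side that implies four vertices per circle, laid out roughly as follows (a sketch only; this is not the removed CircleReplay code):

// Each vertex carries [x, y, instruction, radius]; the shader maps
// instruction 0..3 to the corner offsets (-1,1), (-1,-1), (1,-1), (1,1).
function circleVertices(cx, cy, radius) {
  const data = [];
  for (let instruction = 0; instruction < 4; ++instruction) {
    data.push(cx, cy, instruction, radius);
  }
  return data;
}

// Example: one circle of radius 12 map units centred on [100, 200].
const buffer = new Float32Array(circleVertices(100, 200, 12));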

View File

@@ -1,17 +0,0 @@
/**
* @module ol/render/webgl/circlereplay/defaultshader
*/
// This file is automatically generated, do not edit.
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../webgl.js';
import WebGLFragment from '../../../webgl/Fragment.js';
import WebGLVertex from '../../../webgl/Vertex.js';
export const fragment = new WebGLFragment(DEBUG_WEBGL ?
'precision mediump float;\nvarying vec2 v_center;\nvarying vec2 v_offset;\nvarying float v_halfWidth;\nvarying float v_pixelRatio;\n\n\n\nuniform float u_opacity;\nuniform vec4 u_fillColor;\nuniform vec4 u_strokeColor;\nuniform vec2 u_size;\n\nvoid main(void) {\n vec2 windowCenter = vec2((v_center.x + 1.0) / 2.0 * u_size.x * v_pixelRatio,\n (v_center.y + 1.0) / 2.0 * u_size.y * v_pixelRatio);\n vec2 windowOffset = vec2((v_offset.x + 1.0) / 2.0 * u_size.x * v_pixelRatio,\n (v_offset.y + 1.0) / 2.0 * u_size.y * v_pixelRatio);\n float radius = length(windowCenter - windowOffset);\n float dist = length(windowCenter - gl_FragCoord.xy);\n if (dist > radius + v_halfWidth) {\n if (u_strokeColor.a == 0.0) {\n gl_FragColor = u_fillColor;\n } else {\n gl_FragColor = u_strokeColor;\n }\n gl_FragColor.a = gl_FragColor.a - (dist - (radius + v_halfWidth));\n } else if (u_fillColor.a == 0.0) {\n // Hooray, no fill, just stroke. We can use real antialiasing.\n gl_FragColor = u_strokeColor;\n if (dist < radius - v_halfWidth) {\n gl_FragColor.a = gl_FragColor.a - (radius - v_halfWidth - dist);\n }\n } else {\n gl_FragColor = u_fillColor;\n float strokeDist = radius - v_halfWidth;\n float antialias = 2.0 * v_pixelRatio;\n if (dist > strokeDist) {\n gl_FragColor = u_strokeColor;\n } else if (dist >= strokeDist - antialias) {\n float step = smoothstep(strokeDist - antialias, strokeDist, dist);\n gl_FragColor = mix(u_fillColor, u_strokeColor, step);\n }\n }\n gl_FragColor.a = gl_FragColor.a * u_opacity;\n if (gl_FragColor.a <= 0.0) {\n discard;\n }\n}\n' :
'precision mediump float;varying vec2 a;varying vec2 b;varying float c;varying float d;uniform float m;uniform vec4 n;uniform vec4 o;uniform vec2 p;void main(void){vec2 windowCenter=vec2((a.x+1.0)/2.0*p.x*d,(a.y+1.0)/2.0*p.y*d);vec2 windowOffset=vec2((b.x+1.0)/2.0*p.x*d,(b.y+1.0)/2.0*p.y*d);float radius=length(windowCenter-windowOffset);float dist=length(windowCenter-gl_FragCoord.xy);if(dist>radius+c){if(o.a==0.0){gl_FragColor=n;}else{gl_FragColor=o;}gl_FragColor.a=gl_FragColor.a-(dist-(radius+c));}else if(n.a==0.0){gl_FragColor=o;if(dist<radius-c){gl_FragColor.a=gl_FragColor.a-(radius-c-dist);}} else{gl_FragColor=n;float strokeDist=radius-c;float antialias=2.0*d;if(dist>strokeDist){gl_FragColor=o;}else if(dist>=strokeDist-antialias){float step=smoothstep(strokeDist-antialias,strokeDist,dist);gl_FragColor=mix(n,o,step);}} gl_FragColor.a=gl_FragColor.a*m;if(gl_FragColor.a<=0.0){discard;}}');
export const vertex = new WebGLVertex(DEBUG_WEBGL ?
'varying vec2 v_center;\nvarying vec2 v_offset;\nvarying float v_halfWidth;\nvarying float v_pixelRatio;\n\n\nattribute vec2 a_position;\nattribute float a_instruction;\nattribute float a_radius;\n\nuniform mat4 u_projectionMatrix;\nuniform mat4 u_offsetScaleMatrix;\nuniform mat4 u_offsetRotateMatrix;\nuniform float u_lineWidth;\nuniform float u_pixelRatio;\n\nvoid main(void) {\n mat4 offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;\n v_center = vec4(u_projectionMatrix * vec4(a_position, 0.0, 1.0)).xy;\n v_pixelRatio = u_pixelRatio;\n float lineWidth = u_lineWidth * u_pixelRatio;\n v_halfWidth = lineWidth / 2.0;\n if (lineWidth == 0.0) {\n lineWidth = 2.0 * u_pixelRatio;\n }\n vec2 offset;\n // Radius with anitaliasing (roughly).\n float radius = a_radius + 3.0 * u_pixelRatio;\n // Until we get gl_VertexID in WebGL, we store an instruction.\n if (a_instruction == 0.0) {\n // Offsetting the edges of the triangle by lineWidth / 2 is necessary, however\n // we should also leave some space for the antialiasing, thus we offset by lineWidth.\n offset = vec2(-1.0, 1.0);\n } else if (a_instruction == 1.0) {\n offset = vec2(-1.0, -1.0);\n } else if (a_instruction == 2.0) {\n offset = vec2(1.0, -1.0);\n } else {\n offset = vec2(1.0, 1.0);\n }\n\n gl_Position = u_projectionMatrix * vec4(a_position + offset * radius, 0.0, 1.0) +\n offsetMatrix * vec4(offset * lineWidth, 0.0, 0.0);\n v_offset = vec4(u_projectionMatrix * vec4(a_position.x + a_radius, a_position.y,\n 0.0, 1.0)).xy;\n\n if (distance(v_center, v_offset) > 20000.0) {\n gl_Position = vec4(v_center, 0.0, 1.0);\n }\n}\n\n\n' :
'varying vec2 a;varying vec2 b;varying float c;varying float d;attribute vec2 e;attribute float f;attribute float g;uniform mat4 h;uniform mat4 i;uniform mat4 j;uniform float k;uniform float l;void main(void){mat4 offsetMatrix=i*j;a=vec4(h*vec4(e,0.0,1.0)).xy;d=l;float lineWidth=k*l;c=lineWidth/2.0;if(lineWidth==0.0){lineWidth=2.0*l;}vec2 offset;float radius=g+3.0*l;if(f==0.0){offset=vec2(-1.0,1.0);}else if(f==1.0){offset=vec2(-1.0,-1.0);}else if(f==2.0){offset=vec2(1.0,-1.0);}else{offset=vec2(1.0,1.0);}gl_Position=h*vec4(e+offset*radius,0.0,1.0)+offsetMatrix*vec4(offset*lineWidth,0.0,0.0);b=vec4(h*vec4(e.x+g,e.y,0.0,1.0)).xy;if(distance(a,b)>20000.0){gl_Position=vec4(a,0.0,1.0);}}');

View File

@@ -1,93 +0,0 @@
/**
* @module ol/render/webgl/circlereplay/defaultshader/Locations
*/
// This file is automatically generated, do not edit
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../../webgl.js';
class Locations {
/**
* @param {WebGLRenderingContext} gl GL.
* @param {WebGLProgram} program Program.
*/
constructor(gl, program) {
/**
* @type {WebGLUniformLocation}
*/
this.u_projectionMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_projectionMatrix' : 'h');
/**
* @type {WebGLUniformLocation}
*/
this.u_offsetScaleMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_offsetScaleMatrix' : 'i');
/**
* @type {WebGLUniformLocation}
*/
this.u_offsetRotateMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_offsetRotateMatrix' : 'j');
/**
* @type {WebGLUniformLocation}
*/
this.u_lineWidth = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_lineWidth' : 'k');
/**
* @type {WebGLUniformLocation}
*/
this.u_pixelRatio = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_pixelRatio' : 'l');
/**
* @type {WebGLUniformLocation}
*/
this.u_opacity = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_opacity' : 'm');
/**
* @type {WebGLUniformLocation}
*/
this.u_fillColor = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_fillColor' : 'n');
/**
* @type {WebGLUniformLocation}
*/
this.u_strokeColor = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_strokeColor' : 'o');
/**
* @type {WebGLUniformLocation}
*/
this.u_size = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_size' : 'p');
/**
* @type {number}
*/
this.a_position = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_position' : 'e');
/**
* @type {number}
*/
this.a_instruction = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_instruction' : 'f');
/**
* @type {number}
*/
this.a_radius = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_radius' : 'g');
}
}
export default Locations;

View File

@@ -1,160 +0,0 @@
//! MODULE=ol/render/webgl/linestringreplay/defaultshader
//! COMMON
varying float v_round;
varying vec2 v_roundVertex;
varying float v_halfWidth;
//! VERTEX
attribute vec2 a_lastPos;
attribute vec2 a_position;
attribute vec2 a_nextPos;
attribute float a_direction;
uniform mat4 u_projectionMatrix;
uniform mat4 u_offsetScaleMatrix;
uniform mat4 u_offsetRotateMatrix;
uniform float u_lineWidth;
uniform float u_miterLimit;
bool nearlyEquals(in float value, in float ref) {
float epsilon = 0.000000000001;
return value >= ref - epsilon && value <= ref + epsilon;
}
void alongNormal(out vec2 offset, in vec2 nextP, in float turnDir, in float direction) {
vec2 dirVect = nextP - a_position;
vec2 normal = normalize(vec2(-turnDir * dirVect.y, turnDir * dirVect.x));
offset = u_lineWidth / 2.0 * normal * direction;
}
void miterUp(out vec2 offset, out float round, in bool isRound, in float direction) {
float halfWidth = u_lineWidth / 2.0;
vec2 tangent = normalize(normalize(a_nextPos - a_position) + normalize(a_position - a_lastPos));
vec2 normal = vec2(-tangent.y, tangent.x);
vec2 dirVect = a_nextPos - a_position;
vec2 tmpNormal = normalize(vec2(-dirVect.y, dirVect.x));
float miterLength = abs(halfWidth / dot(normal, tmpNormal));
offset = normal * direction * miterLength;
round = 0.0;
if (isRound) {
round = 1.0;
} else if (miterLength > u_miterLimit + u_lineWidth) {
offset = halfWidth * tmpNormal * direction;
}
}
bool miterDown(out vec2 offset, in vec4 projPos, in mat4 offsetMatrix, in float direction) {
bool degenerate = false;
vec2 tangent = normalize(normalize(a_nextPos - a_position) + normalize(a_position - a_lastPos));
vec2 normal = vec2(-tangent.y, tangent.x);
vec2 dirVect = a_lastPos - a_position;
vec2 tmpNormal = normalize(vec2(-dirVect.y, dirVect.x));
vec2 longOffset, shortOffset, longVertex;
vec4 shortProjVertex;
float halfWidth = u_lineWidth / 2.0;
if (length(a_nextPos - a_position) > length(a_lastPos - a_position)) {
longOffset = tmpNormal * direction * halfWidth;
shortOffset = normalize(vec2(dirVect.y, -dirVect.x)) * direction * halfWidth;
longVertex = a_nextPos;
shortProjVertex = u_projectionMatrix * vec4(a_lastPos, 0.0, 1.0);
} else {
shortOffset = tmpNormal * direction * halfWidth;
longOffset = normalize(vec2(dirVect.y, -dirVect.x)) * direction * halfWidth;
longVertex = a_lastPos;
shortProjVertex = u_projectionMatrix * vec4(a_nextPos, 0.0, 1.0);
}
  // Intersection algorithm based on theory by Paul Bourke (http://paulbourke.net/geometry/pointlineplane/).
vec4 p1 = u_projectionMatrix * vec4(longVertex, 0.0, 1.0) + offsetMatrix * vec4(longOffset, 0.0, 0.0);
vec4 p2 = projPos + offsetMatrix * vec4(longOffset, 0.0, 0.0);
vec4 p3 = shortProjVertex + offsetMatrix * vec4(-shortOffset, 0.0, 0.0);
vec4 p4 = shortProjVertex + offsetMatrix * vec4(shortOffset, 0.0, 0.0);
float denom = (p4.y - p3.y) * (p2.x - p1.x) - (p4.x - p3.x) * (p2.y - p1.y);
float firstU = ((p4.x - p3.x) * (p1.y - p3.y) - (p4.y - p3.y) * (p1.x - p3.x)) / denom;
float secondU = ((p2.x - p1.x) * (p1.y - p3.y) - (p2.y - p1.y) * (p1.x - p3.x)) / denom;
float epsilon = 0.000000000001;
if (firstU > epsilon && firstU < 1.0 - epsilon && secondU > epsilon && secondU < 1.0 - epsilon) {
shortProjVertex.x = p1.x + firstU * (p2.x - p1.x);
shortProjVertex.y = p1.y + firstU * (p2.y - p1.y);
offset = shortProjVertex.xy;
degenerate = true;
} else {
float miterLength = abs(halfWidth / dot(normal, tmpNormal));
offset = normal * direction * miterLength;
}
return degenerate;
}
void squareCap(out vec2 offset, out float round, in bool isRound, in vec2 nextP,
in float turnDir, in float direction) {
round = 0.0;
vec2 dirVect = a_position - nextP;
vec2 firstNormal = normalize(dirVect);
vec2 secondNormal = vec2(turnDir * firstNormal.y * direction, -turnDir * firstNormal.x * direction);
vec2 hypotenuse = normalize(firstNormal - secondNormal);
vec2 normal = vec2(turnDir * hypotenuse.y * direction, -turnDir * hypotenuse.x * direction);
float length = sqrt(v_halfWidth * v_halfWidth * 2.0);
offset = normal * length;
if (isRound) {
round = 1.0;
}
}
void main(void) {
bool degenerate = false;
float direction = float(sign(a_direction));
mat4 offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;
vec2 offset;
vec4 projPos = u_projectionMatrix * vec4(a_position, 0.0, 1.0);
bool round = nearlyEquals(mod(a_direction, 2.0), 0.0);
v_round = 0.0;
v_halfWidth = u_lineWidth / 2.0;
v_roundVertex = projPos.xy;
if (nearlyEquals(mod(a_direction, 3.0), 0.0) || nearlyEquals(mod(a_direction, 17.0), 0.0)) {
alongNormal(offset, a_nextPos, 1.0, direction);
} else if (nearlyEquals(mod(a_direction, 5.0), 0.0) || nearlyEquals(mod(a_direction, 13.0), 0.0)) {
alongNormal(offset, a_lastPos, -1.0, direction);
} else if (nearlyEquals(mod(a_direction, 23.0), 0.0)) {
miterUp(offset, v_round, round, direction);
} else if (nearlyEquals(mod(a_direction, 19.0), 0.0)) {
degenerate = miterDown(offset, projPos, offsetMatrix, direction);
} else if (nearlyEquals(mod(a_direction, 7.0), 0.0)) {
squareCap(offset, v_round, round, a_nextPos, 1.0, direction);
} else if (nearlyEquals(mod(a_direction, 11.0), 0.0)) {
squareCap(offset, v_round, round, a_lastPos, -1.0, direction);
}
if (!degenerate) {
vec4 offsets = offsetMatrix * vec4(offset, 0.0, 0.0);
gl_Position = projPos + offsets;
} else {
gl_Position = vec4(offset, 0.0, 1.0);
}
}
//! FRAGMENT
uniform float u_opacity;
uniform vec4 u_color;
uniform vec2 u_size;
uniform float u_pixelRatio;
void main(void) {
if (v_round > 0.0) {
vec2 windowCoords = vec2((v_roundVertex.x + 1.0) / 2.0 * u_size.x * u_pixelRatio,
(v_roundVertex.y + 1.0) / 2.0 * u_size.y * u_pixelRatio);
if (length(windowCoords - gl_FragCoord.xy) > v_halfWidth * u_pixelRatio) {
discard;
}
}
gl_FragColor = u_color;
float alpha = u_color.a * u_opacity;
if (alpha == 0.0) {
discard;
}
gl_FragColor.a = alpha;
}
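The `miterDown` routine above uses the classic segment–segment intersection parameterisation credited to Paul Bourke to detect and clip degenerate inner miters. For reference, the same computation in plain JavaScript (names are illustrative, not part of the library):

// Intersection of segments p1–p2 and p3–p4, or null when they do not cross
// strictly between their endpoints (mirrors the firstU/secondU test above).
function segmentIntersection(p1, p2, p3, p4, epsilon = 1e-12) {
  const denom = (p4[1] - p3[1]) * (p2[0] - p1[0]) - (p4[0] - p3[0]) * (p2[1] - p1[1]);
  if (denom === 0) {
    return null; // parallel or degenerate
  }
  const firstU = ((p4[0] - p3[0]) * (p1[1] - p3[1]) - (p4[1] - p3[1]) * (p1[0] - p3[0])) / denom;
  const secondU = ((p2[0] - p1[0]) * (p1[1] - p3[1]) - (p2[1] - p1[1]) * (p1[0] - p3[0])) / denom;
  if (firstU > epsilon && firstU < 1 - epsilon && secondU > epsilon && secondU < 1 - epsilon) {
    return [p1[0] + firstU * (p2[0] - p1[0]), p1[1] + firstU * (p2[1] - p1[1])];
  }
  return null;
}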

File diff suppressed because one or more lines are too long

View File

@@ -1,99 +0,0 @@
/**
* @module ol/render/webgl/linestringreplay/defaultshader/Locations
*/
// This file is automatically generated, do not edit
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../../webgl.js';
class Locations {
/**
* @param {WebGLRenderingContext} gl GL.
* @param {WebGLProgram} program Program.
*/
constructor(gl, program) {
/**
* @type {WebGLUniformLocation}
*/
this.u_projectionMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_projectionMatrix' : 'h');
/**
* @type {WebGLUniformLocation}
*/
this.u_offsetScaleMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_offsetScaleMatrix' : 'i');
/**
* @type {WebGLUniformLocation}
*/
this.u_offsetRotateMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_offsetRotateMatrix' : 'j');
/**
* @type {WebGLUniformLocation}
*/
this.u_lineWidth = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_lineWidth' : 'k');
/**
* @type {WebGLUniformLocation}
*/
this.u_miterLimit = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_miterLimit' : 'l');
/**
* @type {WebGLUniformLocation}
*/
this.u_opacity = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_opacity' : 'm');
/**
* @type {WebGLUniformLocation}
*/
this.u_color = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_color' : 'n');
/**
* @type {WebGLUniformLocation}
*/
this.u_size = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_size' : 'o');
/**
* @type {WebGLUniformLocation}
*/
this.u_pixelRatio = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_pixelRatio' : 'p');
/**
* @type {number}
*/
this.a_lastPos = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_lastPos' : 'd');
/**
* @type {number}
*/
this.a_position = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_position' : 'e');
/**
* @type {number}
*/
this.a_nextPos = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_nextPos' : 'f');
/**
* @type {number}
*/
this.a_direction = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_direction' : 'g');
}
}
export default Locations;

View File

@@ -1,31 +0,0 @@
//! MODULE=ol/render/webgl/polygonreplay/defaultshader
//! COMMON
//! VERTEX
attribute vec2 a_position;
uniform mat4 u_projectionMatrix;
uniform mat4 u_offsetScaleMatrix;
uniform mat4 u_offsetRotateMatrix;
void main(void) {
gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0);
}
//! FRAGMENT
uniform vec4 u_color;
uniform float u_opacity;
void main(void) {
gl_FragColor = u_color;
float alpha = u_color.a * u_opacity;
if (alpha == 0.0) {
discard;
}
gl_FragColor.a = alpha;
}

View File

@@ -1,17 +0,0 @@
/**
* @module ol/render/webgl/polygonreplay/defaultshader
*/
// This file is automatically generated, do not edit.
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../webgl.js';
import WebGLFragment from '../../../webgl/Fragment.js';
import WebGLVertex from '../../../webgl/Vertex.js';
export const fragment = new WebGLFragment(DEBUG_WEBGL ?
'precision mediump float;\n\n\n\nuniform vec4 u_color;\nuniform float u_opacity;\n\nvoid main(void) {\n gl_FragColor = u_color;\n float alpha = u_color.a * u_opacity;\n if (alpha == 0.0) {\n discard;\n }\n gl_FragColor.a = alpha;\n}\n' :
'precision mediump float;uniform vec4 e;uniform float f;void main(void){gl_FragColor=e;float alpha=e.a*f;if(alpha==0.0){discard;}gl_FragColor.a=alpha;}');
export const vertex = new WebGLVertex(DEBUG_WEBGL ?
'\n\nattribute vec2 a_position;\n\nuniform mat4 u_projectionMatrix;\nuniform mat4 u_offsetScaleMatrix;\nuniform mat4 u_offsetRotateMatrix;\n\nvoid main(void) {\n gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0);\n}\n\n\n' :
'attribute vec2 a;uniform mat4 b;uniform mat4 c;uniform mat4 d;void main(void){gl_Position=b*vec4(a,0.0,1.0);}');

View File

@@ -1,57 +0,0 @@
/**
* @module ol/render/webgl/polygonreplay/defaultshader/Locations
*/
// This file is automatically generated, do not edit
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../../webgl.js';
class Locations {
/**
* @param {WebGLRenderingContext} gl GL.
* @param {WebGLProgram} program Program.
*/
constructor(gl, program) {
/**
* @type {WebGLUniformLocation}
*/
this.u_projectionMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_projectionMatrix' : 'b');
/**
* @type {WebGLUniformLocation}
*/
this.u_offsetScaleMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_offsetScaleMatrix' : 'c');
/**
* @type {WebGLUniformLocation}
*/
this.u_offsetRotateMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_offsetRotateMatrix' : 'd');
/**
* @type {WebGLUniformLocation}
*/
this.u_color = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_color' : 'e');
/**
* @type {WebGLUniformLocation}
*/
this.u_opacity = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_opacity' : 'f');
/**
* @type {number}
*/
this.a_position = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_position' : 'a');
}
}
export default Locations;

View File

@@ -1,43 +0,0 @@
//! MODULE=ol/render/webgl/texturereplay/defaultshader
//! COMMON
varying vec2 v_texCoord;
varying float v_opacity;
//! VERTEX
attribute vec2 a_position;
attribute vec2 a_texCoord;
attribute vec2 a_offsets;
attribute float a_opacity;
attribute float a_rotateWithView;
uniform mat4 u_projectionMatrix;
uniform mat4 u_offsetScaleMatrix;
uniform mat4 u_offsetRotateMatrix;
void main(void) {
mat4 offsetMatrix = u_offsetScaleMatrix;
if (a_rotateWithView == 1.0) {
offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;
}
vec4 offsets = offsetMatrix * vec4(a_offsets, 0.0, 0.0);
gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets;
v_texCoord = a_texCoord;
v_opacity = a_opacity;
}
//! FRAGMENT
uniform float u_opacity;
uniform sampler2D u_image;
void main(void) {
vec4 texColor = texture2D(u_image, v_texCoord);
gl_FragColor.rgb = texColor.rgb;
float alpha = texColor.a * v_opacity * u_opacity;
if (alpha == 0.0) {
discard;
}
gl_FragColor.a = alpha;
}

View File

@@ -1,17 +0,0 @@
/**
* @module ol/render/webgl/texturereplay/defaultshader
*/
// This file is automatically generated, do not edit.
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../webgl.js';
import WebGLFragment from '../../../webgl/Fragment.js';
import WebGLVertex from '../../../webgl/Vertex.js';
export const fragment = new WebGLFragment(DEBUG_WEBGL ?
'precision mediump float;\nvarying vec2 v_texCoord;\nvarying float v_opacity;\n\nuniform float u_opacity;\nuniform sampler2D u_image;\n\nvoid main(void) {\n vec4 texColor = texture2D(u_image, v_texCoord);\n gl_FragColor.rgb = texColor.rgb;\n float alpha = texColor.a * v_opacity * u_opacity;\n if (alpha == 0.0) {\n discard;\n }\n gl_FragColor.a = alpha;\n}\n' :
'precision mediump float;varying vec2 a;varying float b;uniform float k;uniform sampler2D l;void main(void){vec4 texColor=texture2D(l,a);gl_FragColor.rgb=texColor.rgb;float alpha=texColor.a*b*k;if(alpha==0.0){discard;}gl_FragColor.a=alpha;}');
export const vertex = new WebGLVertex(DEBUG_WEBGL ?
'varying vec2 v_texCoord;\nvarying float v_opacity;\n\nattribute vec2 a_position;\nattribute vec2 a_texCoord;\nattribute vec2 a_offsets;\nattribute float a_opacity;\nattribute float a_rotateWithView;\n\nuniform mat4 u_projectionMatrix;\nuniform mat4 u_offsetScaleMatrix;\nuniform mat4 u_offsetRotateMatrix;\n\nvoid main(void) {\n mat4 offsetMatrix = u_offsetScaleMatrix;\n if (a_rotateWithView == 1.0) {\n offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;\n }\n vec4 offsets = offsetMatrix * vec4(a_offsets, 0.0, 0.0);\n gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets;\n v_texCoord = a_texCoord;\n v_opacity = a_opacity;\n}\n\n\n' :
'varying vec2 a;varying float b;attribute vec2 c;attribute vec2 d;attribute vec2 e;attribute float f;attribute float g;uniform mat4 h;uniform mat4 i;uniform mat4 j;void main(void){mat4 offsetMatrix=i;if(g==1.0){offsetMatrix=i*j;}vec4 offsets=offsetMatrix*vec4(e,0.0,0.0);gl_Position=h*vec4(c,0.0,1.0)+offsets;a=d;b=f;}');

View File

@@ -1,81 +0,0 @@
/**
* @module ol/render/webgl/texturereplay/defaultshader/Locations
*/
// This file is automatically generated, do not edit
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../../webgl.js';
class Locations {
/**
* @param {WebGLRenderingContext} gl GL.
* @param {WebGLProgram} program Program.
*/
constructor(gl, program) {
/**
* @type {WebGLUniformLocation}
*/
this.u_projectionMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_projectionMatrix' : 'h');
/**
* @type {WebGLUniformLocation}
*/
this.u_offsetScaleMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_offsetScaleMatrix' : 'i');
/**
* @type {WebGLUniformLocation}
*/
this.u_offsetRotateMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_offsetRotateMatrix' : 'j');
/**
* @type {WebGLUniformLocation}
*/
this.u_opacity = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_opacity' : 'k');
/**
* @type {WebGLUniformLocation}
*/
this.u_image = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_image' : 'l');
/**
* @type {number}
*/
this.a_position = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_position' : 'c');
/**
* @type {number}
*/
this.a_texCoord = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_texCoord' : 'd');
/**
* @type {number}
*/
this.a_offsets = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_offsets' : 'e');
/**
* @type {number}
*/
this.a_opacity = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_opacity' : 'f');
/**
* @type {number}
*/
this.a_rotateWithView = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_rotateWithView' : 'g');
}
}
export default Locations;

View File

@@ -1,271 +0,0 @@
/**
* @module ol/renderer/webgl/ImageLayer
*/
import {ENABLE_RASTER_REPROJECTION} from '../../reproj/common.js';
import ViewHint from '../../ViewHint.js';
import {createCanvasContext2D} from '../../dom.js';
import {getIntersection, isEmpty} from '../../extent.js';
import WebGLLayerRenderer from './Layer.js';
import {
create as createTransform,
rotate as rotateTransform,
translate as translateTransform,
scale as scaleTransform,
reset as resetTransform,
apply as applyTransform,
invert as invertTransform,
multiply as multiplyTransform
} from '../../transform.js';
import {CLAMP_TO_EDGE} from '../../webgl.js';
import {createTexture} from '../../webgl/Context.js';
/**
* @classdesc
* WebGL renderer for image layers.
* @api
*/
class WebGLImageLayerRenderer extends WebGLLayerRenderer {
/**
* @param {import("./Map.js").default} mapRenderer Map renderer.
   * @param {import("../../layer/Image.js").default} imageLayer Image layer.
*/
constructor(mapRenderer, imageLayer) {
super(mapRenderer, imageLayer);
/**
* The last rendered image.
* @private
* @type {?import("../../ImageBase.js").default}
*/
this.image_ = null;
/**
* @private
* @type {CanvasRenderingContext2D}
*/
this.hitCanvasContext_ = null;
/**
* @private
* @type {?import("../../transform.js").Transform}
*/
this.hitTransformationMatrix_ = null;
}
/**
* @param {import("../../ImageBase.js").default} image Image.
* @private
* @return {WebGLTexture} Texture.
*/
createTexture_(image) {
// We meet the conditions to work with non-power of two textures.
// http://www.khronos.org/webgl/wiki/WebGL_and_OpenGL_Differences#Non-Power_of_Two_Texture_Support
// http://learningwebgl.com/blog/?p=2101
const imageElement = image.getImage();
const gl = this.mapRenderer.getGL();
return createTexture(
gl, imageElement, CLAMP_TO_EDGE, CLAMP_TO_EDGE);
}
/**
* @inheritDoc
*/
prepareFrame(frameState, layerState, context) {
const gl = this.mapRenderer.getGL();
const pixelRatio = frameState.pixelRatio;
const viewState = frameState.viewState;
const viewCenter = viewState.center;
const viewResolution = viewState.resolution;
const viewRotation = viewState.rotation;
let image = this.image_;
let texture = this.texture;
const imageLayer = /** @type {import("../../layer/Image.js").default} */ (this.getLayer());
const imageSource = /** @type {import("../../source/Image.js").default} */ (imageLayer.getSource());
const hints = frameState.viewHints;
let renderedExtent = frameState.extent;
if (layerState.extent !== undefined) {
renderedExtent = getIntersection(renderedExtent, layerState.extent);
}
if (!hints[ViewHint.ANIMATING] && !hints[ViewHint.INTERACTING] &&
!isEmpty(renderedExtent)) {
let projection = viewState.projection;
if (!ENABLE_RASTER_REPROJECTION) {
const sourceProjection = imageSource.getProjection();
if (sourceProjection) {
projection = sourceProjection;
}
}
const image_ = imageSource.getImage(renderedExtent, viewResolution,
pixelRatio, projection);
if (image_) {
const loaded = this.loadImage(image_);
if (loaded) {
image = image_;
texture = this.createTexture_(image_);
if (this.texture) {
/**
* @param {WebGLRenderingContext} gl GL.
* @param {WebGLTexture} texture Texture.
*/
const postRenderFunction = function(gl, texture) {
if (!gl.isContextLost()) {
gl.deleteTexture(texture);
}
}.bind(null, gl, this.texture);
frameState.postRenderFunctions.push(
/** @type {import("../../PluggableMap.js").PostRenderFunction} */ (postRenderFunction)
);
}
}
}
}
if (image) {
const canvas = this.mapRenderer.getContext().getCanvas();
this.updateProjectionMatrix_(canvas.width, canvas.height,
pixelRatio, viewCenter, viewResolution, viewRotation,
image.getExtent());
this.hitTransformationMatrix_ = null;
// Translate and scale to flip the Y coord.
const texCoordMatrix = this.texCoordMatrix;
resetTransform(texCoordMatrix);
scaleTransform(texCoordMatrix, 1, -1);
translateTransform(texCoordMatrix, 0, -1);
this.image_ = image;
this.texture = texture;
}
return !!image;
}
/**
* @param {number} canvasWidth Canvas width.
* @param {number} canvasHeight Canvas height.
* @param {number} pixelRatio Pixel ratio.
* @param {import("../../coordinate.js").Coordinate} viewCenter View center.
* @param {number} viewResolution View resolution.
* @param {number} viewRotation View rotation.
* @param {import("../../extent.js").Extent} imageExtent Image extent.
* @private
*/
updateProjectionMatrix_(
canvasWidth,
canvasHeight,
pixelRatio,
viewCenter,
viewResolution,
viewRotation,
imageExtent
) {
const canvasExtentWidth = canvasWidth * viewResolution;
const canvasExtentHeight = canvasHeight * viewResolution;
const projectionMatrix = this.projectionMatrix;
resetTransform(projectionMatrix);
scaleTransform(projectionMatrix,
pixelRatio * 2 / canvasExtentWidth,
pixelRatio * 2 / canvasExtentHeight);
rotateTransform(projectionMatrix, -viewRotation);
translateTransform(projectionMatrix,
imageExtent[0] - viewCenter[0],
imageExtent[1] - viewCenter[1]);
scaleTransform(projectionMatrix,
(imageExtent[2] - imageExtent[0]) / 2,
(imageExtent[3] - imageExtent[1]) / 2);
translateTransform(projectionMatrix, 1, 1);
}
/**
* @inheritDoc
*/
forEachLayerAtPixel(pixel, frameState, callback, thisArg) {
if (!this.image_ || !this.image_.getImage()) {
return undefined;
}
const imageSize =
[this.image_.getImage().width, this.image_.getImage().height];
if (!this.hitTransformationMatrix_) {
this.hitTransformationMatrix_ = this.getHitTransformationMatrix_(
frameState.size, imageSize);
}
const pixelOnFrameBuffer = applyTransform(
this.hitTransformationMatrix_, pixel.slice());
if (pixelOnFrameBuffer[0] < 0 || pixelOnFrameBuffer[0] > imageSize[0] ||
pixelOnFrameBuffer[1] < 0 || pixelOnFrameBuffer[1] > imageSize[1]) {
// outside the image, no need to check
return undefined;
}
if (!this.hitCanvasContext_) {
this.hitCanvasContext_ = createCanvasContext2D(1, 1);
}
this.hitCanvasContext_.clearRect(0, 0, 1, 1);
this.hitCanvasContext_.drawImage(this.image_.getImage(),
pixelOnFrameBuffer[0], pixelOnFrameBuffer[1], 1, 1, 0, 0, 1, 1);
const imageData = this.hitCanvasContext_.getImageData(0, 0, 1, 1).data;
if (imageData[3] > 0) {
return callback.call(thisArg, this.getLayer(), imageData);
} else {
return undefined;
}
}
/**
* The transformation matrix to get the pixel on the image for a
* pixel on the map.
* @param {import("../../size.js").Size} mapSize The map size.
* @param {import("../../size.js").Size} imageSize The image size.
* @return {import("../../transform.js").Transform} The transformation matrix.
* @private
*/
getHitTransformationMatrix_(mapSize, imageSize) {
// the first matrix takes a map pixel, flips the y-axis and scales to
// a range between -1 ... 1
const mapCoordTransform = createTransform();
translateTransform(mapCoordTransform, -1, -1);
scaleTransform(mapCoordTransform, 2 / mapSize[0], 2 / mapSize[1]);
translateTransform(mapCoordTransform, 0, mapSize[1]);
scaleTransform(mapCoordTransform, 1, -1);
// the second matrix is the inverse of the projection matrix used in the
// shader for drawing
const projectionMatrixInv = invertTransform(this.projectionMatrix.slice());
// the third matrix scales to the image dimensions and flips the y-axis again
const transform = createTransform();
translateTransform(transform, 0, imageSize[1]);
scaleTransform(transform, 1, -1);
scaleTransform(transform, imageSize[0] / 2, imageSize[1] / 2);
translateTransform(transform, 1, 1);
multiplyTransform(transform, projectionMatrixInv);
multiplyTransform(transform, mapCoordTransform);
return transform;
}
}
export default WebGLImageLayerRenderer;
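`updateProjectionMatrix_` composes scale → rotate → translate → scale → translate so that the corners of the unit quad ([-1, -1] to [1, 1]) land on the image extent's corners in clip space. The composition can be reproduced with the same `ol/transform.js` helpers; the numbers below are a toy example, not taken from the renderer:

import {
  create as createTransform,
  rotate as rotateTransform,
  scale as scaleTransform,
  translate as translateTransform,
  apply as applyTransform
} from 'ol/transform.js';

// Assumed setup: 512×512 device-pixel canvas, pixelRatio 1, resolution 10 map
// units per pixel, view centred on [0, 0], no rotation, and an image covering
// the extent [-1000, -1000, 1000, 1000].
const m = createTransform();
scaleTransform(m, 1 * 2 / (512 * 10), 1 * 2 / (512 * 10));
rotateTransform(m, -0);
translateTransform(m, -1000 - 0, -1000 - 0);
scaleTransform(m, (1000 - (-1000)) / 2, (1000 - (-1000)) / 2);
translateTransform(m, 1, 1);

// The quad corner [-1, -1] maps to the extent's lower-left corner in clip
// space: [-1000, -1000] map units → roughly [-0.39, -0.39].
console.log(applyTransform(m, [-1, -1]));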

View File

@@ -1,263 +0,0 @@
/**
* @module ol/renderer/webgl/Layer
*/
import {abstract} from '../../util.js';
import RenderEvent from '../../render/Event.js';
import RenderEventType from '../../render/EventType.js';
import WebGLImmediateRenderer from '../../render/webgl/Immediate.js';
import LayerRenderer from '../Layer.js';
import {fragment, vertex} from './defaultmapshader.js';
import Locations from './defaultmapshader/Locations.js';
import {create as createTransform} from '../../transform.js';
import {create, fromTransform} from '../../vec/mat4.js';
import {ARRAY_BUFFER, FRAMEBUFFER, FLOAT, TEXTURE_2D,
TRIANGLE_STRIP, COLOR_ATTACHMENT0} from '../../webgl.js';
import WebGLBuffer from '../../webgl/Buffer.js';
import {createEmptyTexture} from '../../webgl/Context.js';
/**
* @abstract
*/
class WebGLLayerRenderer extends LayerRenderer {
/**
* @param {import("./Map.js").default} mapRenderer Map renderer.
* @param {import("../../layer/Layer.js").default} layer Layer.
*/
constructor(mapRenderer, layer) {
super(layer);
/**
* @protected
* @type {import("./Map.js").default}
*/
this.mapRenderer = mapRenderer;
/**
* @private
* @type {import("../../webgl/Buffer.js").default}
*/
this.arrayBuffer_ = new WebGLBuffer([
-1, -1, 0, 0,
1, -1, 1, 0,
-1, 1, 0, 1,
1, 1, 1, 1
]);
/**
* @protected
* @type {WebGLTexture}
*/
this.texture = null;
/**
* @protected
* @type {WebGLFramebuffer}
*/
this.framebuffer = null;
/**
* @protected
* @type {number|undefined}
*/
this.framebufferDimension = undefined;
/**
* @protected
* @type {import("../../transform.js").Transform}
*/
this.texCoordMatrix = createTransform();
/**
* @protected
* @type {import("../../transform.js").Transform}
*/
this.projectionMatrix = createTransform();
/**
* @type {Array<number>}
* @private
*/
this.tmpMat4_ = create();
/**
* @private
* @type {import("./defaultmapshader/Locations.js").default}
*/
this.defaultLocations_ = null;
}
/**
* @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
* @param {number} framebufferDimension Framebuffer dimension.
* @protected
*/
bindFramebuffer(frameState, framebufferDimension) {
const gl = this.mapRenderer.getGL();
if (this.framebufferDimension === undefined ||
this.framebufferDimension != framebufferDimension) {
/**
* @param {WebGLRenderingContext} gl GL.
* @param {WebGLFramebuffer} framebuffer Framebuffer.
* @param {WebGLTexture} texture Texture.
*/
const postRenderFunction = function(gl, framebuffer, texture) {
if (!gl.isContextLost()) {
gl.deleteFramebuffer(framebuffer);
gl.deleteTexture(texture);
}
}.bind(null, gl, this.framebuffer, this.texture);
frameState.postRenderFunctions.push(
/** @type {import("../../PluggableMap.js").PostRenderFunction} */ (postRenderFunction)
);
const texture = createEmptyTexture(
gl, framebufferDimension, framebufferDimension);
const framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(FRAMEBUFFER,
COLOR_ATTACHMENT0, TEXTURE_2D, texture, 0);
this.texture = texture;
this.framebuffer = framebuffer;
this.framebufferDimension = framebufferDimension;
} else {
gl.bindFramebuffer(FRAMEBUFFER, this.framebuffer);
}
}
/**
* @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
* @param {import("../../layer/Layer.js").State} layerState Layer state.
* @param {import("../../webgl/Context.js").default} context Context.
*/
composeFrame(frameState, layerState, context) {
this.dispatchComposeEvent_(RenderEventType.PRECOMPOSE, context, frameState);
context.bindBuffer(ARRAY_BUFFER, this.arrayBuffer_);
const gl = context.getGL();
const program = context.getProgram(fragment, vertex);
let locations;
if (!this.defaultLocations_) {
locations = new Locations(gl, program);
this.defaultLocations_ = locations;
} else {
locations = this.defaultLocations_;
}
if (context.useProgram(program)) {
gl.enableVertexAttribArray(locations.a_position);
gl.vertexAttribPointer(
locations.a_position, 2, FLOAT, false, 16, 0);
gl.enableVertexAttribArray(locations.a_texCoord);
gl.vertexAttribPointer(
locations.a_texCoord, 2, FLOAT, false, 16, 8);
gl.uniform1i(locations.u_texture, 0);
}
gl.uniformMatrix4fv(locations.u_texCoordMatrix, false,
fromTransform(this.tmpMat4_, this.getTexCoordMatrix()));
gl.uniformMatrix4fv(locations.u_projectionMatrix, false,
fromTransform(this.tmpMat4_, this.getProjectionMatrix()));
gl.uniform1f(locations.u_opacity, layerState.opacity);
gl.bindTexture(TEXTURE_2D, this.getTexture());
gl.drawArrays(TRIANGLE_STRIP, 0, 4);
this.dispatchComposeEvent_(RenderEventType.POSTCOMPOSE, context, frameState);
}
/**
* @param {import("../../render/EventType.js").default} type Event type.
* @param {import("../../webgl/Context.js").default} context WebGL context.
* @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
* @private
*/
dispatchComposeEvent_(type, context, frameState) {
const layer = this.getLayer();
if (layer.hasListener(type)) {
const viewState = frameState.viewState;
const resolution = viewState.resolution;
const pixelRatio = frameState.pixelRatio;
const extent = frameState.extent;
const center = viewState.center;
const rotation = viewState.rotation;
const size = frameState.size;
const render = new WebGLImmediateRenderer(
context, center, resolution, rotation, size, extent, pixelRatio);
const composeEvent = new RenderEvent(
type, render, frameState, null, context);
layer.dispatchEvent(composeEvent);
}
}
/**
* @return {!import("../../transform.js").Transform} Matrix.
*/
getTexCoordMatrix() {
return this.texCoordMatrix;
}
/**
* @return {WebGLTexture} Texture.
*/
getTexture() {
return this.texture;
}
/**
* @return {!import("../../transform.js").Transform} Matrix.
*/
getProjectionMatrix() {
return this.projectionMatrix;
}
/**
* Handle webglcontextlost.
*/
handleWebGLContextLost() {
this.texture = null;
this.framebuffer = null;
this.framebufferDimension = undefined;
}
/**
* @abstract
* @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
* @param {import("../../layer/Layer.js").State} layerState Layer state.
* @param {import("../../webgl/Context.js").default} context Context.
* @return {boolean} whether composeFrame should be called.
*/
prepareFrame(frameState, layerState, context) {
return abstract();
}
/**
* @abstract
* @param {import("../../pixel.js").Pixel} pixel Pixel.
* @param {import("../../PluggableMap.js").FrameState} frameState FrameState.
* @param {function(this: S, import("../../layer/Layer.js").default, (Uint8ClampedArray|Uint8Array)): T} callback Layer
* callback.
* @param {S} thisArg Value to use as `this` when executing `callback`.
* @return {T|undefined} Callback result.
* @template S,T,U
*/
forEachLayerAtPixel(pixel, frameState, callback, thisArg) {
return abstract();
}
}
export default WebGLLayerRenderer;
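`composeFrame` composites the layer's offscreen texture onto the map with a single 4-vertex triangle strip; the shared array buffer interleaves clip-space position and texture coordinate, which is where the 16-byte stride and the 0/8 byte offsets come from. The same values as `arrayBuffer_` above, annotated for clarity:

// Full-screen quad drawn as TRIANGLE_STRIP.
// Each vertex: [clipX, clipY, u, v] → 4 floats = 16-byte stride;
// a_position reads bytes 0–7, a_texCoord reads bytes 8–15.
const quad = new Float32Array([
  -1, -1, 0, 0, // bottom-left
  1, -1, 1, 0,  // bottom-right
  -1, 1, 0, 1,  // top-left
  1, 1, 1, 1    // top-right
]);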

View File

@@ -1,582 +0,0 @@
/**
* @module ol/renderer/webgl/Map
*/
import {stableSort} from '../../array.js';
import {CLASS_UNSELECTABLE} from '../../css.js';
import {createCanvasContext2D} from '../../dom.js';
import {listen} from '../../events.js';
import {visibleAtResolution} from '../../layer/Layer.js';
import RenderEvent from '../../render/Event.js';
import RenderEventType from '../../render/EventType.js';
import WebGLImmediateRenderer from '../../render/webgl/Immediate.js';
import MapRenderer, {sortByZIndex} from '../Map.js';
import SourceState from '../../source/State.js';
import LRUCache from '../../structs/LRUCache.js';
import PriorityQueue from '../../structs/PriorityQueue.js';
import {BLEND, CLAMP_TO_EDGE, COLOR_BUFFER_BIT, CULL_FACE, DEPTH_TEST, FRAMEBUFFER,
getContext, LINEAR, ONE, ONE_MINUS_SRC_ALPHA, RGBA, SCISSOR_TEST, SRC_ALPHA,
STENCIL_TEST, TEXTURE0, TEXTURE_2D, TEXTURE_MAG_FILTER, TEXTURE_MIN_FILTER,
TEXTURE_WRAP_S, TEXTURE_WRAP_T, UNSIGNED_BYTE} from '../../webgl.js';
import WebGLContext from '../../webgl/Context.js';
import ContextEventType from '../../webgl/ContextEventType.js';
/**
* @typedef {Object} TextureCacheEntry
* @property {number} magFilter
* @property {number} minFilter
* @property {WebGLTexture} texture
*/
/**
* Texture cache high water mark.
* @type {number}
*/
const WEBGL_TEXTURE_CACHE_HIGH_WATER_MARK = 1024;
/**
* @classdesc
* WebGL map renderer.
* @api
*/
class WebGLMapRenderer extends MapRenderer {
/**
* @param {import("../../PluggableMap.js").default} map Map.
*/
constructor(map) {
super(map);
const container = map.getViewport();
/**
* @private
* @type {HTMLCanvasElement}
*/
this.canvas_ = /** @type {HTMLCanvasElement} */
(document.createElement('canvas'));
this.canvas_.style.width = '100%';
this.canvas_.style.height = '100%';
this.canvas_.style.display = 'block';
this.canvas_.className = CLASS_UNSELECTABLE;
container.insertBefore(this.canvas_, container.childNodes[0] || null);
/**
* @private
* @type {number}
*/
this.clipTileCanvasWidth_ = 0;
/**
* @private
* @type {number}
*/
this.clipTileCanvasHeight_ = 0;
/**
* @private
* @type {CanvasRenderingContext2D}
*/
this.clipTileContext_ = createCanvasContext2D();
/**
* @private
* @type {boolean}
*/
this.renderedVisible_ = true;
/**
* @private
* @type {WebGLRenderingContext}
*/
this.gl_ = getContext(this.canvas_, {
antialias: true,
depth: true,
failIfMajorPerformanceCaveat: true,
preserveDrawingBuffer: false,
stencil: true
});
/**
* @private
* @type {import("../../webgl/Context.js").default}
*/
this.context_ = new WebGLContext(this.canvas_, this.gl_);
listen(this.canvas_, ContextEventType.LOST,
this.handleWebGLContextLost, this);
listen(this.canvas_, ContextEventType.RESTORED,
this.handleWebGLContextRestored, this);
/**
* @private
* @type {import("../../structs/LRUCache.js").default<TextureCacheEntry|null>}
*/
this.textureCache_ = new LRUCache();
/**
* @private
* @type {import("../../coordinate.js").Coordinate}
*/
this.focus_ = null;
/**
* @private
* @type {import("../../structs/PriorityQueue.js").default<Array>}
*/
this.tileTextureQueue_ = new PriorityQueue(
/**
* @param {Array<*>} element Element.
* @return {number} Priority.
* @this {WebGLMapRenderer}
*/
(function(element) {
const tileCenter = /** @type {import("../../coordinate.js").Coordinate} */ (element[1]);
const tileResolution = /** @type {number} */ (element[2]);
const deltaX = tileCenter[0] - this.focus_[0];
const deltaY = tileCenter[1] - this.focus_[1];
return 65536 * Math.log(tileResolution) +
Math.sqrt(deltaX * deltaX + deltaY * deltaY) / tileResolution;
}).bind(this),
/**
* @param {Array<*>} element Element.
* @return {string} Key.
*/
function(element) {
return (
/** @type {import("../../Tile.js").default} */ (element[0]).getKey()
);
});
/**
* @param {import("../../PluggableMap.js").default} map Map.
* @param {?import("../../PluggableMap.js").FrameState} frameState Frame state.
* @return {boolean} false.
* @this {WebGLMapRenderer}
*/
this.loadNextTileTexture_ =
function(map, frameState) {
if (!this.tileTextureQueue_.isEmpty()) {
this.tileTextureQueue_.reprioritize();
const element = this.tileTextureQueue_.dequeue();
const tile = /** @type {import("../../Tile.js").default} */ (element[0]);
const tileSize = /** @type {import("../../size.js").Size} */ (element[3]);
const tileGutter = /** @type {number} */ (element[4]);
this.bindTileTexture(
tile, tileSize, tileGutter, LINEAR, LINEAR);
}
return false;
}.bind(this);
/**
* @private
* @type {number}
*/
this.textureCacheFrameMarkerCount_ = 0;
this.initializeGL_();
}
/**
* @param {import("../../Tile.js").default} tile Tile.
* @param {import("../../size.js").Size} tileSize Tile size.
* @param {number} tileGutter Tile gutter.
* @param {number} magFilter Mag filter.
* @param {number} minFilter Min filter.
*/
bindTileTexture(tile, tileSize, tileGutter, magFilter, minFilter) {
const gl = this.getGL();
const tileKey = tile.getKey();
if (this.textureCache_.containsKey(tileKey)) {
const textureCacheEntry = this.textureCache_.get(tileKey);
gl.bindTexture(TEXTURE_2D, textureCacheEntry.texture);
if (textureCacheEntry.magFilter != magFilter) {
gl.texParameteri(
TEXTURE_2D, TEXTURE_MAG_FILTER, magFilter);
textureCacheEntry.magFilter = magFilter;
}
if (textureCacheEntry.minFilter != minFilter) {
gl.texParameteri(
TEXTURE_2D, TEXTURE_MIN_FILTER, minFilter);
textureCacheEntry.minFilter = minFilter;
}
} else {
const texture = gl.createTexture();
const imageTile = /** @type {import("../../ImageTile.js").default} */ (tile);
gl.bindTexture(TEXTURE_2D, texture);
if (tileGutter > 0) {
const clipTileCanvas = this.clipTileContext_.canvas;
const clipTileContext = this.clipTileContext_;
if (this.clipTileCanvasWidth_ !== tileSize[0] ||
this.clipTileCanvasHeight_ !== tileSize[1]) {
clipTileCanvas.width = tileSize[0];
clipTileCanvas.height = tileSize[1];
this.clipTileCanvasWidth_ = tileSize[0];
this.clipTileCanvasHeight_ = tileSize[1];
} else {
clipTileContext.clearRect(0, 0, tileSize[0], tileSize[1]);
}
clipTileContext.drawImage(imageTile.getImage(), tileGutter, tileGutter,
tileSize[0], tileSize[1], 0, 0, tileSize[0], tileSize[1]);
gl.texImage2D(TEXTURE_2D, 0,
RGBA, RGBA,
UNSIGNED_BYTE, clipTileCanvas);
} else {
gl.texImage2D(TEXTURE_2D, 0,
RGBA, RGBA,
UNSIGNED_BYTE, imageTile.getImage());
}
gl.texParameteri(
TEXTURE_2D, TEXTURE_MAG_FILTER, magFilter);
gl.texParameteri(
TEXTURE_2D, TEXTURE_MIN_FILTER, minFilter);
gl.texParameteri(TEXTURE_2D, TEXTURE_WRAP_S,
CLAMP_TO_EDGE);
gl.texParameteri(TEXTURE_2D, TEXTURE_WRAP_T,
CLAMP_TO_EDGE);
this.textureCache_.set(tileKey, {
texture: texture,
magFilter: magFilter,
minFilter: minFilter
});
}
}
/**
* @param {import("../../render/EventType.js").default} type Event type.
* @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
*/
dispatchRenderEvent(type, frameState) {
const map = this.getMap();
if (map.hasListener(type)) {
const context = this.context_;
const extent = frameState.extent;
const size = frameState.size;
const viewState = frameState.viewState;
const pixelRatio = frameState.pixelRatio;
const resolution = viewState.resolution;
const center = viewState.center;
const rotation = viewState.rotation;
const vectorContext = new WebGLImmediateRenderer(context,
center, resolution, rotation, size, extent, pixelRatio);
const composeEvent = new RenderEvent(type, vectorContext,
frameState, null, context);
map.dispatchEvent(composeEvent);
}
}
/**
* @inheritDoc
*/
disposeInternal() {
const gl = this.getGL();
if (!gl.isContextLost()) {
this.textureCache_.forEach(
/**
* @param {?TextureCacheEntry} textureCacheEntry
* Texture cache entry.
*/
function(textureCacheEntry) {
if (textureCacheEntry) {
gl.deleteTexture(textureCacheEntry.texture);
}
});
}
this.context_.dispose();
super.disposeInternal();
}
/**
* @param {import("../../PluggableMap.js").default} map Map.
* @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
* @private
*/
expireCache_(map, frameState) {
const gl = this.getGL();
let textureCacheEntry;
while (this.textureCache_.getCount() - this.textureCacheFrameMarkerCount_ >
WEBGL_TEXTURE_CACHE_HIGH_WATER_MARK) {
textureCacheEntry = this.textureCache_.peekLast();
if (!textureCacheEntry) {
if (+this.textureCache_.peekLastKey() == frameState.index) {
break;
} else {
--this.textureCacheFrameMarkerCount_;
}
} else {
gl.deleteTexture(textureCacheEntry.texture);
}
this.textureCache_.pop();
}
}
/**
* @return {import("../../webgl/Context.js").default} The context.
*/
getContext() {
return this.context_;
}
/**
* @return {WebGLRenderingContext} GL.
*/
getGL() {
return this.gl_;
}
/**
* @return {import("../../structs/PriorityQueue.js").default<Array>} Tile texture queue.
*/
getTileTextureQueue() {
return this.tileTextureQueue_;
}
/**
* @param {import("../../events/Event.js").default} event Event.
* @protected
*/
handleWebGLContextLost(event) {
event.preventDefault();
this.textureCache_.clear();
this.textureCacheFrameMarkerCount_ = 0;
const renderers = this.getLayerRenderers();
for (const id in renderers) {
const renderer = /** @type {import("./Layer.js").default} */ (renderers[id]);
renderer.handleWebGLContextLost();
}
}
/**
* @protected
*/
handleWebGLContextRestored() {
this.initializeGL_();
this.getMap().render();
}
/**
* @private
*/
initializeGL_() {
const gl = this.gl_;
gl.activeTexture(TEXTURE0);
gl.blendFuncSeparate(
SRC_ALPHA, ONE_MINUS_SRC_ALPHA,
ONE, ONE_MINUS_SRC_ALPHA);
gl.disable(CULL_FACE);
gl.disable(DEPTH_TEST);
gl.disable(SCISSOR_TEST);
gl.disable(STENCIL_TEST);
}
/**
* @param {import("../../Tile.js").default} tile Tile.
* @return {boolean} Is tile texture loaded.
*/
isTileTextureLoaded(tile) {
return this.textureCache_.containsKey(tile.getKey());
}
/**
* @inheritDoc
*/
renderFrame(frameState) {
const context = this.getContext();
const gl = this.getGL();
if (gl.isContextLost()) {
return false;
}
if (!frameState) {
if (this.renderedVisible_) {
this.canvas_.style.display = 'none';
this.renderedVisible_ = false;
}
return false;
}
this.focus_ = frameState.focus;
this.textureCache_.set((-frameState.index).toString(), null);
++this.textureCacheFrameMarkerCount_;
this.dispatchRenderEvent(RenderEventType.PRECOMPOSE, frameState);
/** @type {Array<import("../../layer/Layer.js").State>} */
const layerStatesToDraw = [];
const layerStatesArray = frameState.layerStatesArray;
stableSort(layerStatesArray, sortByZIndex);
const viewResolution = frameState.viewState.resolution;
let i, ii;
for (i = 0, ii = layerStatesArray.length; i < ii; ++i) {
const layerState = layerStatesArray[i];
if (visibleAtResolution(layerState, viewResolution) &&
layerState.sourceState == SourceState.READY) {
const layerRenderer = /** @type {import("./Layer.js").default} */ (this.getLayerRenderer(layerState.layer));
if (layerRenderer.prepareFrame(frameState, layerState, context)) {
layerStatesToDraw.push(layerState);
}
}
}
const width = frameState.size[0] * frameState.pixelRatio;
const height = frameState.size[1] * frameState.pixelRatio;
if (this.canvas_.width != width || this.canvas_.height != height) {
this.canvas_.width = width;
this.canvas_.height = height;
}
gl.bindFramebuffer(FRAMEBUFFER, null);
gl.clearColor(0, 0, 0, 0);
gl.clear(COLOR_BUFFER_BIT);
gl.enable(BLEND);
gl.viewport(0, 0, this.canvas_.width, this.canvas_.height);
for (i = 0, ii = layerStatesToDraw.length; i < ii; ++i) {
const layerState = layerStatesToDraw[i];
const layerRenderer = /** @type {import("./Layer.js").default} */ (this.getLayerRenderer(layerState.layer));
layerRenderer.composeFrame(frameState, layerState, context);
}
if (!this.renderedVisible_) {
this.canvas_.style.display = '';
this.renderedVisible_ = true;
}
this.calculateMatrices2D(frameState);
if (this.textureCache_.getCount() - this.textureCacheFrameMarkerCount_ >
WEBGL_TEXTURE_CACHE_HIGH_WATER_MARK) {
frameState.postRenderFunctions.push(
/** @type {import("../../PluggableMap.js").PostRenderFunction} */ (this.expireCache_.bind(this))
);
}
if (!this.tileTextureQueue_.isEmpty()) {
frameState.postRenderFunctions.push(this.loadNextTileTexture_);
frameState.animate = true;
}
this.dispatchRenderEvent(RenderEventType.POSTCOMPOSE, frameState);
this.scheduleRemoveUnusedLayerRenderers(frameState);
this.scheduleExpireIconCache(frameState);
}
/**
* @inheritDoc
*/
forEachFeatureAtCoordinate(
coordinate,
frameState,
hitTolerance,
callback,
thisArg,
layerFilter,
thisArg2
) {
let result;
if (this.getGL().isContextLost()) {
return false;
}
const viewState = frameState.viewState;
const layerStates = frameState.layerStatesArray;
const numLayers = layerStates.length;
let i;
for (i = numLayers - 1; i >= 0; --i) {
const layerState = layerStates[i];
const layer = layerState.layer;
if (visibleAtResolution(layerState, viewState.resolution) &&
layerFilter.call(thisArg2, layer)) {
const layerRenderer = this.getLayerRenderer(layer);
result = layerRenderer.forEachFeatureAtCoordinate(
coordinate, frameState, hitTolerance, callback);
if (result) {
return result;
}
}
}
return undefined;
}
/**
* @inheritDoc
*/
hasFeatureAtCoordinate(coordinate, frameState, hitTolerance, layerFilter, thisArg) {
let hasFeature = false;
if (this.getGL().isContextLost()) {
return false;
}
const viewState = frameState.viewState;
const layerStates = frameState.layerStatesArray;
const numLayers = layerStates.length;
let i;
for (i = numLayers - 1; i >= 0; --i) {
const layerState = layerStates[i];
const layer = layerState.layer;
if (visibleAtResolution(layerState, viewState.resolution) &&
layerFilter.call(thisArg, layer)) {
const layerRenderer = this.getLayerRenderer(layer);
hasFeature =
layerRenderer.hasFeatureAtCoordinate(coordinate, frameState);
if (hasFeature) {
return true;
}
}
}
return hasFeature;
}
/**
* @inheritDoc
*/
forEachLayerAtPixel(pixel, frameState, hitTolerance, callback, thisArg, layerFilter, thisArg2) {
if (this.getGL().isContextLost()) {
return false;
}
const viewState = frameState.viewState;
let result;
const layerStates = frameState.layerStatesArray;
const numLayers = layerStates.length;
let i;
for (i = numLayers - 1; i >= 0; --i) {
const layerState = layerStates[i];
const layer = layerState.layer;
if (visibleAtResolution(layerState, viewState.resolution) &&
layerFilter.call(thisArg, layer)) {
const layerRenderer = /** @type {import("./Layer.js").default} */ (this.getLayerRenderer(layer));
result = layerRenderer.forEachLayerAtPixel(
pixel, frameState, callback, thisArg);
if (result) {
return result;
}
}
}
return undefined;
}
}
export default WebGLMapRenderer;

View File

@@ -0,0 +1,234 @@
/**
* @module ol/renderer/webgl/PointsLayer
*/
import LayerRenderer from '../Layer';
import WebGLArrayBuffer from '../../webgl/Buffer';
import {DYNAMIC_DRAW, ARRAY_BUFFER, ELEMENT_ARRAY_BUFFER, FLOAT} from '../../webgl';
import WebGLHelper, {DefaultAttrib, DefaultUniform} from '../../webgl/Helper';
import WebGLVertex from '../../webgl/Vertex';
import WebGLFragment from '../../webgl/Fragment';
import GeometryType from '../../geom/GeometryType';
const VERTEX_SHADER = `
precision mediump float;
attribute vec2 a_position;
attribute vec2 a_texCoord;
attribute float a_rotateWithView;
attribute vec2 a_offsets;
uniform mat4 u_projectionMatrix;
uniform mat4 u_offsetScaleMatrix;
uniform mat4 u_offsetRotateMatrix;
varying vec2 v_texCoord;
void main(void) {
mat4 offsetMatrix = u_offsetScaleMatrix;
if (a_rotateWithView == 1.0) {
offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;
}
vec4 offsets = offsetMatrix * vec4(a_offsets, 0.0, 0.0);
gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets;
v_texCoord = a_texCoord;
}`;
const FRAGMENT_SHADER = `
precision mediump float;
uniform float u_opacity;
varying vec2 v_texCoord;
void main(void) {
gl_FragColor.rgb = vec3(1.0, 1.0, 1.0);
float alpha = u_opacity;
if (alpha == 0.0) {
discard;
}
gl_FragColor.a = alpha;
}`;
/**
* @typedef {Object} PostProcessesOptions
* @property {number} [scaleRatio] Scale ratio; if < 1, the post process will render to a texture smaller than
* the main canvas that will then be sampled up (useful for saving resources on blur steps).
* @property {string} [vertexShader] Vertex shader source
* @property {string} [fragmentShader] Fragment shader source
* @property {Object.<string,import("../../webgl/Helper").UniformValue>} [uniforms] Uniform definitions for the post process step
*/
/**
* @typedef {Object} Options
* @property {function(import("../../Feature").default):number} [sizeCallback] Will be called on every feature in the
* source to compute the size of the quad on screen (in pixels). This is only done on source change.
* @property {function(import("../../Feature").default, number):number} [coordCallback] Will be called on every feature in the
* source to compute the coordinate of the quad center on screen (in pixels). This is only done on source change.
* The second argument is 0 for `x` component and 1 for `y`.
* @property {string} [vertexShader] Vertex shader source
* @property {string} [fragmentShader] Fragment shader source
* @property {Object.<string,import("../../webgl/Helper").UniformValue>} [uniforms] Uniform definitions for the post process steps
* @property {Array<PostProcessesOptions>} [postProcesses] Post-processes definitions
*/
/**
* @classdesc
* WebGL vector renderer optimized for points.
* All features will be rendered as quads (two triangles forming a square). New data will be flushed to the GPU
* every time the vector source changes.
*
* Use shaders to customize the final output.
*
* This uses {@link module:ol/webgl/Helper~WebGLHelper} internally.
*
* Default shaders are shown hereafter:
*
* * Vertex shader:
* ```
* precision mediump float;
* attribute vec2 a_position;
* attribute vec2 a_texCoord;
* attribute float a_rotateWithView;
* attribute vec2 a_offsets;
*
* uniform mat4 u_projectionMatrix;
* uniform mat4 u_offsetScaleMatrix;
* uniform mat4 u_offsetRotateMatrix;
*
* varying vec2 v_texCoord;
*
* void main(void) {
* mat4 offsetMatrix = u_offsetScaleMatrix;
* if (a_rotateWithView == 1.0) {
* offsetMatrix = u_offsetScaleMatrix * u_offsetRotateMatrix;
* }
* vec4 offsets = offsetMatrix * vec4(a_offsets, 0.0, 0.0);
* gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets;
* v_texCoord = a_texCoord;
* }
* ```
*
* * Fragment shader:
* ```
* precision mediump float;
* uniform float u_opacity;
*
* varying vec2 v_texCoord;
*
* void main(void) {
* gl_FragColor.rgb = vec3(1.0, 1.0, 1.0);
* float alpha = u_opacity;
* if (alpha == 0.0) {
* discard;
* }
* gl_FragColor.a = alpha;
* }
* ```
*
* @api
*/
class WebGLPointsLayerRenderer extends LayerRenderer {
/**
* @param {import("../../layer/Vector.js").default} vectorLayer Vector layer.
* @param {Options=} [opt_options] Options.
*/
constructor(vectorLayer, opt_options) {
super(vectorLayer);
const options = opt_options || {};
this.context_ = new WebGLHelper({
postProcesses: options.postProcesses,
uniforms: options.uniforms
});
this.sourceRevision_ = -1;
this.verticesBuffer_ = new WebGLArrayBuffer([], DYNAMIC_DRAW);
this.indicesBuffer_ = new WebGLArrayBuffer([], DYNAMIC_DRAW);
const vertexShader = new WebGLVertex(options.vertexShader || VERTEX_SHADER);
const fragmentShader = new WebGLFragment(options.fragmentShader || FRAGMENT_SHADER);
this.program_ = this.context_.getProgram(fragmentShader, vertexShader);
this.context_.useProgram(this.program_);
this.sizeCallback_ = options.sizeCallback || function(feature) {
return 1;
};
this.coordCallback_ = options.coordCallback || function(feature, index) {
const geom = /** @type {import("../../geom/Point").default} */ (feature.getGeometry());
return geom.getCoordinates()[index];
};
}
/**
* @inheritDoc
*/
disposeInternal() {
super.disposeInternal();
}
/**
* @inheritDoc
*/
renderFrame(frameState, layerState) {
this.context_.setUniformFloatValue(DefaultUniform.OPACITY, layerState.opacity);
this.context_.drawElements(0, this.indicesBuffer_.getArray().length);
this.context_.finalizeDraw(frameState);
return this.context_.getCanvas();
}
/**
* @inheritDoc
*/
prepareFrame(frameState) {
const vectorLayer = /** @type {import("../../layer/Vector.js").default} */ (this.getLayer());
const vectorSource = /** @type {import("../../source/Vector.js").default} */ (vectorLayer.getSource());
this.context_.prepareDraw(frameState);
if (this.sourceRevision_ < vectorSource.getRevision()) {
this.sourceRevision_ = vectorSource.getRevision();
const viewState = frameState.viewState;
const projection = viewState.projection;
const resolution = viewState.resolution;
// loop on features to fill the buffer
vectorSource.loadFeatures([-Infinity, -Infinity, Infinity, Infinity], resolution, projection);
vectorSource.forEachFeature((feature) => {
if (!feature.getGeometry() || feature.getGeometry().getType() !== GeometryType.POINT) {
return;
}
const x = this.coordCallback_(feature, 0);
const y = this.coordCallback_(feature, 1);
const size = this.sizeCallback_(feature);
const stride = 6;
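// each vertex carries 6 floats: point position (x, y), quad corner offset in pixels, and texture coordinate (u, v),
// matching the attribute layout bound with enableAttributeArray below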
const baseIndex = this.verticesBuffer_.getArray().length / stride;
this.verticesBuffer_.getArray().push(
x, y, -size / 2, -size / 2, 0, 0,
x, y, +size / 2, -size / 2, 1, 0,
x, y, +size / 2, +size / 2, 1, 1,
x, y, -size / 2, +size / 2, 0, 1
);
this.indicesBuffer_.getArray().push(
baseIndex, baseIndex + 1, baseIndex + 3,
baseIndex + 1, baseIndex + 2, baseIndex + 3
);
});
}
// write new data
this.context_.bindBuffer(ARRAY_BUFFER, this.verticesBuffer_);
this.context_.bindBuffer(ELEMENT_ARRAY_BUFFER, this.indicesBuffer_);
const bytesPerFloat = Float32Array.BYTES_PER_ELEMENT;
this.context_.enableAttributeArray(DefaultAttrib.POSITION, 2, FLOAT, bytesPerFloat * 6, 0);
this.context_.enableAttributeArray(DefaultAttrib.OFFSETS, 2, FLOAT, bytesPerFloat * 6, bytesPerFloat * 2);
this.context_.enableAttributeArray(DefaultAttrib.TEX_COORD, 2, FLOAT, bytesPerFloat * 6, bytesPerFloat * 4);
return true;
}
}
export default WebGLPointsLayerRenderer;
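
As a rough usage sketch (not authoritative; it only relies on the constructor and `Options` typedef documented above, and the `pointsLayer` variable plus the `'magnitude'` attribute are hypothetical), the renderer could be set up like this:

```js
import WebGLPointsLayerRenderer from 'ol/renderer/webgl/PointsLayer.js';

// `pointsLayer` is assumed to be an existing ol/layer/Vector instance whose source contains point features
const renderer = new WebGLPointsLayerRenderer(pointsLayer, {
  // quad size on screen in pixels, derived here from a hypothetical 'magnitude' attribute
  sizeCallback: function(feature) {
    return 10 + 2 * feature.get('magnitude');
  },
  // quad center taken from the point geometry (same as the default callback)
  coordCallback: function(feature, index) {
    return feature.getGeometry().getCoordinates()[index];
  }
});
```

Custom `vertexShader` and `fragmentShader` strings can be passed the same way to replace the defaults shown in the class description.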

View File

@@ -1,398 +0,0 @@
/**
* @module ol/renderer/webgl/TileLayer
*/
// FIXME large resolutions lead to too large framebuffers :-(
// FIXME animated shaders! check in redraw
import ImageTile from '../../ImageTile.js';
import TileRange from '../../TileRange.js';
import TileState from '../../TileState.js';
import TileSource from '../../source/Tile.js';
import {numberSafeCompareFunction} from '../../array.js';
import {createEmpty, intersects} from '../../extent.js';
import {roundUpToPowerOfTwo} from '../../math.js';
import WebGLLayerRenderer from './Layer.js';
import {fragment, vertex} from './tilelayershader.js';
import Locations from './tilelayershader/Locations.js';
import {toSize} from '../../size.js';
import {
reset as resetTransform,
rotate as rotateTransform,
scale as scaleTransform,
translate as translateTransform,
apply as applyTransform
} from '../../transform.js';
import {COLOR_BUFFER_BIT, BLEND, ARRAY_BUFFER, FLOAT, LINEAR, TRIANGLE_STRIP} from '../../webgl.js';
import WebGLBuffer from '../../webgl/Buffer.js';
/**
* @classdesc
* WebGL renderer for tile layers.
* @api
*/
class WebGLTileLayerRenderer extends WebGLLayerRenderer {
/**
* @param {import("./Map.js").default} mapRenderer Map renderer.
* @param {import("../../layer/Tile.js").default} tileLayer Tile layer.
*/
constructor(mapRenderer, tileLayer) {
super(mapRenderer, tileLayer);
/**
* @private
* @type {import("../../webgl/Fragment.js").default}
*/
this.fragmentShader_ = fragment;
/**
* @private
* @type {import("../../webgl/Vertex.js").default}
*/
this.vertexShader_ = vertex;
/**
* @private
* @type {import("./tilelayershader/Locations.js").default}
*/
this.locations_ = null;
/**
* @private
* @type {import("../../webgl/Buffer.js").default}
*/
this.renderArrayBuffer_ = new WebGLBuffer([
0, 0, 0, 1,
1, 0, 1, 1,
0, 1, 0, 0,
1, 1, 1, 0
]);
/**
* @private
* @type {import("../../TileRange.js").default}
*/
this.renderedTileRange_ = null;
/**
* @private
* @type {import("../../extent.js").Extent}
*/
this.renderedFramebufferExtent_ = null;
/**
* @private
* @type {number}
*/
this.renderedRevision_ = -1;
/**
* @private
* @type {import("../../size.js").Size}
*/
this.tmpSize_ = [0, 0];
}
/**
* @inheritDoc
*/
disposeInternal() {
const context = this.mapRenderer.getContext();
context.deleteBuffer(this.renderArrayBuffer_);
super.disposeInternal();
}
/**
* @inheritDoc
*/
createLoadedTileFinder(source, projection, tiles) {
const mapRenderer = this.mapRenderer;
return (
/**
* @param {number} zoom Zoom level.
* @param {import("../../TileRange.js").default} tileRange Tile range.
* @return {boolean} The tile range is fully loaded.
*/
function(zoom, tileRange) {
function callback(tile) {
const loaded = mapRenderer.isTileTextureLoaded(tile);
if (loaded) {
if (!tiles[zoom]) {
tiles[zoom] = {};
}
tiles[zoom][tile.tileCoord.toString()] = tile;
}
return loaded;
}
return source.forEachLoadedTile(projection, zoom, tileRange, callback);
}
);
}
/**
* @inheritDoc
*/
handleWebGLContextLost() {
super.handleWebGLContextLost();
this.locations_ = null;
}
/**
* @inheritDoc
*/
prepareFrame(frameState, layerState, context) {
const mapRenderer = this.mapRenderer;
const gl = context.getGL();
const viewState = frameState.viewState;
const projection = viewState.projection;
const tileLayer = /** @type {import("../../layer/Tile.js").default} */ (this.getLayer());
const tileSource = tileLayer.getSource();
if (!(tileSource instanceof TileSource)) {
return true;
}
const tileGrid = tileSource.getTileGridForProjection(projection);
const z = tileGrid.getZForResolution(viewState.resolution);
const tileResolution = tileGrid.getResolution(z);
const tilePixelSize =
tileSource.getTilePixelSize(z, frameState.pixelRatio, projection);
const pixelRatio = tilePixelSize[0] /
toSize(tileGrid.getTileSize(z), this.tmpSize_)[0];
const tilePixelResolution = tileResolution / pixelRatio;
const tileGutter = tileSource.getTilePixelRatio(pixelRatio) * tileSource.getGutterForProjection(projection);
const center = viewState.center;
const extent = frameState.extent;
const tileRange = tileGrid.getTileRangeForExtentAndZ(extent, z);
let framebufferExtent;
if (this.renderedTileRange_ &&
this.renderedTileRange_.equals(tileRange) &&
this.renderedRevision_ == tileSource.getRevision()) {
framebufferExtent = this.renderedFramebufferExtent_;
} else {
const tileRangeSize = tileRange.getSize();
const maxDimension = Math.max(
tileRangeSize[0] * tilePixelSize[0],
tileRangeSize[1] * tilePixelSize[1]);
const framebufferDimension = roundUpToPowerOfTwo(maxDimension);
const framebufferExtentDimension = tilePixelResolution * framebufferDimension;
const origin = tileGrid.getOrigin(z);
const minX = origin[0] +
tileRange.minX * tilePixelSize[0] * tilePixelResolution;
const minY = origin[1] +
tileRange.minY * tilePixelSize[1] * tilePixelResolution;
framebufferExtent = [
minX, minY,
minX + framebufferExtentDimension, minY + framebufferExtentDimension
];
this.bindFramebuffer(frameState, framebufferDimension);
gl.viewport(0, 0, framebufferDimension, framebufferDimension);
gl.clearColor(0, 0, 0, 0);
gl.clear(COLOR_BUFFER_BIT);
gl.disable(BLEND);
const program = context.getProgram(this.fragmentShader_, this.vertexShader_);
context.useProgram(program);
if (!this.locations_) {
this.locations_ = new Locations(gl, program);
}
context.bindBuffer(ARRAY_BUFFER, this.renderArrayBuffer_);
gl.enableVertexAttribArray(this.locations_.a_position);
gl.vertexAttribPointer(
this.locations_.a_position, 2, FLOAT, false, 16, 0);
gl.enableVertexAttribArray(this.locations_.a_texCoord);
gl.vertexAttribPointer(
this.locations_.a_texCoord, 2, FLOAT, false, 16, 8);
gl.uniform1i(this.locations_.u_texture, 0);
/**
* @type {Object<number, Object<string, import("../../Tile.js").default>>}
*/
const tilesToDrawByZ = {};
tilesToDrawByZ[z] = {};
const findLoadedTiles = this.createLoadedTileFinder(
tileSource, projection, tilesToDrawByZ);
const useInterimTilesOnError = tileLayer.getUseInterimTilesOnError();
let allTilesLoaded = true;
const tmpExtent = createEmpty();
const tmpTileRange = new TileRange(0, 0, 0, 0);
let childTileRange, drawable, fullyLoaded, tile, tileState;
let x, y, tileExtent;
for (x = tileRange.minX; x <= tileRange.maxX; ++x) {
for (y = tileRange.minY; y <= tileRange.maxY; ++y) {
tile = tileSource.getTile(z, x, y, pixelRatio, projection);
if (layerState.extent !== undefined) {
// ignore tiles outside layer extent
tileExtent = tileGrid.getTileCoordExtent(tile.tileCoord, tmpExtent);
if (!intersects(tileExtent, layerState.extent)) {
continue;
}
}
tileState = tile.getState();
drawable = tileState == TileState.LOADED ||
tileState == TileState.EMPTY ||
tileState == TileState.ERROR && !useInterimTilesOnError;
if (!drawable) {
tile = tile.getInterimTile();
}
tileState = tile.getState();
if (tileState == TileState.LOADED) {
if (mapRenderer.isTileTextureLoaded(tile)) {
tilesToDrawByZ[z][tile.tileCoord.toString()] = tile;
continue;
}
} else if (tileState == TileState.EMPTY ||
(tileState == TileState.ERROR &&
!useInterimTilesOnError)) {
continue;
}
allTilesLoaded = false;
fullyLoaded = tileGrid.forEachTileCoordParentTileRange(
tile.tileCoord, findLoadedTiles, null, tmpTileRange, tmpExtent);
if (!fullyLoaded) {
childTileRange = tileGrid.getTileCoordChildTileRange(
tile.tileCoord, tmpTileRange, tmpExtent);
if (childTileRange) {
findLoadedTiles(z + 1, childTileRange);
}
}
}
}
/** @type {Array<number>} */
const zs = Object.keys(tilesToDrawByZ).map(Number);
zs.sort(numberSafeCompareFunction);
const u_tileOffset = new Float32Array(4);
for (let i = 0, ii = zs.length; i < ii; ++i) {
const tilesToDraw = tilesToDrawByZ[zs[i]];
for (const tileKey in tilesToDraw) {
tile = tilesToDraw[tileKey];
if (!(tile instanceof ImageTile)) {
continue;
}
tileExtent = tileGrid.getTileCoordExtent(tile.tileCoord, tmpExtent);
u_tileOffset[0] = 2 * (tileExtent[2] - tileExtent[0]) /
framebufferExtentDimension;
u_tileOffset[1] = 2 * (tileExtent[3] - tileExtent[1]) /
framebufferExtentDimension;
u_tileOffset[2] = 2 * (tileExtent[0] - framebufferExtent[0]) /
framebufferExtentDimension - 1;
u_tileOffset[3] = 2 * (tileExtent[1] - framebufferExtent[1]) /
framebufferExtentDimension - 1;
gl.uniform4fv(this.locations_.u_tileOffset, u_tileOffset);
mapRenderer.bindTileTexture(tile, tilePixelSize,
tileGutter * pixelRatio, LINEAR, LINEAR);
gl.drawArrays(TRIANGLE_STRIP, 0, 4);
}
}
if (allTilesLoaded) {
this.renderedTileRange_ = tileRange;
this.renderedFramebufferExtent_ = framebufferExtent;
this.renderedRevision_ = tileSource.getRevision();
} else {
this.renderedTileRange_ = null;
this.renderedFramebufferExtent_ = null;
this.renderedRevision_ = -1;
frameState.animate = true;
}
}
this.updateUsedTiles(frameState.usedTiles, tileSource, z, tileRange);
const tileTextureQueue = mapRenderer.getTileTextureQueue();
this.manageTilePyramid(
frameState, tileSource, tileGrid, pixelRatio, projection, extent, z,
tileLayer.getPreload(),
/**
* @param {import("../../Tile.js").default} tile Tile.
*/
function(tile) {
if (tile.getState() == TileState.LOADED &&
!mapRenderer.isTileTextureLoaded(tile) &&
!tileTextureQueue.isKeyQueued(tile.getKey())) {
tileTextureQueue.enqueue([
tile,
tileGrid.getTileCoordCenter(tile.tileCoord),
tileGrid.getResolution(tile.tileCoord[0]),
tilePixelSize, tileGutter * pixelRatio
]);
}
}, this);
this.scheduleExpireCache(frameState, tileSource);
const texCoordMatrix = this.texCoordMatrix;
resetTransform(texCoordMatrix);
translateTransform(texCoordMatrix,
(Math.round(center[0] / tileResolution) * tileResolution - framebufferExtent[0]) /
(framebufferExtent[2] - framebufferExtent[0]),
(Math.round(center[1] / tileResolution) * tileResolution - framebufferExtent[1]) /
(framebufferExtent[3] - framebufferExtent[1]));
if (viewState.rotation !== 0) {
rotateTransform(texCoordMatrix, viewState.rotation);
}
scaleTransform(texCoordMatrix,
frameState.size[0] * viewState.resolution /
(framebufferExtent[2] - framebufferExtent[0]),
frameState.size[1] * viewState.resolution /
(framebufferExtent[3] - framebufferExtent[1]));
translateTransform(texCoordMatrix, -0.5, -0.5);
return true;
}
/**
* @inheritDoc
*/
forEachLayerAtPixel(pixel, frameState, callback, thisArg) {
if (!this.framebuffer) {
return undefined;
}
const pixelOnMapScaled = [
pixel[0] / frameState.size[0],
(frameState.size[1] - pixel[1]) / frameState.size[1]];
const pixelOnFrameBufferScaled = applyTransform(
this.texCoordMatrix, pixelOnMapScaled.slice());
const pixelOnFrameBuffer = [
pixelOnFrameBufferScaled[0] * this.framebufferDimension,
pixelOnFrameBufferScaled[1] * this.framebufferDimension];
const gl = this.mapRenderer.getContext().getGL();
gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
const imageData = new Uint8Array(4);
gl.readPixels(pixelOnFrameBuffer[0], pixelOnFrameBuffer[1], 1, 1,
gl.RGBA, gl.UNSIGNED_BYTE, imageData);
if (imageData[3] > 0) {
return callback.call(thisArg, this.getLayer(), imageData);
} else {
return undefined;
}
}
}
export default WebGLTileLayerRenderer;

View File

@@ -1,306 +0,0 @@
/**
* @module ol/renderer/webgl/VectorLayer
*/
import {getUid} from '../../util.js';
import ViewHint from '../../ViewHint.js';
import {buffer, containsExtent, createEmpty} from '../../extent.js';
import WebGLReplayGroup from '../../render/webgl/ReplayGroup.js';
import {defaultOrder as defaultRenderOrder, getTolerance as getRenderTolerance, getSquaredTolerance as getSquaredRenderTolerance, renderFeature} from '../vector.js';
import WebGLLayerRenderer from './Layer.js';
import {apply as applyTransform} from '../../transform.js';
/**
* @classdesc
* WebGL renderer for vector layers.
* @api
*/
class WebGLVectorLayerRenderer extends WebGLLayerRenderer {
/**
* @param {import("./Map.js").default} mapRenderer Map renderer.
* @param {import("../../layer/Vector.js").default} vectorLayer Vector layer.
*/
constructor(mapRenderer, vectorLayer) {
super(mapRenderer, vectorLayer);
/**
* @private
* @type {boolean}
*/
this.dirty_ = false;
/**
* @private
* @type {number}
*/
this.renderedRevision_ = -1;
/**
* @private
* @type {number}
*/
this.renderedResolution_ = NaN;
/**
* @private
* @type {import("../../extent.js").Extent}
*/
this.renderedExtent_ = createEmpty();
/**
* @private
* @type {function(import("../../Feature.js").default, import("../../Feature.js").default): number|null}
*/
this.renderedRenderOrder_ = null;
/**
* @private
* @type {import("../../render/webgl/ReplayGroup.js").default}
*/
this.replayGroup_ = null;
/**
* The last layer state.
* @private
* @type {?import("../../layer/Layer.js").State}
*/
this.layerState_ = null;
}
/**
* @inheritDoc
*/
composeFrame(frameState, layerState, context) {
this.layerState_ = layerState;
const viewState = frameState.viewState;
const replayGroup = this.replayGroup_;
const size = frameState.size;
const pixelRatio = frameState.pixelRatio;
const gl = this.mapRenderer.getGL();
if (replayGroup && !replayGroup.isEmpty()) {
gl.enable(gl.SCISSOR_TEST);
gl.scissor(0, 0, size[0] * pixelRatio, size[1] * pixelRatio);
replayGroup.replay(context,
viewState.center, viewState.resolution, viewState.rotation,
size, pixelRatio, layerState.opacity,
layerState.managed ? frameState.skippedFeatureUids : {});
gl.disable(gl.SCISSOR_TEST);
}
}
/**
* @inheritDoc
*/
disposeInternal() {
const replayGroup = this.replayGroup_;
if (replayGroup) {
const context = this.mapRenderer.getContext();
replayGroup.getDeleteResourcesFunction(context)();
this.replayGroup_ = null;
}
super.disposeInternal();
}
/**
* @inheritDoc
*/
forEachFeatureAtCoordinate(coordinate, frameState, hitTolerance, callback, thisArg) {
if (!this.replayGroup_ || !this.layerState_) {
return undefined;
} else {
const context = this.mapRenderer.getContext();
const viewState = frameState.viewState;
const layer = this.getLayer();
const layerState = this.layerState_;
/** @type {!Object<string, boolean>} */
const features = {};
return this.replayGroup_.forEachFeatureAtCoordinate(coordinate,
context, viewState.center, viewState.resolution, viewState.rotation,
frameState.size, frameState.pixelRatio, layerState.opacity,
{},
/**
* @param {import("../../Feature.js").FeatureLike} feature Feature.
* @return {?} Callback result.
*/
function(feature) {
const key = getUid(feature);
if (!(key in features)) {
features[key] = true;
return callback.call(thisArg, feature, layer);
}
});
}
}
/**
* @inheritDoc
*/
hasFeatureAtCoordinate(coordinate, frameState) {
if (!this.replayGroup_ || !this.layerState_) {
return false;
} else {
const context = this.mapRenderer.getContext();
const viewState = frameState.viewState;
const layerState = this.layerState_;
return this.replayGroup_.hasFeatureAtCoordinate(coordinate,
context, viewState.center, viewState.resolution, viewState.rotation,
frameState.size, frameState.pixelRatio, layerState.opacity,
frameState.skippedFeatureUids);
}
}
/**
* @inheritDoc
*/
forEachLayerAtPixel(pixel, frameState, callback, thisArg) {
const coordinate = applyTransform(
frameState.pixelToCoordinateTransform, pixel.slice());
const hasFeature = this.hasFeatureAtCoordinate(coordinate, frameState);
if (hasFeature) {
return callback.call(thisArg, this.getLayer(), null);
} else {
return undefined;
}
}
/**
* Handle changes in image style state.
* @param {import("../../events/Event.js").default} event Image style change event.
* @private
*/
handleStyleImageChange_(event) {
this.renderIfReadyAndVisible();
}
/**
* @inheritDoc
*/
prepareFrame(frameState, layerState, context) {
const vectorLayer = /** @type {import("../../layer/Vector.js").default} */ (this.getLayer());
const vectorSource = /** @type {import("../../source/Vector.js").default} */ (vectorLayer.getSource());
const animating = frameState.viewHints[ViewHint.ANIMATING];
const interacting = frameState.viewHints[ViewHint.INTERACTING];
const updateWhileAnimating = vectorLayer.getUpdateWhileAnimating();
const updateWhileInteracting = vectorLayer.getUpdateWhileInteracting();
if (!this.dirty_ && (!updateWhileAnimating && animating) ||
(!updateWhileInteracting && interacting)) {
return true;
}
const frameStateExtent = frameState.extent;
const viewState = frameState.viewState;
const projection = viewState.projection;
const resolution = viewState.resolution;
const pixelRatio = frameState.pixelRatio;
const vectorLayerRevision = vectorLayer.getRevision();
const vectorLayerRenderBuffer = vectorLayer.getRenderBuffer();
let vectorLayerRenderOrder = vectorLayer.getRenderOrder();
if (vectorLayerRenderOrder === undefined) {
vectorLayerRenderOrder = defaultRenderOrder;
}
const extent = buffer(frameStateExtent,
vectorLayerRenderBuffer * resolution);
if (!this.dirty_ &&
this.renderedResolution_ == resolution &&
this.renderedRevision_ == vectorLayerRevision &&
this.renderedRenderOrder_ == vectorLayerRenderOrder &&
containsExtent(this.renderedExtent_, extent)) {
return true;
}
if (this.replayGroup_) {
frameState.postRenderFunctions.push(
this.replayGroup_.getDeleteResourcesFunction(context));
}
this.dirty_ = false;
const replayGroup = new WebGLReplayGroup(
getRenderTolerance(resolution, pixelRatio),
extent, vectorLayer.getRenderBuffer());
vectorSource.loadFeatures(extent, resolution, projection);
/**
* @param {import("../../Feature.js").default} feature Feature.
* @this {WebGLVectorLayerRenderer}
*/
const render = function(feature) {
let styles;
const styleFunction = feature.getStyleFunction() || vectorLayer.getStyleFunction();
if (styleFunction) {
styles = styleFunction(feature, resolution);
}
if (styles) {
const dirty = this.renderFeature(
feature, resolution, pixelRatio, styles, replayGroup);
this.dirty_ = this.dirty_ || dirty;
}
}.bind(this);
if (vectorLayerRenderOrder) {
/** @type {Array<import("../../Feature.js").default>} */
const features = [];
vectorSource.forEachFeatureInExtent(extent,
/**
* @param {import("../../Feature.js").default} feature Feature.
*/
function(feature) {
features.push(feature);
});
features.sort(vectorLayerRenderOrder);
features.forEach(render.bind(this));
} else {
vectorSource.forEachFeatureInExtent(extent, render);
}
replayGroup.finish(context);
this.renderedResolution_ = resolution;
this.renderedRevision_ = vectorLayerRevision;
this.renderedRenderOrder_ = vectorLayerRenderOrder;
this.renderedExtent_ = extent;
this.replayGroup_ = replayGroup;
return true;
}
/**
* @param {import("../../Feature.js").default} feature Feature.
* @param {number} resolution Resolution.
* @param {number} pixelRatio Pixel ratio.
* @param {import("../../style/Style.js").default|Array<import("../../style/Style.js").default>} styles The style or array of
* styles.
* @param {import("../../render/webgl/ReplayGroup.js").default} replayGroup Replay group.
* @return {boolean} `true` if an image is loading.
*/
renderFeature(feature, resolution, pixelRatio, styles, replayGroup) {
if (!styles) {
return false;
}
let loading = false;
if (Array.isArray(styles)) {
for (let i = styles.length - 1, ii = 0; i >= ii; --i) {
loading = renderFeature(
replayGroup, feature, styles[i],
getSquaredRenderTolerance(resolution, pixelRatio),
this.handleStyleImageChange_, this) || loading;
}
} else {
loading = renderFeature(
replayGroup, feature, styles,
getSquaredRenderTolerance(resolution, pixelRatio),
this.handleStyleImageChange_, this) || loading;
}
return loading;
}
}
export default WebGLVectorLayerRenderer;

View File

@@ -1,29 +0,0 @@
//! MODULE=ol/renderer/webgl/defaultmapshader
//! COMMON
varying vec2 v_texCoord;
//! VERTEX
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform mat4 u_texCoordMatrix;
uniform mat4 u_projectionMatrix;
void main(void) {
gl_Position = u_projectionMatrix * vec4(a_position, 0., 1.);
v_texCoord = (u_texCoordMatrix * vec4(a_texCoord, 0., 1.)).st;
}
//! FRAGMENT
uniform float u_opacity;
uniform sampler2D u_texture;
void main(void) {
vec4 texColor = texture2D(u_texture, v_texCoord);
gl_FragColor.rgb = texColor.rgb;
gl_FragColor.a = texColor.a * u_opacity;
}

View File

@@ -1,17 +0,0 @@
/**
* @module ol/renderer/webgl/defaultmapshader
*/
// This file is automatically generated, do not edit.
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../webgl.js';
import WebGLFragment from '../../webgl/Fragment.js';
import WebGLVertex from '../../webgl/Vertex.js';
export const fragment = new WebGLFragment(DEBUG_WEBGL ?
'precision mediump float;\nvarying vec2 v_texCoord;\n\n\nuniform float u_opacity;\nuniform sampler2D u_texture;\n\nvoid main(void) {\n vec4 texColor = texture2D(u_texture, v_texCoord);\n gl_FragColor.rgb = texColor.rgb;\n gl_FragColor.a = texColor.a * u_opacity;\n}\n' :
'precision mediump float;varying vec2 a;uniform float f;uniform sampler2D g;void main(void){vec4 texColor=texture2D(g,a);gl_FragColor.rgb=texColor.rgb;gl_FragColor.a=texColor.a*f;}');
export const vertex = new WebGLVertex(DEBUG_WEBGL ?
'varying vec2 v_texCoord;\n\n\nattribute vec2 a_position;\nattribute vec2 a_texCoord;\n\nuniform mat4 u_texCoordMatrix;\nuniform mat4 u_projectionMatrix;\n\nvoid main(void) {\n gl_Position = u_projectionMatrix * vec4(a_position, 0., 1.);\n v_texCoord = (u_texCoordMatrix * vec4(a_texCoord, 0., 1.)).st;\n}\n\n\n' :
'varying vec2 a;attribute vec2 b;attribute vec2 c;uniform mat4 d;uniform mat4 e;void main(void){gl_Position=e*vec4(b,0.,1.);a=(d*vec4(c,0.,1.)).st;}');

View File

@@ -1,57 +0,0 @@
/**
* @module ol/renderer/webgl/defaultmapshader/Locations
*/
// This file is automatically generated, do not edit
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../webgl.js';
class Locations {
/**
* @param {WebGLRenderingContext} gl GL.
* @param {WebGLProgram} program Program.
*/
constructor(gl, program) {
/**
* @type {WebGLUniformLocation}
*/
this.u_texCoordMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_texCoordMatrix' : 'd');
/**
* @type {WebGLUniformLocation}
*/
this.u_projectionMatrix = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_projectionMatrix' : 'e');
/**
* @type {WebGLUniformLocation}
*/
this.u_opacity = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_opacity' : 'f');
/**
* @type {WebGLUniformLocation}
*/
this.u_texture = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_texture' : 'g');
/**
* @type {number}
*/
this.a_position = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_position' : 'b');
/**
* @type {number}
*/
this.a_texCoord = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_texCoord' : 'c');
}
}
export default Locations;

View File

@@ -1,24 +0,0 @@
//! MODULE=ol/renderer/webgl/tilelayershader
//! COMMON
varying vec2 v_texCoord;
//! VERTEX
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec4 u_tileOffset;
void main(void) {
gl_Position = vec4(a_position * u_tileOffset.xy + u_tileOffset.zw, 0., 1.);
v_texCoord = a_texCoord;
}
//! FRAGMENT
uniform sampler2D u_texture;
void main(void) {
gl_FragColor = texture2D(u_texture, v_texCoord);
}

View File

@@ -1,17 +0,0 @@
/**
* @module ol/renderer/webgl/tilelayershader
*/
// This file is automatically generated, do not edit.
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../webgl.js';
import WebGLFragment from '../../webgl/Fragment.js';
import WebGLVertex from '../../webgl/Vertex.js';
export const fragment = new WebGLFragment(DEBUG_WEBGL ?
'precision mediump float;\nvarying vec2 v_texCoord;\n\n\nuniform sampler2D u_texture;\n\nvoid main(void) {\n gl_FragColor = texture2D(u_texture, v_texCoord);\n}\n' :
'precision mediump float;varying vec2 a;uniform sampler2D e;void main(void){gl_FragColor=texture2D(e,a);}');
export const vertex = new WebGLVertex(DEBUG_WEBGL ?
'varying vec2 v_texCoord;\n\n\nattribute vec2 a_position;\nattribute vec2 a_texCoord;\nuniform vec4 u_tileOffset;\n\nvoid main(void) {\n gl_Position = vec4(a_position * u_tileOffset.xy + u_tileOffset.zw, 0., 1.);\n v_texCoord = a_texCoord;\n}\n\n\n' :
'varying vec2 a;attribute vec2 b;attribute vec2 c;uniform vec4 d;void main(void){gl_Position=vec4(b*d.xy+d.zw,0.,1.);a=c;}');

View File

@@ -1,45 +0,0 @@
/**
* @module ol/renderer/webgl/tilelayershader/Locations
*/
// This file is automatically generated, do not edit
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../webgl.js';
class Locations {
/**
* @param {WebGLRenderingContext} gl GL.
* @param {WebGLProgram} program Program.
*/
constructor(gl, program) {
/**
* @type {WebGLUniformLocation}
*/
this.u_tileOffset = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_tileOffset' : 'd');
/**
* @type {WebGLUniformLocation}
*/
this.u_texture = gl.getUniformLocation(
program, DEBUG_WEBGL ? 'u_texture' : 'e');
/**
* @type {number}
*/
this.a_position = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_position' : 'b');
/**
* @type {number}
*/
this.a_texCoord = gl.getAttribLocation(
program, DEBUG_WEBGL ? 'a_texCoord' : 'c');
}
}
export default Locations;

View File

@@ -11,9 +11,8 @@ import XYZ from './XYZ.js';
* @property {import("./Source.js").AttributionLike} [attributions] Attributions.
* @property {number} [cacheSize=2048] Cache size.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {import("../proj.js").ProjectionLike} [projection='EPSG:3857'] Projection.
* @property {number} [maxZoom=18] Max zoom.
* @property {number} [minZoom] Minimum zoom.
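
The `crossOrigin` note above applies whenever pixel data is read back from the map canvas (for example via `getImageData`), which the browser blocks for tainted canvases. A minimal, illustrative sketch with an XYZ source (the URL is a placeholder):

```js
import XYZ from 'ol/source/XYZ.js';

const source = new XYZ({
  url: 'https://example.com/tiles/{z}/{x}/{y}.png', // placeholder URL
  crossOrigin: 'anonymous' // needed so canvas pixel reads are not blocked by CORS
});
```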

View File

@@ -16,9 +16,8 @@ import {appendParams} from '../uri.js';
* @typedef {Object} Options
* @property {import("./Source.js").AttributionLike} [attributions] Attributions.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image} for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {boolean} [hidpi=true] Use the `ol/Map#pixelRatio` value when requesting the image from
* the remote server.
* @property {import("../Image.js").LoadFunction} [imageLoadFunction] Optional function to load an image given

View File

@@ -14,9 +14,8 @@ import {appendParams} from '../uri.js';
* @typedef {Object} Options
* @property {string} [url] The mapagent url.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {number} [displayDpi=96] The display resolution.
* @property {number} [metersPerUnit=1] The meters-per-unit value.
* @property {boolean} [hidpi=true] Use the `ol/Map#pixelRatio` value when requesting

View File

@@ -15,9 +15,8 @@ import ImageSource, {defaultImageLoadFunction} from './Image.js';
* @typedef {Object} Options
* @property {import("./Source.js").AttributionLike} [attributions] Attributions.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {import("../extent.js").Extent} [imageExtent] Extent of the image in map coordinates.
* This is the [left, bottom, right, top] map coordinates of your image.
* @property {import("../Image.js").LoadFunction} [imageLoadFunction] Optional function to load an image given a URL.

View File

@@ -29,9 +29,8 @@ const GETFEATUREINFO_IMAGE_SIZE = [101, 101];
* @typedef {Object} Options
* @property {import("./Source.js").AttributionLike} [attributions] Attributions.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {boolean} [hidpi=true] Use the `ol/Map#pixelRatio` value when requesting
* the image from the remote server.
* @property {import("./WMSServerType.js").default|string} [serverType] The type of

View File

@@ -22,9 +22,8 @@ export const ATTRIBUTION = '&#169; ' +
* @property {import("./Source.js").AttributionLike} [attributions] Attributions.
* @property {number} [cacheSize=2048] Cache size.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {number} [maxZoom=19] Max zoom.
* @property {boolean} [opaque=true] Whether the layer is opaque.
* @property {number} [reprojectionErrorThreshold=1.5] Maximum allowed reprojection error (in pixels).

View File

@@ -14,11 +14,9 @@ import {appendParams} from '../uri.js';
* @typedef {Object} Options
* @property {import("./Source.js").AttributionLike} [attributions] Attributions.
* @property {number} [cacheSize=2048] Cache size.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images.
* Note that you must provide a `crossOrigin` value if you are using the WebGL renderer
* or if you want to access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image
* for more detail.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {Object<string,*>} [params] ArcGIS Rest parameters. This field is optional. Service defaults will be
* used for any fields not specified. `FORMAT` is `PNG32` by default. `F` is `IMAGE` by
* default. `TRANSPARENT` is `true` by default. `BBOX`, `SIZE`, `BBOXSR`,

View File

@@ -20,9 +20,8 @@ import {getForProjection as getTileGridForProjection} from '../tilegrid.js';
* @property {boolean} [attributionsCollapsible=true] Attributions are collapsible.
* @property {number} [cacheSize=2048] Cache size.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {boolean} [opaque=true] Whether the layer is opaque.
* @property {import("../proj.js").ProjectionLike} projection Projection.
* @property {number} [reprojectionErrorThreshold=0.5] Maximum allowed reprojection error (in pixels).

View File

@@ -41,9 +41,8 @@ import {createXYZ, extentFromProjection} from '../tilegrid.js';
* @property {import("./Source.js").AttributionLike} [attributions] Attributions.
* @property {number} [cacheSize=2048] Cache size.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {boolean} [jsonp=false] Use JSONP with callback to load the TileJSON.
* Useful when the server does not support CORS.
* @property {number} [reprojectionErrorThreshold=0.5] Maximum allowed reprojection error (in pixels).

View File

@@ -22,9 +22,8 @@ import {appendParams} from '../uri.js';
* @property {import("./Source.js").AttributionLike} [attributions] Attributions.
* @property {number} [cacheSize=2048] Cache size.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {Object<string,*>} params WMS request parameters.
* At least a `LAYERS` param is required. `STYLES` is
* `''` by default. `VERSION` is `1.3.0` by default. `WIDTH`, `HEIGHT`, `BBOX`

View File

@@ -17,9 +17,8 @@ import {appendParams} from '../uri.js';
* @property {import("./Source.js").AttributionLike} [attributions] Attributions.
* @property {number} [cacheSize=2048] Cache size.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {import("../tilegrid/WMTS.js").default} tileGrid Tile grid.
* @property {import("../proj.js").ProjectionLike} projection Projection.
* @property {number} [reprojectionErrorThreshold=0.5] Maximum allowed reprojection error (in pixels).

View File

@@ -11,9 +11,8 @@ import {createXYZ, extentFromProjection} from '../tilegrid.js';
* @property {boolean} [attributionsCollapsible=true] Attributions are collapsible.
* @property {number} [cacheSize=2048] Cache size.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {boolean} [opaque=true] Whether the layer is opaque.
* @property {import("../proj.js").ProjectionLike} [projection='EPSG:3857'] Projection.
* @property {number} [reprojectionErrorThreshold=0.5] Maximum allowed reprojection error (in pixels).

View File

@@ -84,9 +84,8 @@ export class CustomTile extends ImageTile {
* @property {import("./Source.js").AttributionLike} [attributions] Attributions.
* @property {number} [cacheSize=2048] Cache size.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that
* you must provide a `crossOrigin` value if you are using the WebGL renderer or if you want to
* access pixel data with the Canvas renderer. See
* https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* you must provide a `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {import("../proj.js").ProjectionLike} [projection] Projection.
* @property {number} [reprojectionErrorThreshold=0.5] Maximum allowed reprojection error (in pixels).
* Higher values can increase reprojection performance, but decrease precision.

View File

@@ -27,7 +27,7 @@ import ImageStyle from './Image.js';
* @property {import("../color.js").Color|string} [color] Color to tint the icon. If not specified,
* the icon will be left as is.
* @property {null|string} [crossOrigin] The `crossOrigin` attribute for loaded images. Note that you must provide a
* `crossOrigin` value if you are using the WebGL renderer or if you want to access pixel data with the Canvas renderer.
* `crossOrigin` value if you want to access pixel data with the Canvas renderer.
* See https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for more detail.
* @property {HTMLImageElement|HTMLCanvasElement} [img] Image object for the icon. If the `src` option is not provided then the
* provided image must already be loaded. And in that case, it is required

View File

@@ -13,7 +13,7 @@ const BufferUsage = {
};
class WebGLBuffer {
class WebGLArrayBuffer {
/**
* @param {Array<number>=} opt_arr Array.
@@ -50,4 +50,4 @@ class WebGLBuffer {
}
}
export default WebGLBuffer;
export default WebGLArrayBuffer;

View File

@@ -1,361 +0,0 @@
/**
* @module ol/webgl/Context
*/
import {getUid} from '../util.js';
import {EXTENSIONS as WEBGL_EXTENSIONS} from '../webgl.js';
import Disposable from '../Disposable.js';
import {includes} from '../array.js';
import {listen, unlistenAll} from '../events.js';
import {clear} from '../obj.js';
import {ARRAY_BUFFER, ELEMENT_ARRAY_BUFFER, TEXTURE_2D, TEXTURE_WRAP_S, TEXTURE_WRAP_T} from '../webgl.js';
import ContextEventType from '../webgl/ContextEventType.js';
/**
* @typedef {Object} BufferCacheEntry
* @property {import("./Buffer.js").default} buf
* @property {WebGLBuffer} buffer
*/
/**
* @classdesc
* A WebGL context for accessing low-level WebGL capabilities.
*/
class WebGLContext extends Disposable {
/**
* @param {HTMLCanvasElement} canvas Canvas.
* @param {WebGLRenderingContext} gl GL.
*/
constructor(canvas, gl) {
super();
/**
* @private
* @type {HTMLCanvasElement}
*/
this.canvas_ = canvas;
/**
* @private
* @type {WebGLRenderingContext}
*/
this.gl_ = gl;
/**
* @private
* @type {!Object<string, BufferCacheEntry>}
*/
this.bufferCache_ = {};
/**
* @private
* @type {!Object<string, WebGLShader>}
*/
this.shaderCache_ = {};
/**
* @private
* @type {!Object<string, WebGLProgram>}
*/
this.programCache_ = {};
/**
* @private
* @type {WebGLProgram}
*/
this.currentProgram_ = null;
/**
* @private
* @type {WebGLFramebuffer}
*/
this.hitDetectionFramebuffer_ = null;
/**
* @private
* @type {WebGLTexture}
*/
this.hitDetectionTexture_ = null;
/**
* @private
* @type {WebGLRenderbuffer}
*/
this.hitDetectionRenderbuffer_ = null;
/**
* @type {boolean}
*/
this.hasOESElementIndexUint = includes(WEBGL_EXTENSIONS, 'OES_element_index_uint');
// use the OES_element_index_uint extension if available
if (this.hasOESElementIndexUint) {
gl.getExtension('OES_element_index_uint');
}
listen(this.canvas_, ContextEventType.LOST,
this.handleWebGLContextLost, this);
listen(this.canvas_, ContextEventType.RESTORED,
this.handleWebGLContextRestored, this);
}
/**
* Just bind the buffer if it's in the cache. Otherwise create
* the WebGL buffer, bind it, populate it, and add an entry to
* the cache.
* @param {number} target Target.
* @param {import("./Buffer.js").default} buf Buffer.
*/
bindBuffer(target, buf) {
const gl = this.getGL();
const arr = buf.getArray();
const bufferKey = getUid(buf);
if (bufferKey in this.bufferCache_) {
const bufferCacheEntry = this.bufferCache_[bufferKey];
gl.bindBuffer(target, bufferCacheEntry.buffer);
} else {
const buffer = gl.createBuffer();
gl.bindBuffer(target, buffer);
let /** @type {ArrayBufferView} */ arrayBuffer;
if (target == ARRAY_BUFFER) {
arrayBuffer = new Float32Array(arr);
} else if (target == ELEMENT_ARRAY_BUFFER) {
arrayBuffer = this.hasOESElementIndexUint ?
new Uint32Array(arr) : new Uint16Array(arr);
}
gl.bufferData(target, arrayBuffer, buf.getUsage());
this.bufferCache_[bufferKey] = {
buf: buf,
buffer: buffer
};
}
}
/**
* @param {import("./Buffer.js").default} buf Buffer.
*/
deleteBuffer(buf) {
const gl = this.getGL();
const bufferKey = getUid(buf);
const bufferCacheEntry = this.bufferCache_[bufferKey];
if (!gl.isContextLost()) {
gl.deleteBuffer(bufferCacheEntry.buffer);
}
delete this.bufferCache_[bufferKey];
}
/**
* @inheritDoc
*/
disposeInternal() {
unlistenAll(this.canvas_);
const gl = this.getGL();
if (!gl.isContextLost()) {
for (const key in this.bufferCache_) {
gl.deleteBuffer(this.bufferCache_[key].buffer);
}
for (const key in this.programCache_) {
gl.deleteProgram(this.programCache_[key]);
}
for (const key in this.shaderCache_) {
gl.deleteShader(this.shaderCache_[key]);
}
// delete objects for hit-detection
gl.deleteFramebuffer(this.hitDetectionFramebuffer_);
gl.deleteRenderbuffer(this.hitDetectionRenderbuffer_);
gl.deleteTexture(this.hitDetectionTexture_);
}
}
/**
* @return {HTMLCanvasElement} Canvas.
*/
getCanvas() {
return this.canvas_;
}
/**
* Get the WebGL rendering context
* @return {WebGLRenderingContext} The rendering context.
* @api
*/
getGL() {
return this.gl_;
}
/**
* Get the frame buffer for hit detection.
* @return {WebGLFramebuffer} The hit detection frame buffer.
*/
getHitDetectionFramebuffer() {
if (!this.hitDetectionFramebuffer_) {
this.initHitDetectionFramebuffer_();
}
return this.hitDetectionFramebuffer_;
}
/**
* Get shader from the cache if it's in the cache. Otherwise, create
* the WebGL shader, compile it, and add entry to cache.
* @param {import("./Shader.js").default} shaderObject Shader object.
* @return {WebGLShader} Shader.
*/
getShader(shaderObject) {
const shaderKey = getUid(shaderObject);
if (shaderKey in this.shaderCache_) {
return this.shaderCache_[shaderKey];
} else {
const gl = this.getGL();
const shader = gl.createShader(shaderObject.getType());
gl.shaderSource(shader, shaderObject.getSource());
gl.compileShader(shader);
this.shaderCache_[shaderKey] = shader;
return shader;
}
}
/**
* Get the program from the cache if it's in the cache. Otherwise create
* the WebGL program, attach the shaders to it, and add an entry to the
* cache.
* @param {import("./Fragment.js").default} fragmentShaderObject Fragment shader.
* @param {import("./Vertex.js").default} vertexShaderObject Vertex shader.
* @return {WebGLProgram} Program.
*/
getProgram(fragmentShaderObject, vertexShaderObject) {
const programKey = getUid(fragmentShaderObject) + '/' + getUid(vertexShaderObject);
if (programKey in this.programCache_) {
return this.programCache_[programKey];
} else {
const gl = this.getGL();
const program = gl.createProgram();
gl.attachShader(program, this.getShader(fragmentShaderObject));
gl.attachShader(program, this.getShader(vertexShaderObject));
gl.linkProgram(program);
this.programCache_[programKey] = program;
return program;
}
}
/**
* FIXME empty description for jsdoc
*/
handleWebGLContextLost() {
clear(this.bufferCache_);
clear(this.shaderCache_);
clear(this.programCache_);
this.currentProgram_ = null;
this.hitDetectionFramebuffer_ = null;
this.hitDetectionTexture_ = null;
this.hitDetectionRenderbuffer_ = null;
}
/**
* FIXME empty description for jsdoc
*/
handleWebGLContextRestored() {
}
/**
* Creates a 1x1 pixel framebuffer for the hit-detection.
* @private
*/
initHitDetectionFramebuffer_() {
const gl = this.gl_;
const framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
const texture = createEmptyTexture(gl, 1, 1);
const renderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, 1, 1);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT,
gl.RENDERBUFFER, renderbuffer);
gl.bindTexture(gl.TEXTURE_2D, null);
gl.bindRenderbuffer(gl.RENDERBUFFER, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
this.hitDetectionFramebuffer_ = framebuffer;
this.hitDetectionTexture_ = texture;
this.hitDetectionRenderbuffer_ = renderbuffer;
}
/**
* Use a program. If the program is already in use, this will return `false`.
* @param {WebGLProgram} program Program.
* @return {boolean} Changed.
* @api
*/
useProgram(program) {
if (program == this.currentProgram_) {
return false;
} else {
const gl = this.getGL();
gl.useProgram(program);
this.currentProgram_ = program;
return true;
}
}
}
/**
* @param {WebGLRenderingContext} gl WebGL rendering context.
* @param {number=} opt_wrapS wrapS.
* @param {number=} opt_wrapT wrapT.
* @return {WebGLTexture} The texture.
*/
function createTextureInternal(gl, opt_wrapS, opt_wrapT) {
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
if (opt_wrapS !== undefined) {
gl.texParameteri(
TEXTURE_2D, TEXTURE_WRAP_S, opt_wrapS);
}
if (opt_wrapT !== undefined) {
gl.texParameteri(
TEXTURE_2D, TEXTURE_WRAP_T, opt_wrapT);
}
return texture;
}
/**
* @param {WebGLRenderingContext} gl WebGL rendering context.
* @param {number} width Width.
* @param {number} height Height.
* @param {number=} opt_wrapS wrapS.
* @param {number=} opt_wrapT wrapT.
* @return {WebGLTexture} The texture.
*/
export function createEmptyTexture(gl, width, height, opt_wrapS, opt_wrapT) {
const texture = createTextureInternal(gl, opt_wrapS, opt_wrapT);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
return texture;
}
/**
* @param {WebGLRenderingContext} gl WebGL rendering context.
* @param {HTMLCanvasElement|HTMLImageElement|HTMLVideoElement} image Image.
* @param {number=} opt_wrapS wrapS.
* @param {number=} opt_wrapT wrapT.
* @return {WebGLTexture} The texture.
*/
export function createTexture(gl, image, opt_wrapS, opt_wrapT) {
const texture = createTextureInternal(gl, opt_wrapS, opt_wrapT);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
return texture;
}
export default WebGLContext;

757
src/ol/webgl/Helper.js Normal file
View File

@@ -0,0 +1,757 @@
/**
* @module ol/webgl/Helper
*/
import {getUid} from '../util.js';
import {EXTENSIONS as WEBGL_EXTENSIONS} from '../webgl.js';
import Disposable from '../Disposable.js';
import {includes} from '../array.js';
import {listen, unlistenAll} from '../events.js';
import {clear} from '../obj.js';
import {ARRAY_BUFFER, ELEMENT_ARRAY_BUFFER, TEXTURE_2D, TEXTURE_WRAP_S, TEXTURE_WRAP_T} from '../webgl.js';
import ContextEventType from '../webgl/ContextEventType.js';
import {
create as createTransform,
reset as resetTransform,
rotate as rotateTransform,
scale as scaleTransform,
translate as translateTransform
} from '../transform';
import {create, fromTransform} from '../vec/mat4';
import WebGLPostProcessingPass from './PostProcessingPass';
/**
* @typedef {Object} BufferCacheEntry
* @property {import("./Buffer.js").default} buf
* @property {WebGLBuffer} buffer
*/
/**
* Uniform names used in the default shaders.
* @const
* @type {Object.<string,string>}
*/
export const DefaultUniform = {
PROJECTION_MATRIX: 'u_projectionMatrix',
OFFSET_SCALE_MATRIX: 'u_offsetScaleMatrix',
OFFSET_ROTATION_MATRIX: 'u_offsetRotateMatrix',
OPACITY: 'u_opacity'
};
/**
* Attribute names used in the default shaders.
* @const
* @type {Object.<string,string>}
*/
export const DefaultAttrib = {
POSITION: 'a_position',
TEX_COORD: 'a_texCoord',
OPACITY: 'a_opacity',
ROTATE_WITH_VIEW: 'a_rotateWithView',
OFFSETS: 'a_offsets'
};
/**
* @typedef {number|Array<number>|HTMLCanvasElement|HTMLImageElement|HTMLVideoElement} UniformLiteralValue
*/
/**
* A uniform value can be a number, an array of numbers (2 to 4), a canvas element or a callback returning
* one of the previous types.
* @typedef {UniformLiteralValue|function(import("../PluggableMap.js").FrameState):UniformLiteralValue} UniformValue
*/
/**
* @typedef {Object} PostProcessesOptions
* @property {number} [scaleRatio] Scale ratio; if < 1, the post process will render to a texture smaller than
* the main canvas that will then be upsampled (useful for saving resources on blur steps).
* @property {string} [vertexShader] Vertex shader source
* @property {string} [fragmentShader] Fragment shader source
* @property {Object.<string,UniformValue>} [uniforms] Uniform definitions for the post process step
*/
/**
* @typedef {Object} Options
* @property {Object.<string,UniformValue>} [uniforms] Uniform definitions; property names must match the uniform
* names in the provided or default shaders.
* @property {Array<PostProcessesOptions>} [postProcesses] Post-processes definitions
*/
/**
* @typedef {Object} UniformInternalDescription
* @property {string} name Name
* @property {WebGLTexture} [texture] Texture
* @private
*/
/**
* @classdesc
* This class is intended to provide low-level functions related to WebGL rendering, so that direct access
* to the WebGL API should no longer be required.
*
* Several operations are handled by the `WebGLHelper` class:
*
* ### Define custom shaders and uniforms
*
* *Shaders* are low-level programs executed on the GPU and written in GLSL. There are two types of shaders:
*
* Vertex shaders are used to manipulate the position and attributes of the *vertices* of rendered primitives (i.e. the corners of a square).
* Outputs are:
*
* * `gl_Position`: position of the vertex in screen space
*
* * Varyings, usually prefixed with `v_`, are passed on to the fragment shader
*
* Fragment shaders are used to control the actual color of the pixels drawn on screen. Their only output is `gl_FragColor`.
*
* Both shaders can take *uniforms* or *attributes* as input. Attributes are explained later. Uniforms are common, read-only values that
* can be changed at every frame and can be of type float, arrays of floats, or images.
*
* Shaders must be compiled and assembled into a program like so:
* ```js
* // here we simply create two shaders and assemble them in a program which is then used
* // for subsequent rendering calls
* const vertexShader = new WebGLVertex(VERTEX_SHADER);
* const fragmentShader = new WebGLFragment(FRAGMENT_SHADER);
* this.program = this.context.getProgram(fragmentShader, vertexShader);
* this.context.useProgram(this.program);
* ```
*
* Uniforms are defined using the `uniforms` option and can either be explicit values or callbacks taking the frame state as argument.
* You can also change their value along the way like so:
* ```js
* this.context.setUniformFloatValue(DefaultUniform.OPACITY, layerState.opacity);
* ```
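*
* Custom uniforms can also be declared at construction time, either as literal values or as callbacks
* receiving the frame state. A minimal sketch (`u_time` is a hypothetical uniform name, not one of the defaults):
* ```js
* const helper = new WebGLHelper({
*   uniforms: {
*     // hypothetical uniform, recomputed on every frame from the frame state
*     u_time: function(frameState) {
*       return frameState.time * 0.001;
*     }
*   }
* });
* ```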
*
* ### Defining post processing passes
*
* *Post processing* describes the act of rendering primitives to a texture, and then rendering this texture to the final canvas
* while applying special effects in screen space.
* Typical uses are: blurring, color manipulation, depth of field, filtering...
*
* The `WebGLHelper` class offers the possibility to define post processes at creation time using the `postProcesses` option.
* A post process step accepts the following options:
*
* * `fragmentShader` and `vertexShader`: text literals in GLSL language that will be compiled and used in the post processing step.
* * `uniforms`: uniforms can be defined for the post processing steps just like for the main render.
* * `scaleRatio`: allows using an intermediate texture smaller or larger than the final canvas in the post processing step.
* This is typically used in blur steps to reduce the performance overhead by using an already downsampled texture as input.
*
* The {@link module:ol/webgl/PostProcessingPass~WebGLPostProcessingPass} class is used internally, refer to its documentation for more info.
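*
* A minimal sketch of a blur-like post process (the fragment shader source and the `u_blurSize` uniform
* below are hypothetical placeholders, not part of the API):
* ```js
* const helper = new WebGLHelper({
*   postProcesses: [{
*     scaleRatio: 0.5,
*     fragmentShader: MY_BLUR_FRAGMENT_SHADER, // hypothetical GLSL source string
*     uniforms: {
*       u_blurSize: 8
*     }
*   }]
* });
* ```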
*
* ### Binding WebGL buffers and flushing data into them
*
* Data that must be passed to the GPU has to be transferred using `WebGLArrayBuffer` objects.
* A buffer has to be created only once, but must be bound every time the data it holds is changed. Using `WebGLHelper.bindBuffer`
* will bind the buffer and flush the new data to the GPU.
*
* For now, the `WebGLHelper` class expects {@link module:ol/webgl/Buffer~WebGLArrayBuffer} objects.
* ```js
* // at initialization phase
* this.verticesBuffer = new WebGLArrayBuffer([], DYNAMIC_DRAW);
* this.indicesBuffer = new WebGLArrayBuffer([], DYNAMIC_DRAW);
*
* // at rendering phase
* this.context.bindBuffer(ARRAY_BUFFER, this.verticesBuffer);
* this.context.bindBuffer(ELEMENT_ARRAY_BUFFER, this.indicesBuffer);
* ```
*
* ### Specifying attributes
*
* The GPU only receives the data as arrays of numbers. These numbers must be handled differently depending on what they describe (position, texture coordinate, etc.).
* Attributes are used to specify these uses. Use `WebGLHelper.enableAttributeArray` and either
* the default attribute names in {@link module:ol/webgl/Helper~DefaultAttrib} or custom ones.
*
* Please note that you will have to specify the type and offset of the attributes in the data array. You can refer to the documentation of [WebGLRenderingContext.vertexAttribPointer](https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/vertexAttribPointer) for more explanation.
* ```js
* // here we indicate that the data array has the following structure:
* // [posX, posY, offsetX, offsetY, texCoordU, texCoordV, posX, posY, ...]
* let bytesPerFloat = Float32Array.BYTES_PER_ELEMENT;
* this.context.enableAttributeArray(DefaultAttrib.POSITION, 2, FLOAT, bytesPerFloat * 6, 0);
* this.context.enableAttributeArray(DefaultAttrib.OFFSETS, 2, FLOAT, bytesPerFloat * 6, bytesPerFloat * 2);
* this.context.enableAttributeArray(DefaultAttrib.TEX_COORD, 2, FLOAT, bytesPerFloat * 6, bytesPerFloat * 4);
* ```
*
* ### Rendering primitives
*
* Once all the steps above have been completed, rendering primitives to the screen is done using `WebGLHelper.prepareDraw`, `drawElements` and `finalizeDraw`.
* ```js
* // frame preparation step
* this.context.prepareDraw(frameState);
*
* // call this for every data array that has to be rendered on screen
* this.context.drawElements(0, this.indicesBuffer.getArray().length);
*
* // finalize the rendering by applying post processes
* this.context.finalizeDraw(frameState);
* ```
*
* For an example usage of this class, refer to {@link module:ol/renderer/webgl/PointsLayer~WebGLPointsLayerRenderer}.
*
*
* @api
*/
class WebGLHelper extends Disposable {
/**
* @param {Options=} opt_options Options.
*/
constructor(opt_options) {
super();
const options = opt_options || {};
/**
* @private
* @type {HTMLCanvasElement}
*/
this.canvas_ = document.createElement('canvas');
this.canvas_.style.position = 'absolute';
/**
* @private
* @type {WebGLRenderingContext}
*/
this.gl_ = this.canvas_.getContext('webgl');
const gl = this.getGL();
/**
* @private
* @type {!Object<string, BufferCacheEntry>}
*/
this.bufferCache_ = {};
/**
* @private
* @type {!Object<string, WebGLShader>}
*/
this.shaderCache_ = {};
/**
* @private
* @type {!Object<string, WebGLProgram>}
*/
this.programCache_ = {};
/**
* @private
* @type {WebGLProgram}
*/
this.currentProgram_ = null;
/**
* @type {boolean}
*/
this.hasOESElementIndexUint = includes(WEBGL_EXTENSIONS, 'OES_element_index_uint');
// use the OES_element_index_uint extension if available
if (this.hasOESElementIndexUint) {
gl.getExtension('OES_element_index_uint');
}
listen(this.canvas_, ContextEventType.LOST,
this.handleWebGLContextLost, this);
listen(this.canvas_, ContextEventType.RESTORED,
this.handleWebGLContextRestored, this);
/**
* @private
* @type {import("../transform.js").Transform}
*/
this.projectionMatrix_ = createTransform();
/**
* @private
* @type {import("../transform.js").Transform}
*/
this.offsetRotateMatrix_ = createTransform();
/**
* @private
* @type {import("../transform.js").Transform}
*/
this.offsetScaleMatrix_ = createTransform();
/**
* @private
* @type {Array<number>}
*/
this.tmpMat4_ = create();
/**
* @private
* @type {Object.<string, WebGLUniformLocation>}
*/
this.uniformLocations_;
/**
* @private
* @type {Object.<string, number>}
*/
this.attribLocations_;
/**
* Holds info about custom uniforms used in the post processing pass.
* If the uniform is a texture, the WebGL Texture object will be stored here.
* @type {Array<UniformInternalDescription>}
* @private
*/
this.uniforms_ = [];
options.uniforms && Object.keys(options.uniforms).forEach(function(name) {
this.uniforms_.push({
name: name,
value: options.uniforms[name]
});
}.bind(this));
/**
* An array of PostProcessingPass objects is kept in this variable, built from the steps provided in the
* options. If no post process was given, a default one is used (so as not to have to make an exception to
* the frame buffer logic).
* @type {Array<WebGLPostProcessingPass>}
* @private
*/
this.postProcessPasses_ = options.postProcesses ? options.postProcesses.map(function(options) {
return new WebGLPostProcessingPass({
webGlContext: gl,
scaleRatio: options.scaleRatio,
vertexShader: options.vertexShader,
fragmentShader: options.fragmentShader,
uniforms: options.uniforms
});
}) : [new WebGLPostProcessingPass({webGlContext: gl})];
}
/**
* Just bind the buffer if it's in the cache. Otherwise create
* the WebGL buffer, bind it, populate it, and add an entry to
* the cache.
* TODO: improve this, the logic is unclear: we want A/ to bind a buffer and B/ to flush data in it
* @param {number} target Target.
* @param {import("./Buffer").default} buf Buffer.
* @api
*/
bindBuffer(target, buf) {
const gl = this.getGL();
const arr = buf.getArray();
const bufferKey = getUid(buf);
let bufferCache = this.bufferCache_[bufferKey];
if (!bufferCache) {
const buffer = gl.createBuffer();
bufferCache = this.bufferCache_[bufferKey] = {
buf: buf,
buffer: buffer
};
}
gl.bindBuffer(target, bufferCache.buffer);
let /** @type {ArrayBufferView} */ arrayBuffer;
if (target == ARRAY_BUFFER) {
arrayBuffer = new Float32Array(arr);
} else if (target == ELEMENT_ARRAY_BUFFER) {
arrayBuffer = this.hasOESElementIndexUint ?
new Uint32Array(arr) : new Uint16Array(arr);
}
gl.bufferData(target, arrayBuffer, buf.getUsage());
}
/**
* @param {import("./Buffer.js").default} buf Buffer.
*/
deleteBuffer(buf) {
const gl = this.getGL();
const bufferKey = getUid(buf);
const bufferCacheEntry = this.bufferCache_[bufferKey];
if (!gl.isContextLost()) {
gl.deleteBuffer(bufferCacheEntry.buffer);
}
delete this.bufferCache_[bufferKey];
}
/**
* @inheritDoc
*/
disposeInternal() {
unlistenAll(this.canvas_);
const gl = this.getGL();
if (!gl.isContextLost()) {
for (const key in this.bufferCache_) {
gl.deleteBuffer(this.bufferCache_[key].buffer);
}
for (const key in this.programCache_) {
gl.deleteProgram(this.programCache_[key]);
}
for (const key in this.shaderCache_) {
gl.deleteShader(this.shaderCache_[key]);
}
}
}
/**
* Clear the buffer & set the viewport to draw.
* Post process passes will be initialized here, the first one being bound as a render target for
* subsequent draw calls.
* @param {import("../PluggableMap.js").FrameState} frameState current frame state
* @api
*/
prepareDraw(frameState) {
const gl = this.getGL();
const canvas = this.getCanvas();
const size = frameState.size;
const pixelRatio = frameState.pixelRatio;
canvas.width = size[0] * pixelRatio;
canvas.height = size[1] * pixelRatio;
canvas.style.width = size[0] + 'px';
canvas.style.height = size[1] + 'px';
gl.useProgram(this.currentProgram_);
// loop backwards in post processes list
for (let i = this.postProcessPasses_.length - 1; i >= 0; i--) {
this.postProcessPasses_[i].init(frameState);
}
gl.bindTexture(gl.TEXTURE_2D, null);
gl.clearColor(0.0, 0.0, 0.0, 0.0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.enable(gl.BLEND);
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
this.applyFrameState(frameState);
this.applyUniforms(frameState);
}
/**
* Execute a draw call based on the currently bound program, texture, buffers, attributes.
* @param {number} start Start index.
* @param {number} end End index.
* @api
*/
drawElements(start, end) {
const gl = this.getGL();
const elementType = this.hasOESElementIndexUint ?
gl.UNSIGNED_INT : gl.UNSIGNED_SHORT;
const elementSize = this.hasOESElementIndexUint ? 4 : 2;
const numItems = end - start;
const offsetInBytes = start * elementSize;
gl.drawElements(gl.TRIANGLES, numItems, elementType, offsetInBytes);
}
/**
* Apply the successive post process passes which will eventually render to the actual canvas.
* @param {import("../PluggableMap.js").FrameState} frameState current frame state
* @api
*/
finalizeDraw(frameState) {
// apply post processes using the next one as target
for (let i = 0; i < this.postProcessPasses_.length; i++) {
this.postProcessPasses_[i].apply(frameState, this.postProcessPasses_[i + 1] || null);
}
}
/**
* @return {HTMLCanvasElement} Canvas.
* @api
*/
getCanvas() {
return this.canvas_;
}
/**
* Get the WebGL rendering context
* @return {WebGLRenderingContext} The rendering context.
* @api
*/
getGL() {
return this.gl_;
}
/**
* Sets the default matrix uniforms for a given frame state. This is called internally in `prepareDraw`.
* @param {import("../PluggableMap.js").FrameState} frameState Frame state.
* @private
*/
applyFrameState(frameState) {
const size = frameState.size;
const rotation = frameState.viewState.rotation;
const resolution = frameState.viewState.resolution;
const center = frameState.viewState.center;
// set the "uniform" values (coordinates 0,0 are the center of the view)
const projectionMatrix = resetTransform(this.projectionMatrix_);
scaleTransform(projectionMatrix, 2 / (resolution * size[0]), 2 / (resolution * size[1]));
rotateTransform(projectionMatrix, -rotation);
translateTransform(projectionMatrix, -center[0], -center[1]);
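// at this point projectionMatrix maps world (map) coordinates to clip space ([-1, 1] on both axes)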
const offsetScaleMatrix = resetTransform(this.offsetScaleMatrix_);
scaleTransform(offsetScaleMatrix, 2 / size[0], 2 / size[1]);
const offsetRotateMatrix = resetTransform(this.offsetRotateMatrix_);
if (rotation !== 0) {
rotateTransform(offsetRotateMatrix, -rotation);
}
this.setUniformMatrixValue(DefaultUniform.PROJECTION_MATRIX, fromTransform(this.tmpMat4_, projectionMatrix));
this.setUniformMatrixValue(DefaultUniform.OFFSET_SCALE_MATRIX, fromTransform(this.tmpMat4_, offsetScaleMatrix));
this.setUniformMatrixValue(DefaultUniform.OFFSET_ROTATION_MATRIX, fromTransform(this.tmpMat4_, offsetRotateMatrix));
}
/**
* Sets the custom uniforms based on what was given in the constructor. This is called internally in `prepareDraw`.
* @param {import("../PluggableMap.js").FrameState} frameState Frame state.
* @private
*/
applyUniforms(frameState) {
const gl = this.getGL();
let value;
let textureSlot = 0;
this.uniforms_.forEach(function(uniform) {
value = typeof uniform.value === 'function' ? uniform.value(frameState) : uniform.value;
// apply value based on type
if (value instanceof HTMLCanvasElement || value instanceof ImageData) {
// create a texture & put data
if (!uniform.texture) {
uniform.texture = gl.createTexture();
}
gl.activeTexture(gl[`TEXTURE${textureSlot}`]);
gl.bindTexture(gl.TEXTURE_2D, uniform.texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
if (value instanceof ImageData) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, value.width, value.height, 0,
gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(value.data));
} else {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, value);
}
// fill texture slots
gl.uniform1i(this.getUniformLocation(uniform.name), textureSlot++);
} else if (Array.isArray(value)) {
switch (value.length) {
case 2:
gl.uniform2f(this.getUniformLocation(uniform.name), value[0], value[1]);
return;
case 3:
gl.uniform3f(this.getUniformLocation(uniform.name), value[0], value[1], value[2]);
return;
case 4:
gl.uniform4f(this.getUniformLocation(uniform.name), value[0], value[1], value[2], value[3]);
return;
default:
return;
}
} else if (typeof value === 'number') {
gl.uniform1f(this.getUniformLocation(uniform.name), value);
}
}.bind(this));
}
/**
* Get shader from the cache if it's in the cache. Otherwise, create
* the WebGL shader, compile it, and add entry to cache.
* TODO: make compilation errors show up
* @param {import("./Shader.js").default} shaderObject Shader object.
* @return {WebGLShader} Shader.
* @api
*/
getShader(shaderObject) {
const shaderKey = getUid(shaderObject);
if (shaderKey in this.shaderCache_) {
return this.shaderCache_[shaderKey];
} else {
const gl = this.getGL();
const shader = gl.createShader(shaderObject.getType());
gl.shaderSource(shader, shaderObject.getSource());
gl.compileShader(shader);
this.shaderCache_[shaderKey] = shader;
return shader;
}
}
/**
* Use a program. If the program is already in use, this will return `false`.
* @param {WebGLProgram} program Program.
* @return {boolean} Changed.
* @api
*/
useProgram(program) {
if (program == this.currentProgram_) {
return false;
} else {
const gl = this.getGL();
gl.useProgram(program);
this.currentProgram_ = program;
this.uniformLocations_ = {};
this.attribLocations_ = {};
return true;
}
}
/**
* Get the program from the cache if it's in the cache. Otherwise create
* the WebGL program, attach the shaders to it, and add an entry to the
* cache.
* @param {import("./Fragment.js").default} fragmentShaderObject Fragment shader.
* @param {import("./Vertex.js").default} vertexShaderObject Vertex shader.
* @return {WebGLProgram} Program.
* @api
*/
getProgram(fragmentShaderObject, vertexShaderObject) {
const programKey = getUid(fragmentShaderObject) + '/' + getUid(vertexShaderObject);
if (programKey in this.programCache_) {
return this.programCache_[programKey];
} else {
const gl = this.getGL();
const program = gl.createProgram();
gl.attachShader(program, this.getShader(fragmentShaderObject));
gl.attachShader(program, this.getShader(vertexShaderObject));
gl.linkProgram(program);
this.programCache_[programKey] = program;
return program;
}
}
/**
* Will get the location from the shader or the cache
* @param {string} name Uniform name
* @return {WebGLUniformLocation} uniformLocation
* @api
*/
getUniformLocation(name) {
if (!this.uniformLocations_[name]) {
this.uniformLocations_[name] = this.getGL().getUniformLocation(this.currentProgram_, name);
}
return this.uniformLocations_[name];
}
/**
* Will get the location from the shader or the cache
* @param {string} name Attribute name
* @return {number} attribLocation
* @api
*/
getAttributeLocation(name) {
if (!this.attribLocations_[name]) {
this.attribLocations_[name] = this.getGL().getAttribLocation(this.currentProgram_, name);
}
return this.attribLocations_[name];
}
/**
* Give a value for a standard float uniform
* @param {string} uniform Uniform name
* @param {number} value Value
* @api
*/
setUniformFloatValue(uniform, value) {
this.getGL().uniform1f(this.getUniformLocation(uniform), value);
}
/**
* Give a value for a standard matrix4 uniform
* @param {string} uniform Uniform name
* @param {Array<number>} value Matrix value
* @api
*/
setUniformMatrixValue(uniform, value) {
this.getGL().uniformMatrix4fv(this.getUniformLocation(uniform), false, value);
}
/**
* Will set the currently bound buffer to an attribute of the shader program
* @param {string} attribName Attribute name
* @param {number} size Number of components per attribute
* @param {number} type UNSIGNED_INT, UNSIGNED_BYTE, UNSIGNED_SHORT or FLOAT
* @param {number} stride Stride in bytes (0 means attribs are packed)
* @param {number} offset Offset in bytes
* @api
*/
enableAttributeArray(attribName, size, type, stride, offset) {
this.getGL().enableVertexAttribArray(this.getAttributeLocation(attribName));
this.getGL().vertexAttribPointer(this.getAttributeLocation(attribName), size, type,
false, stride, offset);
}
/**
* WebGL context was lost
* @private
*/
handleWebGLContextLost() {
clear(this.bufferCache_);
clear(this.shaderCache_);
clear(this.programCache_);
this.currentProgram_ = null;
}
/**
* WebGL context was restored
* @private
*/
handleWebGLContextRestored() {
}
// TODO: shutdown program
/**
* TODO: these are not used and should be reworked
* @param {number=} opt_wrapS wrapS.
* @param {number=} opt_wrapT wrapT.
* @return {WebGLTexture} The texture.
*/
createTextureInternal(opt_wrapS, opt_wrapT) {
const gl = this.getGL();
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
if (opt_wrapS !== undefined) {
gl.texParameteri(
TEXTURE_2D, TEXTURE_WRAP_S, opt_wrapS);
}
if (opt_wrapT !== undefined) {
gl.texParameteri(
TEXTURE_2D, TEXTURE_WRAP_T, opt_wrapT);
}
return texture;
}
/**
* TODO: these are not used and should be reworked
* @param {number} width Width.
* @param {number} height Height.
* @param {number=} opt_wrapS wrapS.
* @param {number=} opt_wrapT wrapT.
* @return {WebGLTexture} The texture.
*/
createEmptyTexture(width, height, opt_wrapS, opt_wrapT) {
const gl = this.getGL();
const texture = this.createTextureInternal(opt_wrapS, opt_wrapT);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
return texture;
}
/**
* TODO: these are not used and should be reworked
* @param {HTMLCanvasElement|HTMLImageElement|HTMLVideoElement} image Image.
* @param {number=} opt_wrapS wrapS.
* @param {number=} opt_wrapT wrapT.
* @return {WebGLTexture} The texture.
*/
createTexture(image, opt_wrapS, opt_wrapT) {
const gl = this.getGL();
const texture = this.createTextureInternal(opt_wrapS, opt_wrapT);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
return texture;
}
}
export default WebGLHelper;

302
src/ol/webgl/PostProcessingPass.js Normal file
View File

@@ -0,0 +1,302 @@
/**
* @module ol/webgl/PostProcessingPass
*/
const DEFAULT_VERTEX_SHADER = `
precision mediump float;
attribute vec2 a_position;
varying vec2 v_texCoord;
varying vec2 v_screenCoord;
uniform vec2 u_screenSize;
void main() {
v_texCoord = a_position * 0.5 + 0.5;
v_screenCoord = v_texCoord * u_screenSize;
gl_Position = vec4(a_position, 0.0, 1.0);
}
`;
const DEFAULT_FRAGMENT_SHADER = `
precision mediump float;
uniform sampler2D u_image;
varying vec2 v_texCoord;
varying vec2 v_screenCoord;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord);
}
`;
/**
* @typedef {Object} Options
* @property {WebGLRenderingContext} webGlContext WebGL rendering context; mandatory.
* @property {number} [scaleRatio] Scale ratio; if < 1, the post process will render to a texture smaller than
* the main canvas that will then be upsampled (useful for saving resources on blur steps).
* @property {string} [vertexShader] Vertex shader source
* @property {string} [fragmentShader] Fragment shader source
* @property {Object.<string,import("./Helper").UniformValue>} [uniforms] Uniform definitions for the post process step
*/
/**
* @typedef {Object} UniformInternalDescription
* @property {import("./Helper.js").UniformValue} value Value
* @property {number} location Location
* @property {WebGLTexture} [texture] Texture
* @private
*/
/**
* @classdesc
* This class is used to define Post Processing passes with custom shaders and uniforms.
* This is used internally by {@link module:ol/webgl/Helper~WebGLHelper}.
*
* Default shaders are shown hereafter:
*
* * Vertex shader:
*
* ```
* precision mediump float;
*
* attribute vec2 a_position;
* varying vec2 v_texCoord;
* varying vec2 v_screenCoord;
*
* uniform vec2 u_screenSize;
*
* void main() {
* v_texCoord = a_position * 0.5 + 0.5;
* v_screenCoord = v_texCoord * u_screenSize;
* gl_Position = vec4(a_position, 0.0, 1.0);
* }
* ```
*
* * Fragment shader:
*
* ```
* precision mediump float;
*
* uniform sampler2D u_image;
*
* varying vec2 v_texCoord;
* varying vec2 v_screenCoord;
*
* void main() {
* gl_FragColor = texture2D(u_image, v_texCoord);
* }
* ```
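*
* A pass can be instantiated like so (a minimal sketch; the `gl` context is normally provided by
* {@link module:ol/webgl/Helper~WebGLHelper}, and `u_strength` is a hypothetical uniform read by a
* custom fragment shader, not shown here):
* ```js
* const pass = new WebGLPostProcessingPass({
*   webGlContext: gl,
*   scaleRatio: 0.5,
*   uniforms: {
*     u_strength: 0.75
*   }
* });
* ```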
*
* @api
*/
class WebGLPostProcessingPass {
/**
* @param {Options=} options Options.
*/
constructor(options) {
this.gl_ = options.webGlContext;
const gl = this.gl_;
this.scaleRatio_ = options.scaleRatio || 1;
this.renderTargetTexture_ = gl.createTexture();
this.renderTargetTextureSize_ = null;
this.frameBuffer_ = gl.createFramebuffer();
// compile the program for the frame buffer
// TODO: make compilation errors show up
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, options.vertexShader || DEFAULT_VERTEX_SHADER);
gl.compileShader(vertexShader);
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, options.fragmentShader || DEFAULT_FRAGMENT_SHADER);
gl.compileShader(fragmentShader);
this.renderTargetProgram_ = gl.createProgram();
gl.attachShader(this.renderTargetProgram_, vertexShader);
gl.attachShader(this.renderTargetProgram_, fragmentShader);
gl.linkProgram(this.renderTargetProgram_);
// bind the vertices buffer for the frame buffer
this.renderTargetVerticesBuffer_ = gl.createBuffer();
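// two triangles covering the whole viewport in clip space ([-1, 1] on both axes)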
const verticesArray = [
-1, -1,
1, -1,
-1, 1,
1, -1,
1, 1,
-1, 1
];
gl.bindBuffer(gl.ARRAY_BUFFER, this.renderTargetVerticesBuffer_);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verticesArray), gl.STATIC_DRAW);
this.renderTargetAttribLocation_ = gl.getAttribLocation(this.renderTargetProgram_, 'a_position');
this.renderTargetUniformLocation_ = gl.getUniformLocation(this.renderTargetProgram_, 'u_screenSize');
this.renderTargetTextureLocation_ = gl.getUniformLocation(this.renderTargetProgram_, 'u_image');
/**
* Holds info about custom uniforms used in the post processing pass
* @type {Array<UniformInternalDescription>}
* @private
*/
this.uniforms_ = [];
options.uniforms && Object.keys(options.uniforms).forEach(function(name) {
this.uniforms_.push({
value: options.uniforms[name],
location: gl.getUniformLocation(this.renderTargetProgram_, name)
});
}.bind(this));
}
/**
* Get the WebGL rendering context
* @return {WebGLRenderingContext} The rendering context.
* @api
*/
getGL() {
return this.gl_;
}
/**
* Initialize the render target texture of the post process, make sure it is at the
* right size and bind it as a render target for the next draw calls.
* The last step to be initialized will be the one where the primitives are rendered.
* @param {import("../PluggableMap.js").FrameState} frameState current frame state
* @api
*/
init(frameState) {
const gl = this.getGL();
const canvas = gl.canvas;
const size = frameState.size;
// rendering goes to my buffer
gl.bindFramebuffer(gl.FRAMEBUFFER, this.getFrameBuffer());
gl.viewport(0, 0, canvas.width * this.scaleRatio_, canvas.height * this.scaleRatio_);
// if size has changed: adjust canvas & render target texture
if (!this.renderTargetTextureSize_ ||
this.renderTargetTextureSize_[0] !== size[0] || this.renderTargetTextureSize_[1] !== size[1]) {
this.renderTargetTextureSize_ = size;
// create a new texture
const level = 0;
const internalFormat = gl.RGBA;
const border = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const data = null;
gl.bindTexture(gl.TEXTURE_2D, this.renderTargetTexture_);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
canvas.width * this.scaleRatio_, canvas.height * this.scaleRatio_, border,
format, type, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// bind the texture to the framebuffer
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this.renderTargetTexture_, 0);
}
}
/**
* Render to the next postprocessing pass (or to the canvas if final pass).
* @param {import("../PluggableMap.js").FrameState} frameState current frame state
* @param {WebGLPostProcessingPass} [nextPass] Next pass, optional
* @api
*/
apply(frameState, nextPass) {
const gl = this.getGL();
const canvas = gl.canvas;
gl.bindFramebuffer(gl.FRAMEBUFFER, nextPass ? nextPass.getFrameBuffer() : null);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, this.renderTargetTexture_);
// render the frame buffer to the canvas
gl.clearColor(0.0, 0.0, 0.0, 0.0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.enable(gl.BLEND);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
gl.viewport(0, 0, canvas.width, canvas.height);
gl.bindBuffer(gl.ARRAY_BUFFER, this.renderTargetVerticesBuffer_);
gl.useProgram(this.renderTargetProgram_);
gl.enableVertexAttribArray(this.renderTargetAttribLocation_);
gl.vertexAttribPointer(this.renderTargetAttribLocation_, 2, gl.FLOAT, false, 0, 0);
gl.uniform2f(this.renderTargetUniformLocation_, canvas.width, canvas.height);
gl.uniform1i(this.renderTargetTextureLocation_, 0);
this.applyUniforms(frameState);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
/**
* @return {WebGLFramebuffer} Frame buffer
* @api
*/
getFrameBuffer() {
return this.frameBuffer_;
}
/**
* Sets the custom uniforms based on what was given in the constructor.
* @param {import("../PluggableMap.js").FrameState} frameState Frame state.
* @private
*/
applyUniforms(frameState) {
const gl = this.getGL();
let value;
let textureSlot = 1;
this.uniforms_.forEach(function(uniform) {
value = typeof uniform.value === 'function' ? uniform.value(frameState) : uniform.value;
// apply value based on type
if (value instanceof HTMLCanvasElement || value instanceof ImageData) {
// create a texture & put data
if (!uniform.texture) {
uniform.texture = gl.createTexture();
}
gl.activeTexture(gl[`TEXTURE${textureSlot}`]);
gl.bindTexture(gl.TEXTURE_2D, uniform.texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
if (value instanceof ImageData) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, value.width, value.height, 0,
gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(value.data));
} else {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, value);
}
// fill texture slots
gl.uniform1i(uniform.location, textureSlot++);
} else if (Array.isArray(value)) {
switch (value.length) {
case 2:
gl.uniform2f(uniform.location, value[0], value[1]);
return;
case 3:
gl.uniform3f(uniform.location, value[0], value[1], value[2]);
return;
case 4:
gl.uniform4f(uniform.location, value[0], value[1], value[2], value[3]);
return;
default: return;
}
} else if (typeof value === 'number') {
gl.uniform1f(uniform.location, value);
}
});
}
}
export default WebGLPostProcessingPass;

View File

@@ -1,17 +0,0 @@
/**
* @module {{{module}}}
*/
// This file is automatically generated, do not edit.
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../webgl.js';
import WebGLFragment from '../../../webgl/Fragment.js';
import WebGLVertex from '../../../webgl/Vertex.js';
export const fragment = new WebGLFragment(DEBUG_WEBGL ?
'precision mediump float;\n{{{originalFragmentSource}}}' :
'precision mediump float;{{{fragmentSource}}}');
export const vertex = new WebGLVertex(DEBUG_WEBGL ?
'{{{originalVertexSource}}}' :
'{{{vertexSource}}}');

View File

@@ -1,37 +0,0 @@
/**
* @module {{{module}}}/Locations
*/
// This file is automatically generated, do not edit
// Run `make shaders` to generate, and commit the result.
import {DEBUG as DEBUG_WEBGL} from '../../../../webgl.js';
class Locations {
/**
* @param {WebGLRenderingContext} gl GL.
* @param {WebGLProgram} program Program.
*/
constructor(gl, program) {
{{#uniforms}}
/**
* @type {WebGLUniformLocation}
*/
this.{{originalName}} = gl.getUniformLocation(
program, DEBUG_WEBGL ? '{{originalName}}' : '{{shortName}}');
{{/uniforms}}
{{#attributes}}
/**
* @type {number}
*/
this.{{originalName}} = gl.getAttribLocation(
program, DEBUG_WEBGL ? '{{originalName}}' : '{{shortName}}');
{{/attributes}}
}
}
export default Locations;

View File

@@ -1,146 +0,0 @@
const fs = require('fs');
const ESCAPE_SEQUENCE = {
'\\': '\\\\',
'\n': '\\n',
'\t': '\\t'
};
function js_escape(s) {
return s.split('').map(function(c) {
return ESCAPE_SEQUENCE[c] || c;
}).join('');
}
function glsl_compress(s, shortNames) {
// strip leading whitespace
s = s.replace(/^\s+/g, '');
// strip trailing whitespace
s = s.replace(/\s+$/g, '');
// strip multi-line comments
s = s.replace(/\/\*[\s\S]*?\*\//g, '');
// strip single line comments
s = s.replace(/\/\/.*?\n/g, '');
// replace multiple whitespace with a single space
s = s.replace(/\s+/g, ' ');
// remove whitespace between non-word tokens
s = s.replace(/(\S)\s+([^\w])/g, '$1$2')
.replace(/([^\w])\s+(\S)/g, '$1$2');
// replace original names with short names
for (const originalName in shortNames) {
s = s.replace(new RegExp(originalName, 'gm'), shortNames[originalName]);
}
return s;
}
function main(argv) {
const options = {};
for (let i = 2, ii = argv.length; i < ii; i += 2) {
options[argv[i].replace(/^../, '')] = argv[i + 1];
}
if (!options.input) {
process.stdout.write('--input option missing\n');
return 1;
}
const json = {};
let nextShortName = 'a'.charCodeAt(0);
const shortNames = {};
const attributes = {};
const uniforms = {};
const varyings = {};
const blocks = {
common: '',
vertex: '',
fragment: ''
};
let block = undefined;
const inFile = fs.readFileSync(options.input, 'utf-8');
const lines = inFile.split('\n');
let m, shortName;
lines.forEach(function(line, i) {
if (line.indexOf('//!') == 0) {
m = line.match(/\/\/!\s+MODULE=(\S+)\s*$/);
if (m) {
json.module = m[1];
return;
}
m = line.match(/\/\/!\s+COMMON\s*$/);
if (m) {
block = 'common';
return;
}
m = line.match(/\/\/!\s+VERTEX\s*$/);
if (m) {
block = 'vertex';
return;
}
m = line.match(/\/\/!\s+FRAGMENT\s*$/);
if (m) {
block = 'fragment';
return;
}
} else {
if (block === undefined) {
if (line.replace(/\s+$/g, '') != '') {
process.stdout.write(`Error parsing ${options.input}\n`);
return;
}
} else {
blocks[block] += line + (i == lines.length - 1 ? '' : '\n');
}
m = line.match(/attribute\s+\S+\s+(\S+);\s*$/);
if (m) {
const attribute = m[1];
if (!(attribute in attributes)) {
shortName = String.fromCharCode(nextShortName++);
attributes[attribute] = {
originalName: attribute,
shortName: shortName
};
shortNames[attribute] = shortName;
}
}
m = line.match(/uniform\s+\S+\s+(\S+);\s*$/);
if (m) {
const uniform = m[1];
if (!(uniform in uniforms)) {
shortName = String.fromCharCode(nextShortName++);
uniforms[uniform] = {
originalName: uniform,
shortName: shortName
};
shortNames[uniform] = shortName;
}
}
m = line.match(/varying\s+\S+\s+(\S+);\s*$/);
if (m) {
const varying = m[1];
if (!(varying in varyings)) {
shortName = String.fromCharCode(nextShortName++);
shortNames[varying] = shortName;
}
}
}
});
json.originalFragmentSource = js_escape(blocks.common + blocks.fragment);
json.originalVertexSource = js_escape(blocks.common + blocks.vertex);
json.fragmentSource = glsl_compress(blocks.common + blocks.fragment, shortNames);
json.vertexSource = glsl_compress(blocks.common + blocks.vertex, shortNames);
json.attributes = Object.keys(attributes).map(a => attributes[a]);
json.uniforms = Object.keys(uniforms).map(u => uniforms[u]);
if (options.output && options.output != '-') {
fs.writeFileSync(options.output, JSON.stringify(json));
} else {
process.stdout.write(JSON.stringify(json));
}
return 0;
}
if (require.main === module) {
process.exit(main(process.argv));
}

Binary file not shown. (Before: 838 B)

Binary file not shown. (Before: 801 B)

Binary file not shown. (Before: 733 B)

Binary file not shown. (Before: 799 B)

View File

@@ -1,8 +1,6 @@
import Map from '../../../../src/ol/Map.js';
import WebGLMap from '../../../../src/ol/WebGLMap.js';
import View from '../../../../src/ol/View.js';
import ImageLayer from '../../../../src/ol/layer/Image.js';
import WebGLImageLayer from '../../../../src/ol/layer/Image.js';
import {assign} from '../../../../src/ol/obj.js';
import {get as getProjection, transform, transformExtent} from '../../../../src/ol/proj.js';
import Static from '../../../../src/ol/source/ImageStatic.js';
@@ -14,7 +12,7 @@ describe('ol.rendering.layer.Image', function() {
let map;
function createMap(renderer) {
const MapConstructor = renderer === 'webgl' ? WebGLMap : Map;
const MapConstructor = Map;
map = new MapConstructor({
pixelRatio: 1,
target: createMapDiv(50, 50),
@@ -34,7 +32,7 @@ describe('ol.rendering.layer.Image', function() {
});
function waitForImages(renderer, sources, layerOptions, onImagesLoaded) {
const LayerConstructor = renderer === 'webgl' ? WebGLImageLayer : ImageLayer;
const LayerConstructor = ImageLayer;
let imagesLoading = 0;
let imagesLoaded = 0;
@@ -83,15 +81,6 @@ describe('ol.rendering.layer.Image', function() {
IMAGE_TOLERANCE, done);
});
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
waitForImages('webgl', [source], {}, function() {
expectResemble(map, 'rendering/ol/layer/expected/image-webgl.png',
IMAGE_TOLERANCE, done);
});
});
});
describe('single image layer - scaled', function() {

View File

@@ -1,10 +1,8 @@
import Map from '../../../../src/ol/Map.js';
import WebGLMap from '../../../../src/ol/WebGLMap.js';
import View from '../../../../src/ol/View.js';
import {getSize} from '../../../../src/ol/extent.js';
import Point from '../../../../src/ol/geom/Point.js';
import TileLayer from '../../../../src/ol/layer/Tile.js';
import WebGLTileLayer from '../../../../src/ol/layer/WebGLTile.js';
import {assign} from '../../../../src/ol/obj.js';
import {transform} from '../../../../src/ol/proj.js';
import TileImage from '../../../../src/ol/source/TileImage.js';
@@ -20,7 +18,7 @@ describe('ol.rendering.layer.Tile', function() {
let map;
function createMap(renderer, opt_center, opt_size, opt_pixelRatio, opt_resolutions) {
const MapConstructor = renderer === 'webgl' ? WebGLMap : Map;
const MapConstructor = Map;
const size = opt_size !== undefined ? opt_size : [50, 50];
map = new MapConstructor({
@@ -43,7 +41,7 @@ describe('ol.rendering.layer.Tile', function() {
});
function waitForTiles(renderer, sources, layerOptions, onTileLoaded) {
const LayerConstructor = renderer === 'webgl' ? WebGLTileLayer : TileLayer;
const LayerConstructor = TileLayer;
let tilesLoading = 0;
let tileLoaded = 0;
@@ -73,6 +71,40 @@ describe('ol.rendering.layer.Tile', function() {
});
}
describe('with tile transition', function() {
it('renders correctly after the transition', function(done) {
createMap('canvas');
const source = new XYZ({
url: 'rendering/ol/data/tiles/osm/{z}/{x}/{y}.png'
});
waitForTiles('canvas', [source], {}, function() {
setTimeout(function() {
expectResemble(map, 'rendering/ol/layer/expected/osm-canvas.png',
IMAGE_TOLERANCE, done);
}, 500);
});
});
});
describe('single tile layer', function() {
let source;
beforeEach(function() {
source = new XYZ({
url: 'rendering/ol/data/tiles/osm/{z}/{x}/{y}.png',
transition: 0
});
});
it('tests the canvas renderer', function(done) {
createMap('canvas');
waitForTiles('canvas', [source], {}, function() {
expectResemble(map, 'rendering/ol/layer/expected/osm-canvas.png',
IMAGE_TOLERANCE, done);
});
});
});
describe('two tile layers', function() {
let source1, source2;
@@ -87,15 +119,6 @@ describe('ol.rendering.layer.Tile', function() {
});
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
waitForTiles('webgl', [source1, source2], {}, function() {
expectResemble(map, 'rendering/ol/layer/expected/2-layers-webgl.png',
IMAGE_TOLERANCE, done);
});
});
function centerExtent(map) {
const c = map.getView().calculateExtent(map.getSize());
const qw = getSize(c)[0] / 4;
@@ -148,15 +171,6 @@ describe('ol.rendering.layer.Tile', function() {
IMAGE_TOLERANCE, done);
});
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
waitForTiles('webgl', [source], {opacity: 0.2}, function() {
expectResemble(map, 'rendering/ol/layer/expected/opacity-webgl.png',
IMAGE_TOLERANCE, done);
});
});
});
describe('tile layer with non-square tiles', function() {
@@ -180,16 +194,6 @@ describe('ol.rendering.layer.Tile', function() {
});
});
where('WebGL').it('512x256 renders correcly using the webgl renderer', function(done) {
assertWebGL();
const source = createSource('512x256');
createMap('webgl', [-10997148, 4569099]);
waitForTiles('webgl', [source], {}, function() {
expectResemble(map, 'rendering/ol/layer/expected/512x256-webgl.png',
IMAGE_TOLERANCE, done);
});
});
it('192x256 renders correctly using the canvas renderer', function(done) {
const source = createSource('192x256');
createMap('canvas', [-11271098, 3747248], [100, 100], undefined,
@@ -199,17 +203,6 @@ describe('ol.rendering.layer.Tile', function() {
IMAGE_TOLERANCE, done);
});
});
where('WebGL').it('192x256 renders correcly using the webgl renderer', function(done) {
assertWebGL();
const source = createSource('192x256');
createMap('webgl', [-11271098, 3747248], [100, 100], undefined,
source.getTileGrid().getResolutions());
waitForTiles('webgl', [source], {}, function() {
expectResemble(map, 'rendering/ol/layer/expected/192x256-webgl.png',
IMAGE_TOLERANCE, done);
});
});
});
describe('tile layer with render listener', function() {

View File

@@ -1,10 +1,8 @@
import Feature from '../../../src/ol/Feature.js';
import Point from '../../../src/ol/geom/Point.js';
import Map from '../../../src/ol/Map.js';
import WebGLMap from '../../../src/ol/WebGLMap.js';
import View from '../../../src/ol/View.js';
import VectorLayer from '../../../src/ol/layer/Vector.js';
import WebGLVectorLayer from '../../../src/ol/layer/WebGLVector.js';
import VectorSource from '../../../src/ol/source/Vector.js';
@@ -12,8 +10,8 @@ describe('ol.rendering.Map', function() {
let map;
function createMap(renderer) {
const MapConstructor = renderer === 'webgl' ? WebGLMap : Map;
const LayerConstructor = renderer === 'webgl' ? WebGLVectorLayer : VectorLayer;
const MapConstructor = Map;
const LayerConstructor = VectorLayer;
const vectorLayer = new LayerConstructor({
source: new VectorSource({
@@ -53,17 +51,6 @@ describe('ol.rendering.Map', function() {
done();
});
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
map.once('postrender', function() {
const initialSize = map.getSize();
map.updateSize();
expect(map.getSize()).to.eql(initialSize);
done();
});
});
});
describe('#render()', function() {
@@ -73,13 +60,6 @@ describe('ol.rendering.Map', function() {
expectResemble(
map, 'rendering/ol/expected/render-canvas.png', IMAGE_TOLERANCE, done);
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
expectResemble(
map, 'rendering/ol/expected/render-webgl.png', IMAGE_TOLERANCE, done);
});
});
describe('#pan()', function() {
@@ -90,37 +70,5 @@ describe('ol.rendering.Map', function() {
expectResemble(
map, 'rendering/ol/expected/pan-canvas.png', IMAGE_TOLERANCE, done);
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
map.getView().setCenter([10, 10]);
expectResemble(
map, 'rendering/ol/expected/pan-webgl.png', IMAGE_TOLERANCE, done);
});
});
describe('#rotate()', function() {
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
map.getView().setRotation(90);
map.getView().setCenter([10, 10]);
expectResemble(
map, 'rendering/ol/expected/rotate-webgl.png', IMAGE_TOLERANCE, done);
});
});
describe('#zoom()', function() {
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
map.getView().setCenter([10, 10]);
map.getView().setResolution(2);
expectResemble(
map, 'rendering/ol/expected/zoom-webgl.png', IMAGE_TOLERANCE, done);
});
});
});

View File

@@ -1,8 +1,6 @@
import WebGLMap from '../../../../src/ol/WebGLMap.js';
import Map from '../../../../src/ol/Map.js';
import View from '../../../../src/ol/View.js';
import TileLayer from '../../../../src/ol/layer/Tile.js';
import WebGLTileLayer from '../../../../src/ol/layer/WebGLTile.js';
import TileWMS from '../../../../src/ol/source/TileWMS.js';
describe('ol.rendering.source.TileWMS', function() {
@@ -27,7 +25,7 @@ describe('ol.rendering.source.TileWMS', function() {
let map;
function createMap(renderer, pixelRatio) {
const MapConstructor = renderer === 'webgl' ? WebGLMap : Map;
const MapConstructor = Map;
map = new MapConstructor({
target: createMapDiv(200, 200),
@@ -69,18 +67,6 @@ describe('ol.rendering.source.TileWMS', function() {
source: source
}));
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl', 1);
const source = createSource(0);
tilesLoaded(source, function() {
expectResemble(map, 'rendering/ol/source/expected/0_1.webgl.png', IMAGE_TOLERANCE, done);
});
map.addLayer(new WebGLTileLayer({
source: source
}));
});
});
describe('0px gutter, 2 pixel ratio', function() {
@@ -94,18 +80,6 @@ describe('ol.rendering.source.TileWMS', function() {
source: source
}));
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl', 2);
const source = createSource(0);
tilesLoaded(source, function() {
expectResemble(map, 'rendering/ol/source/expected/0_2.webgl.png', IMAGE_TOLERANCE, done);
});
map.addLayer(new WebGLTileLayer({
source: source
}));
});
});
@@ -120,18 +94,6 @@ describe('ol.rendering.source.TileWMS', function() {
source: source
}));
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl', 1);
const source = createSource(20);
tilesLoaded(source, function() {
expectResemble(map, 'rendering/ol/source/expected/20_1.webgl.png', IMAGE_TOLERANCE, done);
});
map.addLayer(new WebGLTileLayer({
source: source
}));
});
});
describe('20px gutter, 2 pixel ratio', function() {
@@ -145,18 +107,6 @@ describe('ol.rendering.source.TileWMS', function() {
source: source
}));
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl', 2);
const source = createSource(20);
tilesLoaded(source, function() {
expectResemble(map, 'rendering/ol/source/expected/20_2.webgl.png', IMAGE_TOLERANCE, done);
});
map.addLayer(new WebGLTileLayer({
source: source
}));
});
});
});

View File

@@ -2,10 +2,8 @@ import Feature from '../../../../src/ol/Feature.js';
import Point from '../../../../src/ol/geom/Point.js';
import MultiPoint from '../../../../src/ol/geom/MultiPoint.js';
import Map from '../../../../src/ol/Map.js';
import WebGLMap from '../../../../src/ol/WebGLMap.js';
import View from '../../../../src/ol/View.js';
import VectorLayer from '../../../../src/ol/layer/Vector.js';
import WebGLVectorLayer from '../../../../src/ol/layer/Vector.js';
import VectorSource from '../../../../src/ol/source/Vector.js';
import CircleStyle from '../../../../src/ol/style/Circle.js';
import Fill from '../../../../src/ol/style/Fill.js';
@@ -18,8 +16,8 @@ describe('ol.rendering.style.Circle', function() {
let map, vectorSource;
function createMap(renderer) {
const MapConstructor = renderer === 'webgl' ? WebGLMap : Map;
const LayerConstructor = renderer === 'webgl' ? WebGLVectorLayer : VectorLayer;
const MapConstructor = Map;
const LayerConstructor = VectorLayer;
vectorSource = new VectorSource();
const vectorLayer = new LayerConstructor({
@@ -201,13 +199,5 @@ describe('ol.rendering.style.Circle', function() {
expectResemble(map, 'rendering/ol/style/expected/circle-canvas.png',
8.0, done);
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
createFeatures();
expectResemble(map, 'rendering/ol/style/expected/circle-webgl.png',
8.0, done);
});
});
});

View File

@@ -1,10 +1,8 @@
import Feature from '../../../../src/ol/Feature.js';
import LineString from '../../../../src/ol/geom/LineString.js';
import Map from '../../../../src/ol/Map.js';
import WebGLMap from '../../../../src/ol/WebGLMap.js';
import View from '../../../../src/ol/View.js';
import VectorLayer from '../../../../src/ol/layer/Vector.js';
import WebGLVectorLayer from '../../../../src/ol/layer/Vector.js';
import VectorSource from '../../../../src/ol/source/Vector.js';
import Style from '../../../../src/ol/style/Style.js';
import Stroke from '../../../../src/ol/style/Stroke.js';
@@ -15,8 +13,8 @@ describe('ol.rendering.style.LineString', function() {
let map, vectorSource;
function createMap(renderer, opt_pixelRatio) {
const MapConstructor = renderer === 'webgl' ? WebGLMap : Map;
const LayerConstructor = renderer === 'webgl' ? WebGLVectorLayer : VectorLayer;
const MapConstructor = Map;
const LayerConstructor = VectorLayer;
vectorSource = new VectorSource();
const vectorLayer = new LayerConstructor({
@@ -120,13 +118,6 @@ describe('ol.rendering.style.LineString', function() {
map, 'rendering/ol/style/expected/linestring-strokes-canvas.png',
3.0, done);
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
createFeatures();
expectResemble(map, 'rendering/ol/style/expected/linestring-strokes-webgl.png',
14.6, done);
});
it('tests the canvas renderer (HiDPI)', function(done) {
createMap('canvas', 2);

View File

@@ -1,10 +1,8 @@
import Feature from '../../../../src/ol/Feature.js';
import Point from '../../../../src/ol/geom/Point.js';
import Map from '../../../../src/ol/Map.js';
import WebGLMap from '../../../../src/ol/WebGLMap.js';
import View from '../../../../src/ol/View.js';
import VectorLayer from '../../../../src/ol/layer/Vector.js';
import WebGLVectorLayer from '../../../../src/ol/layer/Vector.js';
import VectorSource from '../../../../src/ol/source/Vector.js';
import Fill from '../../../../src/ol/style/Fill.js';
import RegularShape from '../../../../src/ol/style/RegularShape.js';
@@ -17,8 +15,8 @@ describe('ol.rendering.style.RegularShape', function() {
let map, vectorSource;
function createMap(renderer) {
const MapConstructor = renderer === 'webgl' ? WebGLMap : Map;
const LayerConstructor = renderer === 'webgl' ? WebGLVectorLayer : VectorLayer;
const MapConstructor = Map;
const LayerConstructor = VectorLayer;
vectorSource = new VectorSource();
const vectorLayer = new LayerConstructor({
@@ -137,13 +135,6 @@ describe('ol.rendering.style.RegularShape', function() {
}));
expectResemble(map, 'rendering/ol/style/expected/regularshape-canvas-linedashoffset.png', 5, done);
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
createFeatures(stroke, fill);
expectResemble(map, 'rendering/ol/style/expected/regularshape-webgl.png', 8.2, done);
});
});
describe('uses the default fill and stroke color', function() {
@@ -155,12 +146,5 @@ describe('ol.rendering.style.RegularShape', function() {
createFeatures(stroke, fill);
expectResemble(map, 'rendering/ol/style/expected/regularshape-canvas-default-style.png', 3.0, done);
});
where('WebGL').it('tests the WebGL renderer', function(done) {
assertWebGL();
createMap('webgl');
createFeatures(stroke, fill);
expectResemble(map, 'rendering/ol/style/expected/regularshape-webgl-default-style.png', 3.0, done);
});
});
});

View File

@@ -5,10 +5,8 @@ import MultiPolygon from '../../../../src/ol/geom/MultiPolygon.js';
import Point from '../../../../src/ol/geom/Point.js';
import Polygon from '../../../../src/ol/geom/Polygon.js';
import Map from '../../../../src/ol/Map.js';
import WebGLMap from '../../../../src/ol/WebGLMap.js';
import View from '../../../../src/ol/View.js';
import VectorLayer from '../../../../src/ol/layer/Vector.js';
import WebGLVectorLayer from '../../../../src/ol/layer/Vector.js';
import VectorSource from '../../../../src/ol/source/Vector.js';
import Text from '../../../../src/ol/style/Text.js';
import Fill from '../../../../src/ol/style/Fill.js';
@@ -20,8 +18,8 @@ describe('ol.rendering.style.Text', function() {
let map, vectorSource;
function createMap(renderer, opt_pixelRatio) {
const MapConstructor = renderer === 'webgl' ? WebGLMap : Map;
const LayerConstructor = renderer === 'webgl' ? WebGLVectorLayer : VectorLayer;
const MapConstructor = Map;
const LayerConstructor = VectorLayer;
const pixelRatio = opt_pixelRatio || 1;
vectorSource = new VectorSource();
@@ -446,18 +444,5 @@ describe('ol.rendering.style.Text', function() {
});
where('WebGL').it('tests the webgl renderer without rotation', function(done) {
createMap('webgl');
createFeatures();
expectResemble(map, 'rendering/ol/style/expected/text-webgl.png', 1.8, done);
});
where('WebGL').it('tests the webgl renderer with rotation', function(done) {
createMap('webgl');
createFeatures();
map.getView().setRotation(Math.PI / 7);
expectResemble(map, 'rendering/ol/style/expected/text-rotated-webgl.png', 1.8, done);
});
});
});

View File

@@ -1,240 +0,0 @@
import {getUid} from '../../../../../src/ol/util.js';
import Feature from '../../../../../src/ol/Feature.js';
import Circle from '../../../../../src/ol/geom/Circle.js';
import WebGLCircleReplay from '../../../../../src/ol/render/webgl/CircleReplay.js';
import {fragment, vertex} from '../../../../../src/ol/render/webgl/circlereplay/defaultshader.js';
import Locations from '../../../../../src/ol/render/webgl/circlereplay/defaultshader/Locations.js';
import Fill from '../../../../../src/ol/style/Fill.js';
import Stroke from '../../../../../src/ol/style/Stroke.js';
describe('ol.render.webgl.CircleReplay', function() {
let replay;
const strokeStyle = new Stroke({
color: [0, 255, 0, 0.4]
});
const fillStyle = new Fill({
color: [255, 0, 0, 1]
});
beforeEach(function() {
const tolerance = 0.1;
const maxExtent = [-10000, -20000, 10000, 20000];
replay = new WebGLCircleReplay(tolerance, maxExtent);
});
describe('#setFillStrokeStyle', function() {
it('set expected states', function() {
replay.setFillStrokeStyle(fillStyle, strokeStyle);
expect(replay.state_).not.be(null);
expect(replay.state_.strokeColor).to.eql([0, 1, 0, 0.4]);
expect(replay.state_.lineWidth).to.be(1);
expect(replay.state_.fillColor).to.eql([1, 0, 0, 1]);
expect(replay.state_.changed).to.be(true);
expect(replay.styles_).to.have.length(1);
});
it('sets a transparent stroke, if none provided', function() {
replay.setFillStrokeStyle(fillStyle, null);
expect(replay.state_.strokeColor).to.eql([0, 0, 0, 0]);
});
it('sets a transparent fill, if none provided', function() {
replay.setFillStrokeStyle(null, strokeStyle);
expect(replay.state_.fillColor).to.eql([0, 0, 0, 0]);
});
});
describe('#drawCircle', function() {
it('sets the buffer data', function() {
const circle = new Circle([0, 0], 5000);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawCircle(circle, null);
expect(replay.vertices).to.have.length(16);
expect(replay.indices).to.have.length(6);
expect(replay.state_.changed).to.be(false);
expect(replay.startIndices).to.have.length(1);
expect(replay.startIndicesFeature).to.have.length(1);
expect(replay.radius_).to.be(5000);
});
it('does not draw if radius is zero', function() {
const circle = new Circle([0, 0], 0);
replay.drawCircle(circle, null);
expect(replay.vertices).to.have.length(0);
expect(replay.indices).to.have.length(0);
expect(replay.startIndices).to.have.length(0);
expect(replay.startIndicesFeature).to.have.length(0);
});
it('resets state and removes style if it belongs to a zero radius circle', function() {
const circle = new Circle([0, 0], 0);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.setFillStrokeStyle(null, strokeStyle);
replay.drawCircle(circle, null);
expect(replay.styles_).to.have.length(1);
expect(replay.state_).not.be(null);
expect(replay.state_.strokeColor).to.eql([0, 1, 0, 0.4]);
expect(replay.state_.lineWidth).to.be(1);
expect(replay.state_.fillColor).to.eql([1, 0, 0, 1]);
expect(replay.state_.changed).to.be(false);
});
});
describe('#drawCoordinates_', function() {
it('envelops the circle into a right isosceles triangle', function() {
replay.radius_ = 5000;
replay.drawCoordinates_([0, 0], 0, 2, 2);
expect(replay.vertices).to.eql([0, 0, 0, 5000, 0, 0, 1, 5000,
0, 0, 2, 5000, 0, 0, 3, 5000]);
expect(replay.indices).to.eql([0, 1, 2, 2, 3, 0]);
});
});
describe('#setUpProgram', function() {
let context, gl;
beforeEach(function() {
context = {
getProgram: function() {},
useProgram: function() {}
};
gl = {
enableVertexAttribArray: function() {},
vertexAttribPointer: function() {},
uniform1f: function() {},
uniform2fv: function() {},
getUniformLocation: function() {},
getAttribLocation: function() {}
};
});
it('returns the locations used by the shaders', function() {
const locations = replay.setUpProgram(gl, context, [2, 2], 1);
expect(locations).to.be.a(Locations);
});
it('gets and compiles the shaders', function() {
sinon.spy(context, 'getProgram');
sinon.spy(context, 'useProgram');
replay.setUpProgram(gl, context, [2, 2], 1);
expect(context.getProgram.calledWithExactly(fragment, vertex)).to.be(true);
expect(context.useProgram.calledOnce).to.be(true);
});
it('initializes the attrib pointers', function() {
sinon.spy(gl, 'getAttribLocation');
sinon.spy(gl, 'vertexAttribPointer');
sinon.spy(gl, 'enableVertexAttribArray');
replay.setUpProgram(gl, context, [2, 2], 1);
expect(gl.vertexAttribPointer.callCount).to.be(gl.getAttribLocation.callCount);
expect(gl.enableVertexAttribArray.callCount).to.be(
gl.getAttribLocation.callCount);
});
});
describe('#shutDownProgram', function() {
let context, gl;
beforeEach(function() {
context = {
getProgram: function() {},
useProgram: function() {}
};
gl = {
enableVertexAttribArray: function() {},
disableVertexAttribArray: function() {},
vertexAttribPointer: function() {},
uniform1f: function() {},
uniform2fv: function() {},
getUniformLocation: function() {},
getAttribLocation: function() {}
};
});
it('disables the attrib pointers', function() {
sinon.spy(gl, 'getAttribLocation');
sinon.spy(gl, 'disableVertexAttribArray');
const locations = replay.setUpProgram(gl, context, [2, 2], 1);
replay.shutDownProgram(gl, locations);
expect(gl.disableVertexAttribArray.callCount).to.be(
gl.getAttribLocation.callCount);
});
});
describe('#drawReplay', function() {
let gl, context;
const feature1 = new Feature({
geometry: new Circle([0, 0], 5000)
});
const feature2 = new Feature({
geometry: new Circle([10, 10], 5000)
});
const feature3 = new Feature({
geometry: new Circle([20, 20], 5000)
});
beforeEach(function() {
gl = {};
context = {};
replay.setFillStyle_ = function() {};
replay.setStrokeStyle_ = function() {};
replay.drawElements = function() {};
sinon.spy(replay, 'setFillStyle_');
sinon.spy(replay, 'setStrokeStyle_');
sinon.spy(replay, 'drawElements');
});
it('draws the elements in a single call if they have the same style', function() {
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawCircle(feature1.getGeometry(), feature1);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawCircle(feature2.getGeometry(), feature2);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawCircle(feature3.getGeometry(), feature3);
replay.startIndices.push(replay.indices.length);
replay.drawReplay(gl, context, {}, false);
expect(replay.setFillStyle_.calledOnce).to.be(true);
expect(replay.setStrokeStyle_.calledOnce).to.be(true);
expect(replay.drawElements.calledOnce).to.be(true);
});
it('draws the elements in batches if there are multiple styles', function() {
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawCircle(feature1.getGeometry(), feature1);
replay.setFillStrokeStyle(fillStyle, null);
replay.drawCircle(feature2.getGeometry(), feature2);
replay.setFillStrokeStyle(strokeStyle, null);
replay.drawCircle(feature3.getGeometry(), feature3);
replay.startIndices.push(replay.indices.length);
replay.drawReplay(gl, context, {}, false);
expect(replay.setFillStyle_.calledThrice).to.be(true);
expect(replay.setStrokeStyle_.calledThrice).to.be(true);
expect(replay.drawElements.calledThrice).to.be(true);
});
it('can skip elements if needed', function() {
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawCircle(feature1.getGeometry(), feature1);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawCircle(feature2.getGeometry(), feature2);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawCircle(feature3.getGeometry(), feature3);
replay.startIndices.push(replay.indices.length);
const skippedFeatHash = {};
skippedFeatHash[getUid(feature2)] = true;
replay.drawReplay(gl, context, skippedFeatHash, false);
expect(replay.setFillStyle_.calledOnce).to.be(true);
expect(replay.setStrokeStyle_.calledOnce).to.be(true);
expect(replay.drawElements.calledTwice).to.be(true);
});
});
});

View File

@@ -1,207 +0,0 @@
import MultiPoint from '../../../../../src/ol/geom/MultiPoint.js';
import Point from '../../../../../src/ol/geom/Point.js';
import WebGLImageReplay from '../../../../../src/ol/render/webgl/ImageReplay.js';
import ImageStyle from '../../../../../src/ol/style/Image.js';
describe('ol.render.webgl.ImageReplay', function() {
let replay;
const createImageStyle = function(image) {
const imageStyle = new ImageStyle({
opacity: 0.1,
rotateWithView: true,
rotation: 1.5,
scale: 2.0
});
imageStyle.getAnchor = function() {
return [0.5, 1];
};
imageStyle.getImage = function() {
return image;
};
imageStyle.getHitDetectionImage = function() {
return image;
};
imageStyle.getImageSize = function() {
return [512, 512];
};
imageStyle.getHitDetectionImageSize = function() {
return [512, 512];
};
imageStyle.getOrigin = function() {
return [200, 200];
};
imageStyle.getSize = function() {
return [256, 256];
};
return imageStyle;
};
beforeEach(function() {
const tolerance = 0.1;
const maxExtent = [-10000, -20000, 10000, 20000];
replay = new WebGLImageReplay(tolerance, maxExtent);
});
describe('#setImageStyle', function() {
let imageStyle1, imageStyle2;
beforeEach(function() {
imageStyle1 = createImageStyle(new Image());
imageStyle2 = createImageStyle(new Image());
});
it('set expected states', function() {
replay.setImageStyle(imageStyle1);
expect(replay.anchorX).to.be(0.5);
expect(replay.anchorY).to.be(1);
expect(replay.height).to.be(256);
expect(replay.imageHeight).to.be(512);
expect(replay.imageWidth).to.be(512);
expect(replay.opacity).to.be(0.1);
expect(replay.originX).to.be(200);
expect(replay.originY).to.be(200);
expect(replay.rotation).to.be(1.5);
expect(replay.rotateWithView).to.be(true);
expect(replay.scale).to.be(2.0);
expect(replay.width).to.be(256);
expect(replay.images_).to.have.length(1);
expect(replay.groupIndices).to.have.length(0);
expect(replay.hitDetectionImages_).to.have.length(1);
expect(replay.hitDetectionGroupIndices).to.have.length(0);
replay.setImageStyle(imageStyle1);
expect(replay.images_).to.have.length(1);
expect(replay.groupIndices).to.have.length(0);
expect(replay.hitDetectionImages_).to.have.length(1);
expect(replay.hitDetectionGroupIndices).to.have.length(0);
replay.setImageStyle(imageStyle2);
expect(replay.images_).to.have.length(2);
expect(replay.groupIndices).to.have.length(1);
expect(replay.hitDetectionImages_).to.have.length(2);
expect(replay.hitDetectionGroupIndices).to.have.length(1);
});
});
describe('#drawPoint', function() {
beforeEach(function() {
const imageStyle = createImageStyle(new Image());
replay.setImageStyle(imageStyle);
});
it('sets the buffer data', function() {
let point;
point = new Point([1000, 2000]);
replay.drawPoint(point, null);
expect(replay.vertices).to.have.length(32);
expect(replay.indices).to.have.length(6);
expect(replay.indices[0]).to.be(0);
expect(replay.indices[1]).to.be(1);
expect(replay.indices[2]).to.be(2);
expect(replay.indices[3]).to.be(0);
expect(replay.indices[4]).to.be(2);
expect(replay.indices[5]).to.be(3);
point = new Point([2000, 3000]);
replay.drawPoint(point, null);
expect(replay.vertices).to.have.length(64);
expect(replay.indices).to.have.length(12);
expect(replay.indices[6]).to.be(4);
expect(replay.indices[7]).to.be(5);
expect(replay.indices[8]).to.be(6);
expect(replay.indices[9]).to.be(4);
expect(replay.indices[10]).to.be(6);
expect(replay.indices[11]).to.be(7);
});
});
describe('#drawMultiPoint', function() {
beforeEach(function() {
const imageStyle = createImageStyle(new Image());
replay.setImageStyle(imageStyle);
});
it('sets the buffer data', function() {
let multiPoint;
multiPoint = new MultiPoint(
[[1000, 2000], [2000, 3000]]);
replay.drawMultiPoint(multiPoint, null);
expect(replay.vertices).to.have.length(64);
expect(replay.indices).to.have.length(12);
expect(replay.indices[0]).to.be(0);
expect(replay.indices[1]).to.be(1);
expect(replay.indices[2]).to.be(2);
expect(replay.indices[3]).to.be(0);
expect(replay.indices[4]).to.be(2);
expect(replay.indices[5]).to.be(3);
expect(replay.indices[6]).to.be(4);
expect(replay.indices[7]).to.be(5);
expect(replay.indices[8]).to.be(6);
expect(replay.indices[9]).to.be(4);
expect(replay.indices[10]).to.be(6);
expect(replay.indices[11]).to.be(7);
multiPoint = new MultiPoint(
[[3000, 4000], [4000, 5000]]);
replay.drawMultiPoint(multiPoint, null);
expect(replay.vertices).to.have.length(128);
expect(replay.indices).to.have.length(24);
expect(replay.indices[12]).to.be(8);
expect(replay.indices[13]).to.be(9);
expect(replay.indices[14]).to.be(10);
expect(replay.indices[15]).to.be(8);
expect(replay.indices[16]).to.be(10);
expect(replay.indices[17]).to.be(11);
expect(replay.indices[18]).to.be(12);
expect(replay.indices[19]).to.be(13);
expect(replay.indices[20]).to.be(14);
expect(replay.indices[21]).to.be(12);
expect(replay.indices[22]).to.be(14);
expect(replay.indices[23]).to.be(15);
});
});
describe('#getTextures', function() {
beforeEach(function() {
replay.textures_ = [1, 2];
replay.hitDetectionTextures_ = [3, 4];
});
it('returns the textures', function() {
const textures = replay.getTextures();
expect(textures).to.have.length(2);
expect(textures[0]).to.be(1);
expect(textures[1]).to.be(2);
});
it('can additionally return the hit detection textures', function() {
const textures = replay.getTextures(true);
expect(textures).to.have.length(4);
expect(textures[0]).to.be(1);
expect(textures[1]).to.be(2);
expect(textures[2]).to.be(3);
expect(textures[3]).to.be(4);
});
});
describe('#getHitDetectionTextures', function() {
beforeEach(function() {
replay.textures_ = [1, 2];
replay.hitDetectionTextures_ = [3, 4];
});
it('returns the hit detection textures', function() {
const textures = replay.getHitDetectionTextures();
expect(textures).to.have.length(2);
expect(textures[0]).to.be(3);
expect(textures[1]).to.be(4);
});
});
});

View File

@@ -1,272 +0,0 @@
import Feature from '../../../../../src/ol/Feature.js';
import Circle from '../../../../../src/ol/geom/Circle.js';
import GeometryCollection from '../../../../../src/ol/geom/GeometryCollection.js';
import LineString from '../../../../../src/ol/geom/LineString.js';
import MultiLineString from '../../../../../src/ol/geom/MultiLineString.js';
import MultiPoint from '../../../../../src/ol/geom/MultiPoint.js';
import MultiPolygon from '../../../../../src/ol/geom/MultiPolygon.js';
import Point from '../../../../../src/ol/geom/Point.js';
import Polygon from '../../../../../src/ol/geom/Polygon.js';
import WebGLCircleReplay from '../../../../../src/ol/render/webgl/CircleReplay.js';
import WebGLImageReplay from '../../../../../src/ol/render/webgl/ImageReplay.js';
import WebGLImmediateRenderer from '../../../../../src/ol/render/webgl/Immediate.js';
import WebGLLineStringReplay from '../../../../../src/ol/render/webgl/LineStringReplay.js';
import WebGLPolygonReplay from '../../../../../src/ol/render/webgl/PolygonReplay.js';
import CircleStyle from '../../../../../src/ol/style/Circle.js';
import Fill from '../../../../../src/ol/style/Fill.js';
import Stroke from '../../../../../src/ol/style/Stroke.js';
import Style from '../../../../../src/ol/style/Style.js';
describe('ol.render.webgl.Immediate', function() {
let context, style, circle, line, multiLine, point, multiPoint, polygon, multiPolygon;
beforeEach(function() {
context = new WebGLImmediateRenderer({}, [0, 0], 0, 0, [0, 0], [-180, -90, 180, 90], 1);
style = new Style({
image: new CircleStyle(),
fill: new Fill(),
stroke: new Stroke()
});
circle = new Circle([0, 0], 5);
line = new LineString([[0, 0], [5, 5]]);
multiLine = new MultiLineString([[[0, 0], [5, 5]]]);
point = new Point([0, 0]);
multiPoint = new MultiPoint([[0, 0]]);
polygon = new Polygon([[[0, 0], [5, 5], [5, 0], [0, 0]]]);
multiPolygon = new MultiPolygon([[[[0, 0], [5, 5], [5, 0], [0, 0]]]]);
});
describe('#setStyle', function() {
it('sets the style of the context', function() {
context.setStyle(style);
expect(context.fillStyle_).to.be(style.getFill());
expect(context.strokeStyle_).to.be(style.getStroke());
expect(context.imageStyle_).to.be(style.getImage());
});
});
describe('#drawFeature', function() {
let feat;
beforeEach(function() {
feat = new Feature({
geometry: circle
});
context.setStyle = function() {};
context.drawGeometry = function() {};
sinon.spy(context, 'setStyle');
sinon.spy(context, 'drawGeometry');
});
it('updates the style of the context', function() {
context.drawFeature(feat, style);
expect(context.setStyle.calledOnce).to.be(true);
});
it('draws the geometry of the feature', function() {
context.drawFeature(feat, style);
expect(context.drawGeometry.calledOnce).to.be(true);
});
it('does nothing if no geometry is provided', function() {
feat = new Feature();
context.drawFeature(feat, style);
expect(context.setStyle.called).to.be(false);
expect(context.drawGeometry.called).to.be(false);
});
it('does nothing if geometry is out of bounds', function() {
feat = new Feature({
geometry: new Circle([540, 540], 1)
});
context.drawFeature(feat, style);
expect(context.setStyle.called).to.be(false);
expect(context.drawGeometry.called).to.be(false);
});
});
describe('#drawGeometryCollection', function() {
let geomColl;
beforeEach(function() {
geomColl = new GeometryCollection([circle, point, multiPoint,
line, multiLine, polygon, multiPolygon]);
});
it('draws every geometry in the collection', function() {
context.drawGeometry = function() {};
sinon.spy(context, 'drawGeometry');
context.drawGeometryCollection(geomColl);
expect(context.drawGeometry.callCount).to.be(7);
});
});
describe('geometry functions', function() {
function mock(ctor, geomFunc) {
const tmpObj = {};
tmpObj.replay = ctor.prototype.replay;
ctor.prototype.replay = sinon.spy();
tmpObj.finish = ctor.prototype.finish;
ctor.prototype.finish = sinon.spy();
tmpObj.getDeleteResourcesFunction = ctor.prototype.getDeleteResourcesFunction;
ctor.prototype.getDeleteResourcesFunction = sinon.spy(function() {
return function() {};
});
sinon.spy(ctor.prototype.getDeleteResourcesFunction);
if (ctor === WebGLImageReplay) {
tmpObj.setImageStyle = ctor.prototype.setImageStyle;
ctor.prototype.setImageStyle = sinon.spy();
} else {
tmpObj.setFillStrokeStyle = ctor.prototype.setFillStrokeStyle;
ctor.prototype.setFillStrokeStyle = sinon.spy();
}
tmpObj[geomFunc] = ctor.prototype[geomFunc];
ctor.prototype[geomFunc] = sinon.spy();
return tmpObj;
}
function restore(ctor, tmpObj) {
for (const i in tmpObj) {
ctor.prototype[i] = tmpObj[i];
}
}
describe('#drawPoint', function() {
let tmpObj;
beforeEach(function() {
tmpObj = mock(WebGLImageReplay, 'drawPoint');
});
it('draws a point', function() {
context.drawGeometry(point);
expect(WebGLImageReplay.prototype.setImageStyle.calledOnce).to.be(true);
expect(WebGLImageReplay.prototype.drawPoint.calledOnce).to.be(true);
expect(WebGLImageReplay.prototype.finish.calledOnce).to.be(true);
expect(WebGLImageReplay.prototype.replay.calledOnce).to.be(true);
expect(WebGLImageReplay.prototype.getDeleteResourcesFunction.calledOnce).to.be(true);
});
after(function() {
restore(WebGLImageReplay, tmpObj);
});
});
describe('#drawMultiPoint', function() {
let tmpObj;
beforeEach(function() {
tmpObj = mock(WebGLImageReplay, 'drawMultiPoint');
});
it('draws a multi point', function() {
context.drawGeometry(multiPoint);
expect(WebGLImageReplay.prototype.setImageStyle.calledOnce).to.be(true);
expect(WebGLImageReplay.prototype.drawMultiPoint.calledOnce).to.be(true);
expect(WebGLImageReplay.prototype.finish.calledOnce).to.be(true);
expect(WebGLImageReplay.prototype.replay.calledOnce).to.be(true);
expect(WebGLImageReplay.prototype.getDeleteResourcesFunction.calledOnce).to.be(true);
});
after(function() {
restore(WebGLImageReplay, tmpObj);
});
});
describe('#drawLineString', function() {
let tmpObj;
beforeEach(function() {
tmpObj = mock(WebGLLineStringReplay, 'drawLineString');
});
it('draws a line string', function() {
context.drawGeometry(line);
expect(WebGLLineStringReplay.prototype.setFillStrokeStyle.calledOnce).to.be(true);
expect(WebGLLineStringReplay.prototype.drawLineString.calledOnce).to.be(true);
expect(WebGLLineStringReplay.prototype.finish.calledOnce).to.be(true);
expect(WebGLLineStringReplay.prototype.replay.calledOnce).to.be(true);
expect(WebGLLineStringReplay.prototype.getDeleteResourcesFunction.calledOnce).to.be(true);
});
after(function() {
restore(WebGLLineStringReplay, tmpObj);
});
});
describe('#drawMultiLineString', function() {
let tmpObj;
beforeEach(function() {
tmpObj = mock(WebGLLineStringReplay, 'drawMultiLineString');
});
it('draws a multi line string', function() {
context.drawGeometry(multiLine);
expect(WebGLLineStringReplay.prototype.setFillStrokeStyle.calledOnce).to.be(true);
expect(WebGLLineStringReplay.prototype.drawMultiLineString.calledOnce).to.be(true);
expect(WebGLLineStringReplay.prototype.finish.calledOnce).to.be(true);
expect(WebGLLineStringReplay.prototype.replay.calledOnce).to.be(true);
expect(WebGLLineStringReplay.prototype.getDeleteResourcesFunction.calledOnce).to.be(true);
});
after(function() {
restore(WebGLLineStringReplay, tmpObj);
});
});
describe('#drawPolygon', function() {
let tmpObj;
beforeEach(function() {
tmpObj = mock(WebGLPolygonReplay, 'drawPolygon');
});
it('draws a polygon', function() {
context.drawGeometry(polygon);
expect(WebGLPolygonReplay.prototype.setFillStrokeStyle.calledOnce).to.be(true);
expect(WebGLPolygonReplay.prototype.drawPolygon.calledOnce).to.be(true);
expect(WebGLPolygonReplay.prototype.finish.calledOnce).to.be(true);
expect(WebGLPolygonReplay.prototype.replay.calledOnce).to.be(true);
expect(WebGLPolygonReplay.prototype.getDeleteResourcesFunction.calledOnce).to.be(true);
});
after(function() {
restore(WebGLPolygonReplay, tmpObj);
});
});
describe('#drawMultiPolygon', function() {
let tmpObj;
beforeEach(function() {
tmpObj = mock(WebGLPolygonReplay, 'drawMultiPolygon');
});
it('draws a multi polygon', function() {
context.drawGeometry(multiPolygon);
expect(WebGLPolygonReplay.prototype.setFillStrokeStyle.calledOnce).to.be(true);
expect(WebGLPolygonReplay.prototype.drawMultiPolygon.calledOnce).to.be(true);
expect(WebGLPolygonReplay.prototype.finish.calledOnce).to.be(true);
expect(WebGLPolygonReplay.prototype.replay.calledOnce).to.be(true);
expect(WebGLPolygonReplay.prototype.getDeleteResourcesFunction.calledOnce).to.be(true);
});
after(function() {
restore(WebGLPolygonReplay, tmpObj);
});
});
describe('#drawCircle', function() {
let tmpObj;
beforeEach(function() {
tmpObj = mock(WebGLCircleReplay, 'drawCircle');
});
it('draws a circle', function() {
context.drawGeometry(circle);
expect(WebGLCircleReplay.prototype.setFillStrokeStyle.calledOnce).to.be(true);
expect(WebGLCircleReplay.prototype.drawCircle.calledOnce).to.be(true);
expect(WebGLCircleReplay.prototype.finish.calledOnce).to.be(true);
expect(WebGLCircleReplay.prototype.replay.calledOnce).to.be(true);
expect(WebGLCircleReplay.prototype.getDeleteResourcesFunction.calledOnce).to.be(true);
});
after(function() {
restore(WebGLCircleReplay, tmpObj);
});
});
});
});

View File

@@ -1,61 +0,0 @@
import WebGLReplay from '../../../../../src/ol/render/webgl/Replay.js';
describe('ol.render.Replay', function() {
let replay;
beforeEach(function() {
replay = new WebGLReplay(5, [-180, -90, 180, 90]);
replay.drawReplay = replay.shutDownProgram = function() {};
});
describe('constructor', function() {
it('stores view related data', function() {
expect(replay.tolerance).to.be(5);
expect(replay.maxExtent).to.eql([-180, -90, 180, 90]);
expect(replay.origin).to.eql([0, 0]);
});
it('sets up the required matrices', function() {
const mat3 = [1, 0, 0, 1, 0, 0];
const mat4 = [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1];
expect(replay.projectionMatrix_).to.eql(mat3);
expect(replay.offsetRotateMatrix_).to.eql(mat3);
expect(replay.offsetScaleMatrix_).to.eql(mat3);
expect(replay.tmpMat4_).to.eql(mat4);
});
});
describe('#replay', function() {
const gl = {
uniformMatrix4fv: function() {},
uniform1f: function() {}
};
const context = {
bindBuffer: function() {},
getGL: function() {
return gl;
}
};
beforeEach(function() {
replay.setUpProgram = function() {
return {
u_projectionMatrix: true,
u_offsetScaleMatrix: true,
u_offsetRotateMatrix: true,
u_opacity: true
};
};
});
it('calculates the correct matrices', function() {
const sin = Math.sin(Math.PI);
replay.replay(context, [0, 0], 10, Math.PI, [10, 10], 1, 0, {}, undefined,
false, undefined);
expect(replay.projectionMatrix_).to.eql([-0.02, -sin * 0.02, sin * 0.02,
-0.02, 0, 0]);
expect(replay.offsetRotateMatrix_).to.eql([-1, -sin, sin, -1, 0, 0]);
expect(replay.offsetScaleMatrix_).to.eql([0.2, 0, 0, 0.2, 0, 0]);
});
});
});

View File

@@ -1,347 +0,0 @@
import {getUid} from '../../../../../src/ol/util.js';
import Feature from '../../../../../src/ol/Feature.js';
import LineString from '../../../../../src/ol/geom/LineString.js';
import MultiLineString from '../../../../../src/ol/geom/MultiLineString.js';
import WebGLLineStringReplay from '../../../../../src/ol/render/webgl/LineStringReplay.js';
import {fragment, vertex} from '../../../../../src/ol/render/webgl/linestringreplay/defaultshader.js';
import Locations from '../../../../../src/ol/render/webgl/linestringreplay/defaultshader/Locations.js';
import Stroke from '../../../../../src/ol/style/Stroke.js';
describe('ol.render.webgl.LineStringReplay', function() {
let replay;
const strokeStyle1 = new Stroke({
color: [0, 255, 0, 0.4]
});
const strokeStyle2 = new Stroke({
color: [255, 0, 0, 1],
lineCap: 'square',
lineJoin: 'miter'
});
beforeEach(function() {
const tolerance = 0.1;
const maxExtent = [-10000, -20000, 10000, 20000];
replay = new WebGLLineStringReplay(tolerance, maxExtent);
});
describe('#setFillStrokeStyle', function() {
it('set expected states', function() {
replay.setFillStrokeStyle(null, strokeStyle1);
expect(replay.state_).not.be(null);
expect(replay.state_.lineCap).to.be('round');
expect(replay.state_.lineJoin).to.be('round');
expect(replay.state_.strokeColor).to.eql([0, 1, 0, 0.4]);
expect(replay.state_.lineWidth).to.be(1);
expect(replay.state_.miterLimit).to.be(10);
expect(replay.state_.changed).to.be(true);
expect(replay.styles_).to.have.length(1);
replay.setFillStrokeStyle(null, strokeStyle2);
expect(replay.state_.lineCap).to.be('square');
expect(replay.state_.lineJoin).to.be('miter');
expect(replay.state_.strokeColor).to.eql([1, 0, 0, 1]);
expect(replay.state_.lineWidth).to.be(1);
expect(replay.state_.miterLimit).to.be(10);
expect(replay.state_.changed).to.be(true);
expect(replay.styles_).to.have.length(2);
});
});
describe('#drawLineString', function() {
it('sets the buffer data', function() {
let linestring;
linestring = new LineString(
[[1000, 2000], [2000, 3000]]);
replay.setFillStrokeStyle(null, strokeStyle1);
replay.drawLineString(linestring, null);
expect(replay.vertices).to.have.length(56);
expect(replay.indices).to.have.length(18);
expect(replay.state_.changed).to.be(false);
expect(replay.startIndices).to.have.length(1);
expect(replay.startIndicesFeature).to.have.length(1);
linestring = new LineString(
[[1000, 3000], [2000, 4000], [3000, 3000]]);
replay.drawLineString(linestring, null);
expect(replay.vertices).to.have.length(140);
expect(replay.indices).to.have.length(48);
expect(replay.state_.changed).to.be(false);
expect(replay.startIndices).to.have.length(2);
expect(replay.startIndicesFeature).to.have.length(2);
});
});
describe('#drawMultiLineString', function() {
it('sets the buffer data', function() {
const multilinestring = new MultiLineString(
[[[1000, 2000], [2000, 3000]],
[[1000, 3000], [2000, 4000], [3000, 3000]]]);
replay.setFillStrokeStyle(null, strokeStyle1);
replay.drawMultiLineString(multilinestring, null);
expect(replay.vertices).to.have.length(140);
expect(replay.indices).to.have.length(48);
expect(replay.state_.changed).to.be(false);
expect(replay.startIndices).to.have.length(1);
expect(replay.startIndicesFeature).to.have.length(1);
});
});
describe('#drawCoordinates_', function() {
it('triangulates linestrings', function() {
const stroke = new Stroke({
color: [0, 255, 0, 1],
lineCap: 'butt',
lineJoin: 'bevel'
});
const linestring = new LineString(
[[1000, 3000], [2000, 4000], [3000, 3000]]);
const flatCoordinates = linestring.getFlatCoordinates();
replay.setFillStrokeStyle(null, stroke);
replay.drawCoordinates_(flatCoordinates, 0,
flatCoordinates.length, 2);
expect(replay.indices).to.eql(
[2, 0, 1, 4, 2, 1,
2, 4, 3,
5, 3, 4, 4, 6, 5]);
});
it('optionally creates miters', function() {
const stroke = new Stroke({
color: [0, 255, 0, 1],
lineCap: 'butt'
});
const linestring = new LineString(
[[1000, 3000], [2000, 4000], [3000, 3000]]);
const flatCoordinates = linestring.getFlatCoordinates();
replay.setFillStrokeStyle(null, stroke);
replay.drawCoordinates_(flatCoordinates, 0,
flatCoordinates.length, 2);
expect(replay.indices).to.eql(
[2, 0, 1, 4, 2, 1,
2, 4, 3, 3, 5, 2,
6, 3, 4, 4, 7, 6]);
});
it('optionally creates caps', function() {
const stroke = new Stroke({
color: [0, 255, 0, 1]
});
const linestring = new LineString(
[[1000, 3000], [2000, 4000], [3000, 3000]]);
const flatCoordinates = linestring.getFlatCoordinates();
replay.setFillStrokeStyle(null, stroke);
replay.drawCoordinates_(flatCoordinates, 0,
flatCoordinates.length, 2);
expect(replay.indices).to.eql(
[2, 0, 1, 1, 3, 2,
4, 2, 3, 6, 4, 3,
4, 6, 5, 5, 7, 4,
8, 5, 6, 6, 9, 8,
10, 8, 9, 9, 11, 10]);
});
it('respects segment orientation', function() {
const stroke = new Stroke({
color: [0, 255, 0, 1],
lineCap: 'butt',
lineJoin: 'bevel'
});
const linestring = new LineString(
[[1000, 3000], [2000, 2000], [3000, 3000]]);
const flatCoordinates = linestring.getFlatCoordinates();
replay.setFillStrokeStyle(null, stroke);
replay.drawCoordinates_(flatCoordinates, 0,
flatCoordinates.length, 2);
expect(replay.indices).to.eql(
[2, 0, 1, 4, 2, 0,
2, 4, 3,
5, 3, 4, 4, 6, 5]);
});
it('closes boundaries', function() {
const stroke = new Stroke({
color: [0, 255, 0, 1],
lineCap: 'butt',
lineJoin: 'bevel'
});
const linestring = new LineString(
[[1000, 3000], [2000, 4000], [3000, 3000], [1000, 3000]]);
const flatCoordinates = linestring.getFlatCoordinates();
replay.setFillStrokeStyle(null, stroke);
replay.drawCoordinates_(flatCoordinates, 0,
flatCoordinates.length, 2);
expect(replay.indices).to.eql(
[0, 2, 1, 3, 1, 2,
5, 3, 2,
3, 5, 4, 6, 4, 5,
8, 6, 5,
6, 8, 7, 9, 7, 8,
10, 9, 8]);
expect(replay.vertices.slice(0, 7)).to.eql(
replay.vertices.slice(-14, -7));
expect(replay.vertices.slice(14, 21)).to.eql(
replay.vertices.slice(-7));
});
});
describe('#setUpProgram', function() {
let context, gl;
beforeEach(function() {
context = {
getProgram: function() {},
useProgram: function() {}
};
gl = {
enableVertexAttribArray: function() {},
vertexAttribPointer: function() {},
uniform1f: function() {},
uniform2fv: function() {},
getUniformLocation: function() {},
getAttribLocation: function() {}
};
});
it('returns the locations used by the shaders', function() {
const locations = replay.setUpProgram(gl, context, [2, 2], 1);
expect(locations).to.be.a(Locations);
});
it('gets and compiles the shaders', function() {
sinon.spy(context, 'getProgram');
sinon.spy(context, 'useProgram');
replay.setUpProgram(gl, context, [2, 2], 1);
expect(context.getProgram.calledWithExactly(fragment, vertex)).to.be(true);
expect(context.useProgram.calledOnce).to.be(true);
});
it('initializes the attrib pointers', function() {
sinon.spy(gl, 'getAttribLocation');
sinon.spy(gl, 'vertexAttribPointer');
sinon.spy(gl, 'enableVertexAttribArray');
replay.setUpProgram(gl, context, [2, 2], 1);
expect(gl.vertexAttribPointer.callCount).to.be(gl.getAttribLocation.callCount);
expect(gl.enableVertexAttribArray.callCount).to.be(
gl.getAttribLocation.callCount);
});
});
describe('#shutDownProgram', function() {
let context, gl;
beforeEach(function() {
context = {
getProgram: function() {},
useProgram: function() {}
};
gl = {
enableVertexAttribArray: function() {},
disableVertexAttribArray: function() {},
vertexAttribPointer: function() {},
uniform1f: function() {},
uniform2fv: function() {},
getUniformLocation: function() {},
getAttribLocation: function() {}
};
});
it('disables the attrib pointers', function() {
sinon.spy(gl, 'getAttribLocation');
sinon.spy(gl, 'disableVertexAttribArray');
const locations = replay.setUpProgram(gl, context, [2, 2], 1);
replay.shutDownProgram(gl, locations);
expect(gl.disableVertexAttribArray.callCount).to.be(
gl.getAttribLocation.callCount);
});
});
describe('#drawReplay', function() {
let gl, context;
const feature1 = new Feature({
geometry: new LineString([[0, 0], [500, 500]])
});
const feature2 = new Feature({
geometry: new LineString([[0, 0], [500, 500]])
});
const feature3 = new Feature({
geometry: new LineString([[0, 0], [500, 500]])
});
beforeEach(function() {
gl = {
enable: function() {},
disable: function() {},
depthMask: function() {},
depthFunc: function() {},
clear: function() {},
getParameter: function() {}
};
context = {};
replay.setStrokeStyle_ = function() {};
replay.drawElements = function() {};
sinon.spy(replay, 'setStrokeStyle_');
sinon.spy(replay, 'drawElements');
sinon.spy(gl, 'clear');
});
it('draws the elements in a single call if they have the same style', function() {
replay.setFillStrokeStyle(null, strokeStyle1);
replay.drawLineString(feature1.getGeometry(), feature1);
replay.setFillStrokeStyle(null, strokeStyle1);
replay.drawLineString(feature2.getGeometry(), feature2);
replay.setFillStrokeStyle(null, strokeStyle1);
replay.drawLineString(feature3.getGeometry(), feature3);
replay.startIndices.push(replay.indices.length);
replay.drawReplay(gl, context, {}, false);
expect(replay.setStrokeStyle_.calledOnce).to.be(true);
expect(replay.drawElements.calledOnce).to.be(true);
expect(gl.clear.called).to.be(true);
});
it('draws the elements in batches if there are multiple styles', function() {
replay.setFillStrokeStyle(null, strokeStyle1);
replay.drawLineString(feature1.getGeometry(), feature1);
replay.setFillStrokeStyle(null, strokeStyle2);
replay.drawLineString(feature2.getGeometry(), feature2);
replay.setFillStrokeStyle(null, strokeStyle1);
replay.drawLineString(feature3.getGeometry(), feature3);
replay.startIndices.push(replay.indices.length);
replay.drawReplay(gl, context, {}, false);
expect(replay.setStrokeStyle_.calledThrice).to.be(true);
expect(replay.drawElements.calledThrice).to.be(true);
expect(gl.clear.called).to.be(true);
});
it('can skip elements if needed', function() {
replay.setFillStrokeStyle(null, strokeStyle1);
replay.drawLineString(feature1.getGeometry(), feature1);
replay.setFillStrokeStyle(null, strokeStyle1);
replay.drawLineString(feature2.getGeometry(), feature2);
replay.setFillStrokeStyle(null, strokeStyle1);
replay.drawLineString(feature3.getGeometry(), feature3);
replay.startIndices.push(replay.indices.length);
const skippedFeatHash = {};
skippedFeatHash[getUid(feature2)] = true;
replay.drawReplay(gl, context, skippedFeatHash, false);
expect(replay.setStrokeStyle_.calledOnce).to.be(true);
expect(replay.drawElements.calledTwice).to.be(true);
expect(gl.clear.called).to.be(true);
});
});
});

View File

@@ -1,467 +0,0 @@
import {getUid} from '../../../../../src/ol/util.js';
import Feature from '../../../../../src/ol/Feature.js';
import MultiPolygon from '../../../../../src/ol/geom/MultiPolygon.js';
import Polygon from '../../../../../src/ol/geom/Polygon.js';
import WebGLPolygonReplay from '../../../../../src/ol/render/webgl/PolygonReplay.js';
import {fragment, vertex} from '../../../../../src/ol/render/webgl/polygonreplay/defaultshader.js';
import Locations from '../../../../../src/ol/render/webgl/polygonreplay/defaultshader/Locations.js';
import LinkedList from '../../../../../src/ol/structs/LinkedList.js';
import RBush from '../../../../../src/ol/structs/RBush.js';
import Fill from '../../../../../src/ol/style/Fill.js';
import Stroke from '../../../../../src/ol/style/Stroke.js';
describe('ol.render.webgl.PolygonReplay', function() {
let replay;
const fillStyle = new Fill({
color: [0, 0, 255, 0.5]
});
const strokeStyle = new Stroke({
color: [0, 255, 0, 0.4]
});
beforeEach(function() {
const tolerance = 0.1;
const maxExtent = [-10000, -20000, 10000, 20000];
replay = new WebGLPolygonReplay(tolerance, maxExtent);
});
describe('#drawPolygon', function() {
beforeEach(function() {
replay.setFillStrokeStyle(fillStyle, strokeStyle);
});
it('sets the buffer data', function() {
const polygon1 = new Polygon(
[[[1000, 2000], [1200, 2000], [1200, 3000]]]
);
replay.drawPolygon(polygon1, null);
expect(replay.vertices).to.have.length(8);
expect(replay.indices).to.have.length(3);
expect(replay.vertices).to.eql([
1000, 2000, 1200, 3000, 1200, 2000, 1000, 2000]);
expect(replay.indices).to.eql([2, 0, 1]);
const polygon2 = new Polygon(
[[[4000, 2000], [4200, 2000], [4200, 3000]]]
);
replay.drawPolygon(polygon2, null);
expect(replay.vertices).to.have.length(16);
expect(replay.indices).to.have.length(6);
expect(replay.vertices).to.eql([
1000, 2000, 1200, 3000, 1200, 2000, 1000, 2000,
4000, 2000, 4200, 3000, 4200, 2000, 4000, 2000
]);
expect(replay.indices).to.eql([2, 0, 1, 6, 4, 5]);
});
});
describe('#drawMultiPolygon', function() {
beforeEach(function() {
replay.setFillStrokeStyle(fillStyle, strokeStyle);
});
it('sets the buffer data', function() {
const multiPolygon = new MultiPolygon([
[[[1000, 2000], [1200, 2000], [1200, 3000]]],
[[[4000, 2000], [4200, 2000], [4200, 3000]]]
]);
replay.drawMultiPolygon(multiPolygon, null);
expect(replay.vertices).to.have.length(16);
expect(replay.indices).to.have.length(6);
expect(replay.vertices).to.eql([
1000, 2000, 1200, 3000, 1200, 2000, 1000, 2000,
4000, 2000, 4200, 3000, 4200, 2000, 4000, 2000
]);
expect(replay.indices).to.eql([2, 0, 1, 6, 4, 5]);
});
});
describe('triangulating functions', function() {
let list, rtree;
beforeEach(function() {
list = new LinkedList();
rtree = new RBush();
});
describe('#createPoint_', function() {
it('creates a WebGL polygon vertex', function() {
const p = replay.createPoint_(1, 1, 1);
expect(p.x).to.be(1);
expect(p.y).to.be(1);
expect(p.i).to.be(1);
expect(p.reflex).to.be(undefined);
});
it('adds the point to the vertex array', function() {
replay.createPoint_(1, 1, 1);
expect(replay.vertices.length).to.be(2);
expect(replay.vertices[0]).to.be(1);
expect(replay.vertices[1]).to.be(1);
});
});
describe('#insertItem_', function() {
let p0, p1;
beforeEach(function() {
p0 = replay.createPoint_(1, 1, 1);
p1 = replay.createPoint_(2, 2, 2);
});
it('creates a WebGL polygon segment', function() {
const seg = replay.insertItem_(p0, p1, list, rtree);
expect(seg.p0).to.be(p0);
expect(seg.p1).to.be(p1);
});
it('inserts the segment into the provided linked list', function() {
const seg = replay.insertItem_(p0, p1, list, rtree);
expect(list.head_.data).to.be(seg);
});
it('inserts the segment into the R-Tree, if provided', function() {
replay.insertItem_(p0, p1, list);
expect(rtree.isEmpty()).to.be(true);
replay.insertItem_(p0, p1, list, rtree);
expect(rtree.isEmpty()).to.be(false);
});
});
describe('#removeItem_', function() {
let s0, s1;
beforeEach(function() {
const p = replay.createPoint_(2, 2, 2);
s0 = replay.insertItem_(replay.createPoint_(1, 1, 1),
p, list, rtree);
s1 = replay.insertItem_(p,
replay.createPoint_(5, 2, 3), list, rtree);
});
it('removes the current item', function() {
replay.removeItem_(s0, s1, list, rtree);
expect(list.head_.data).not.to.be(s1);
expect(rtree.getAll().length).to.be(1);
});
it('updates the preceding segment', function() {
const dataExtent = rtree.getExtent();
replay.removeItem_(s0, s1, list, rtree);
expect(s0.p1).to.be(s1.p1);
expect(rtree.getExtent()).to.eql(dataExtent);
});
});
describe('#getPointsInTriangle_', function() {
let p0, p1, p2, p3;
beforeEach(function() {
p0 = replay.createPoint_(2, 0, 0);
p1 = replay.createPoint_(0, 5, 1);
p2 = replay.createPoint_(2, 3, 2);
p3 = replay.createPoint_(4, 5, 3);
replay.insertItem_(p0, p1, list, rtree);
replay.insertItem_(p1, p2, list, rtree);
replay.insertItem_(p2, p3, list, rtree);
replay.insertItem_(p3, p0, list, rtree);
replay.classifyPoints_(list, rtree, false);
});
it('gets every point in a triangle', function() {
const points = replay.getPointsInTriangle_({x: -3, y: 6}, {x: 7, y: 6},
{x: 2, y: 2}, rtree);
expect(points).to.eql([p1, p2, p3]);
});
it('gets only reflex points in a triangle', function() {
const points = replay.getPointsInTriangle_({x: -3, y: 6}, {x: 7, y: 6},
{x: 2, y: 2}, rtree, true);
expect(points).to.eql([p2]);
});
});
describe('#getIntersections_', function() {
let p0, p1, p2, p3, s0, s1, s2, s3;
beforeEach(function() {
p0 = replay.createPoint_(2, 0, 0);
p1 = replay.createPoint_(0, 5, 1);
p2 = replay.createPoint_(2, 3, 2);
p3 = replay.createPoint_(4, 5, 3);
s0 = replay.insertItem_(p0, p1, list, rtree);
s1 = replay.insertItem_(p1, p2, list, rtree);
s2 = replay.insertItem_(p2, p3, list, rtree);
s3 = replay.insertItem_(p3, p0, list, rtree);
});
it('gets intersecting, but non touching segments', function() {
const segments = replay.getIntersections_({p0: {x: 0, y: 3}, p1: {x: 4, y: 5}},
rtree);
expect(segments).to.eql([s0, s1]);
});
it('gets intersecting and touching segments', function() {
const segments = replay.getIntersections_({p0: {x: 0, y: 3}, p1: {x: 4, y: 5}},
rtree, true);
expect(segments).to.eql([s0, s1, s2, s3]);
});
});
describe('#calculateIntersection_', function() {
const p0 = {x: 0, y: 0};
const p1 = {x: 4, y: 4};
const p2 = {x: 0, y: 4};
const p3 = {x: 4, y: 0};
it('calculates the intersection point of two intersecting segments', function() {
const i = replay.calculateIntersection_(p0, p1, p2, p3);
const t = replay.calculateIntersection_(p0, p1, p1, p2);
expect(i).to.eql([2, 2]);
expect(t).to.be(undefined);
});
it('calculates the intersection point of two touching segments', function() {
const t = replay.calculateIntersection_(p0, p1, p1, p2, true);
expect(t).to.eql([4, 4]);
});
});
describe('#diagonalIsInside_', function() {
let p0, p1, p2, p3;
beforeEach(function() {
p0 = replay.createPoint_(2, 0, 0);
p1 = replay.createPoint_(0, 5, 1);
p2 = replay.createPoint_(2, 3, 2);
p3 = replay.createPoint_(4, 5, 3);
replay.insertItem_(p0, p1, list, rtree);
replay.insertItem_(p1, p2, list, rtree);
replay.insertItem_(p2, p3, list, rtree);
replay.insertItem_(p3, p0, list, rtree);
replay.classifyPoints_(list, rtree, false);
});
it('identifies if diagonal is inside the polygon', function() {
const inside = replay.diagonalIsInside_(p1, p2, p3, p0, p1);
expect(inside).to.be(true);
});
it('identifies if diagonal is outside the polygon', function() {
const inside = replay.diagonalIsInside_(p0, p1, p2, p3, p0);
expect(inside).to.be(false);
});
});
describe('#classifyPoints_', function() {
let p0, p1, p2, p3;
beforeEach(function() {
p0 = replay.createPoint_(2, 0, 0);
p1 = replay.createPoint_(0, 5, 1);
p2 = replay.createPoint_(2, 3, 2);
p3 = replay.createPoint_(4, 5, 3);
replay.insertItem_(p0, p1, list, rtree);
replay.insertItem_(p1, p2, list, rtree);
replay.insertItem_(p2, p3, list, rtree);
replay.insertItem_(p3, p0, list, rtree);
});
it('classifies the points of clockwise polygons', function() {
replay.classifyPoints_(list, rtree, false);
expect(p0.reflex).to.be(false);
expect(p1.reflex).to.be(false);
expect(p2.reflex).to.be(true);
expect(p3.reflex).to.be(false);
});
it('classifies the points of counter-clockwise polygons', function() {
replay.classifyPoints_(list, rtree, true);
expect(p0.reflex).to.be(true);
expect(p1.reflex).to.be(true);
expect(p2.reflex).to.be(false);
expect(p3.reflex).to.be(true);
});
it('removes collinear points', function() {
replay.insertItem_(p3, p0, list, rtree);
replay.classifyPoints_(list, rtree, false);
expect(list.getLength()).to.be(4);
expect(rtree.getAll().length).to.be(4);
});
});
describe('#isSimple_', function() {
let p0, p1, p2, p3;
beforeEach(function() {
p0 = replay.createPoint_(2, 0, 0);
p1 = replay.createPoint_(0, 5, 1);
p2 = replay.createPoint_(2, 3, 2);
p3 = replay.createPoint_(4, 5, 3);
replay.insertItem_(p0, p1, list, rtree);
replay.insertItem_(p1, p2, list, rtree);
replay.insertItem_(p2, p3, list, rtree);
replay.insertItem_(p3, p0, list, rtree);
});
it('identifies simple polygons', function() {
const simple = replay.isSimple_(list, rtree);
expect(simple).to.be(true);
});
it('identifies self-intersecting polygons', function() {
const p4 = replay.createPoint_(2, 5, 4);
const p5 = replay.createPoint_(4, 2, 5);
replay.insertItem_(p0, p4, list, rtree);
replay.insertItem_(p4, p5, list, rtree);
replay.insertItem_(p5, p0, list, rtree);
const simple = replay.isSimple_(list, rtree);
expect(simple).to.be(false);
});
});
});
describe('#setUpProgram', function() {
let context, gl;
beforeEach(function() {
context = {
getProgram: function() {},
useProgram: function() {}
};
gl = {
enableVertexAttribArray: function() {},
vertexAttribPointer: function() {},
uniform1f: function() {},
uniform2fv: function() {},
getUniformLocation: function() {},
getAttribLocation: function() {}
};
});
it('returns the locations used by the shaders', function() {
const locations = replay.setUpProgram(gl, context, [2, 2], 1);
expect(locations).to.be.a(Locations);
});
it('gets and compiles the shaders', function() {
sinon.spy(context, 'getProgram');
sinon.spy(context, 'useProgram');
replay.setUpProgram(gl, context, [2, 2], 1);
expect(context.getProgram.calledWithExactly(fragment, vertex)).to.be(true);
expect(context.useProgram.calledOnce).to.be(true);
});
it('initializes the attrib pointers', function() {
sinon.spy(gl, 'getAttribLocation');
sinon.spy(gl, 'vertexAttribPointer');
sinon.spy(gl, 'enableVertexAttribArray');
replay.setUpProgram(gl, context, [2, 2], 1);
expect(gl.vertexAttribPointer.callCount).to.be(gl.getAttribLocation.callCount);
expect(gl.enableVertexAttribArray.callCount).to.be(
gl.getAttribLocation.callCount);
});
});
describe('#shutDownProgram', function() {
let context, gl;
beforeEach(function() {
context = {
getProgram: function() {},
useProgram: function() {}
};
gl = {
enableVertexAttribArray: function() {},
disableVertexAttribArray: function() {},
vertexAttribPointer: function() {},
uniform1f: function() {},
uniform2fv: function() {},
getUniformLocation: function() {},
getAttribLocation: function() {}
};
});
it('disables the attrib pointers', function() {
sinon.spy(gl, 'getAttribLocation');
sinon.spy(gl, 'disableVertexAttribArray');
const locations = replay.setUpProgram(gl, context, [2, 2], 1);
replay.shutDownProgram(gl, locations);
expect(gl.disableVertexAttribArray.callCount).to.be(
gl.getAttribLocation.callCount);
});
});
describe('#drawReplay', function() {
let gl, context;
const feature1 = new Feature({
geometry: new Polygon([[[0, 0], [500, 500], [500, 0], [0, 0]]])
});
const feature2 = new Feature({
geometry: new Polygon([[[0, 0], [500, 500], [500, 0], [0, 0]]])
});
const feature3 = new Feature({
geometry: new Polygon([[[0, 0], [500, 500], [500, 0], [0, 0]]])
});
beforeEach(function() {
gl = {
getParameter: function() {},
enable: function() {},
disable: function() {},
depthMask: function() {},
depthFunc: function() {},
clear: function() {}
};
context = {};
replay.setFillStyle_ = function() {};
replay.drawElements = function() {};
sinon.spy(replay, 'setFillStyle_');
sinon.spy(replay, 'drawElements');
});
it('draws the elements in a single call if they have the same style', function() {
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawPolygon(feature1.getGeometry(), feature1);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawPolygon(feature2.getGeometry(), feature2);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawPolygon(feature3.getGeometry(), feature3);
replay.startIndices.push(replay.indices.length);
replay.drawReplay(gl, context, {}, false);
expect(replay.setFillStyle_.calledOnce).to.be(true);
expect(replay.drawElements.calledOnce).to.be(true);
});
it('draws the elements in batches if there are multiple fill styles', function() {
const fillStyle2 = new Fill({
color: [0, 255, 0, 1]
});
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawPolygon(feature1.getGeometry(), feature1);
replay.setFillStrokeStyle(fillStyle2, strokeStyle);
replay.drawPolygon(feature2.getGeometry(), feature2);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawPolygon(feature3.getGeometry(), feature3);
replay.startIndices.push(replay.indices.length);
replay.drawReplay(gl, context, {}, false);
expect(replay.setFillStyle_.calledThrice).to.be(true);
expect(replay.drawElements.calledThrice).to.be(true);
});
it('can skip elements if needed', function() {
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawPolygon(feature1.getGeometry(), feature1);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawPolygon(feature2.getGeometry(), feature2);
replay.setFillStrokeStyle(fillStyle, strokeStyle);
replay.drawPolygon(feature3.getGeometry(), feature3);
replay.startIndices.push(replay.indices.length);
const skippedFeatHash = {};
skippedFeatHash[getUid(feature2)] = true;
replay.drawReplay(gl, context, skippedFeatHash, false);
expect(replay.setFillStyle_.calledOnce).to.be(true);
expect(replay.drawElements.calledTwice).to.be(true);
});
});
});

View File

@@ -1,318 +0,0 @@
import {createCanvasContext2D} from '../../../../../src/ol/dom.js';
import Point from '../../../../../src/ol/geom/Point.js';
import WebGLTextReplay from '../../../../../src/ol/render/webgl/TextReplay.js';
import Fill from '../../../../../src/ol/style/Fill.js';
import Stroke from '../../../../../src/ol/style/Stroke.js';
import Text from '../../../../../src/ol/style/Text.js';
describe('ol.render.webgl.TextReplay', function() {
let replay;
const createTextStyle = function(fillStyle, strokeStyle, text) {
const textStyle = new Text({
rotateWithView: true,
rotation: 1.5,
scale: 2,
textAlign: 'left',
textBaseline: 'top',
font: '12px Arial',
offsetX: 10,
offsetY: 10,
text: text,
fill: fillStyle,
stroke: strokeStyle
});
return textStyle;
};
beforeEach(function() {
const tolerance = 0.1;
const maxExtent = [-10000, -20000, 10000, 20000];
replay = new WebGLTextReplay(tolerance, maxExtent);
});
describe('#setTextStyle', function() {
let textStyle1, textStyle2, textStyle3, textStyle4;
beforeEach(function() {
textStyle1 = createTextStyle(
new Fill({
color: [0, 0, 0, 1]
}),
new Stroke({
width: 1,
color: [0, 0, 0, 1],
lineCap: 'butt',
lineJoin: 'bevel',
lineDash: [5, 5],
lineDashOffset: 15,
miterLimit: 2
}),
'someText');
textStyle2 = createTextStyle(
new Fill({
color: [255, 255, 255, 1]
}),
new Stroke({
width: 1,
color: [255, 255, 255, 1]
}),
'someText'
);
textStyle3 = createTextStyle(null, null, 'someText');
textStyle4 = createTextStyle(
new Fill({
color: [0, 0, 0, 1]
}),
new Stroke({
width: 1,
color: [0, 0, 0, 1]
}),
''
);
});
it('set expected states', function() {
replay.setTextStyle(textStyle1);
expect(replay.opacity).to.be(1);
expect(replay.rotation).to.be(1.5);
expect(replay.rotateWithView).to.be(true);
expect(replay.scale).to.be(1);
expect(replay.offsetX_).to.be(10);
expect(replay.offsetY_).to.be(10);
expect(replay.text_).to.be('someText');
expect(Object.keys(replay.atlases_)).to.have.length(1);
expect(replay.state_.fillColor).to.be('rgba(0,0,0,1)');
expect(replay.state_.strokeColor).to.be('rgba(0,0,0,1)');
expect(replay.state_.scale).to.be(2);
expect(replay.state_.lineWidth).to.be(1);
expect(replay.state_.lineJoin).to.be('bevel');
expect(replay.state_.lineCap).to.be('butt');
expect(replay.state_.lineDash).to.eql([5, 5]);
expect(replay.state_.lineDashOffset).to.be(15);
expect(replay.state_.miterLimit).to.be(2);
expect(replay.state_.font).to.be('12px Arial');
replay.setTextStyle(textStyle2);
expect(Object.keys(replay.atlases_)).to.have.length(2);
});
it('does not create an atlas, if an empty text is supplied', function() {
replay.setTextStyle(textStyle4);
expect(replay.text_).to.be('');
expect(Object.keys(replay.atlases_)).to.have.length(0);
});
it('does not create an atlas, if both fill and stroke styles are missing', function() {
replay.setTextStyle(textStyle3);
expect(replay.text_).to.be('');
expect(Object.keys(replay.atlases_)).to.have.length(0);
});
});
describe('#drawText', function() {
beforeEach(function() {
const textStyle = createTextStyle(
new Fill({
color: [0, 0, 0, 1]
}),
null, 'someText');
replay.setTextStyle(textStyle);
});
it('sets the buffer data', function() {
let point;
point = [1000, 2000];
replay.drawText(new Point(point), null);
expect(replay.vertices).to.have.length(256);
expect(replay.indices).to.have.length(48);
point = [2000, 3000];
replay.drawText(new Point(point), null);
expect(replay.vertices).to.have.length(512);
expect(replay.indices).to.have.length(96);
});
it('sets part of its state during drawing', function() {
const point = [1000, 2000];
replay.drawText(new Point(point), null);
const height = replay.currAtlas_.height;
const widths = replay.currAtlas_.width;
const width = widths.t;
const widthX = widths.s + widths.o + widths.m + widths.e + widths.T +
widths.e + widths.x;
const charInfo = replay.currAtlas_.atlas.getInfo('t');
expect(replay.height).to.be(height);
expect(replay.width).to.be(width);
expect(replay.originX).to.be(charInfo.offsetX);
expect(replay.originY).to.be(charInfo.offsetY);
expect(replay.imageHeight).to.be(charInfo.image.height);
expect(replay.imageWidth).to.be(charInfo.image.width);
expect(replay.anchorX).to.be(-widthX - 10);
expect(replay.anchorY).to.be(-10);
});
it('does not draw if text is empty', function() {
replay.text_ = '';
const point = [1000, 2000];
replay.drawText(new Point(point), null);
expect(replay.vertices).to.have.length(0);
expect(replay.indices).to.have.length(0);
});
});
describe('#addCharToAtlas_', function() {
beforeEach(function() {
const textStyle = createTextStyle(
new Fill({
color: [0, 0, 0, 1]
}),
null, 'someText');
replay.setTextStyle(textStyle);
});
it('adds a single character to the current atlas', function() {
const glyphAtlas = replay.currAtlas_.atlas;
let info;
replay.addCharToAtlas_('someText');
info = glyphAtlas.getInfo('someText');
expect(info).to.be(null);
replay.addCharToAtlas_('e');
replay.addCharToAtlas_('x');
info = glyphAtlas.getInfo('e');
expect(info).not.to.be(null);
info = glyphAtlas.getInfo('x');
expect(info).not.to.be(null);
});
it('keeps the atlas and the width dictionary synced', function() {
const glyphAtlas = replay.currAtlas_;
replay.addCharToAtlas_('e');
replay.addCharToAtlas_('x');
expect(Object.keys(glyphAtlas.width)).to.have.length(2);
replay.addCharToAtlas_('someText');
expect(Object.keys(glyphAtlas.width)).to.have.length(2);
});
});
describe('#getTextSize_', function() {
beforeEach(function() {
const textStyle = createTextStyle(
new Fill({
color: [0, 0, 0, 1]
}),
null, 'someText');
textStyle.setScale(1);
replay.setTextStyle(textStyle);
});
it('adds missing characters to the current atlas', function() {
const glyphAtlas = replay.currAtlas_;
let info;
expect(Object.keys(glyphAtlas.width)).to.have.length(0);
replay.getTextSize_(['someText']);
expect(Object.keys(glyphAtlas.width)).to.have.length(7);
info = glyphAtlas.atlas.getInfo('s');
expect(info).not.to.be(null);
info = glyphAtlas.atlas.getInfo('o');
expect(info).not.to.be(null);
info = glyphAtlas.atlas.getInfo('m');
expect(info).not.to.be(null);
info = glyphAtlas.atlas.getInfo('e');
expect(info).not.to.be(null);
info = glyphAtlas.atlas.getInfo('T');
expect(info).not.to.be(null);
info = glyphAtlas.atlas.getInfo('x');
expect(info).not.to.be(null);
info = glyphAtlas.atlas.getInfo('t');
expect(info).not.to.be(null);
});
it('returns the size of the label\'s bounding box in pixels', function() {
let size;
const mCtx = createCanvasContext2D(0, 0);
mCtx.font = '12px Arial';
const width = mCtx.measureText('someText').width;
const width2 = mCtx.measureText('anEvenLongerLine').width;
const height = Math.ceil(mCtx.measureText('M').width * 1.5);
size = replay.getTextSize_(['someText']);
expect(size[0]).to.be.within(width, width + 8);
expect(size[1]).to.be(height);
size = replay.getTextSize_(['someText', 'anEvenLongerLine']);
expect(size[0]).to.be.within(width2, width2 + 16);
expect(size[1]).to.be(height * 2);
});
});
describe('#getAtlas_', function() {
beforeEach(function() {
const textStyle = createTextStyle(
new Fill({
color: [0, 0, 0, 1]
}),
null, 'someText');
replay.setTextStyle(textStyle);
});
it('returns the appropriate atlas for the current state', function() {
const atlas = replay.currAtlas_;
const state = replay.state_;
expect(Object.keys(replay.atlases_)).to.have.length(1);
expect(replay.getAtlas_(state)).to.be(atlas);
expect(Object.keys(replay.atlases_)).to.have.length(1);
});
it('creates a new atlas if it cannot find the one for the current state', function() {
const atlas = replay.currAtlas_;
const state = replay.state_;
state.lineWidth = 50;
expect(Object.keys(replay.atlases_)).to.have.length(1);
expect(replay.getAtlas_(state)).not.to.be(atlas);
expect(Object.keys(replay.atlases_)).to.have.length(2);
});
});
describe('#getTextures', function() {
beforeEach(function() {
replay.textures_ = [1, 2];
});
it('returns the textures', function() {
const textures = replay.getTextures();
expect(textures).to.have.length(2);
expect(textures[0]).to.be(1);
expect(textures[1]).to.be(2);
expect(textures).to.eql(replay.getTextures(true));
});
});
describe('#getHitDetectionTextures', function() {
beforeEach(function() {
replay.textures_ = [1, 2];
});
it('returns the textures', function() {
const textures = replay.getHitDetectionTextures();
expect(textures).to.have.length(2);
expect(textures[0]).to.be(1);
expect(textures[1]).to.be(2);
});
});
});

View File

@@ -1,85 +0,0 @@
import WebGLTextureReplay from '../../../../../src/ol/render/webgl/TextureReplay.js';
import {fragment, vertex} from '../../../../../src/ol/render/webgl/texturereplay/defaultshader.js';
import Locations from '../../../../../src/ol/render/webgl/texturereplay/defaultshader/Locations.js';

describe('ol.render.webgl.TextureReplay', function() {
  let replay;
  beforeEach(function() {
    const tolerance = 0.1;
    const maxExtent = [-10000, -20000, 10000, 20000];
    replay = new WebGLTextureReplay(tolerance, maxExtent);
  });
  describe('#setUpProgram', function() {
    let context, gl;
    beforeEach(function() {
      context = {
        getProgram: function() {},
        useProgram: function() {}
      };
      gl = {
        enableVertexAttribArray: function() {},
        vertexAttribPointer: function() {},
        uniform1f: function() {},
        uniform2fv: function() {},
        getUniformLocation: function() {},
        getAttribLocation: function() {}
      };
    });
    it('returns the locations used by the shaders', function() {
      const locations = replay.setUpProgram(gl, context, [2, 2], 1);
      expect(locations).to.be.a(Locations);
    });
    it('gets and compiles the shaders', function() {
      sinon.spy(context, 'getProgram');
      sinon.spy(context, 'useProgram');
      replay.setUpProgram(gl, context, [2, 2], 1);
      expect(context.getProgram.calledWithExactly(fragment, vertex)).to.be(true);
      expect(context.useProgram.calledOnce).to.be(true);
    });
    it('initializes the attrib pointers', function() {
      sinon.spy(gl, 'getAttribLocation');
      sinon.spy(gl, 'vertexAttribPointer');
      sinon.spy(gl, 'enableVertexAttribArray');
      replay.setUpProgram(gl, context, [2, 2], 1);
      expect(gl.vertexAttribPointer.callCount).to.be(gl.getAttribLocation.callCount);
      expect(gl.enableVertexAttribArray.callCount).to.be(
        gl.getAttribLocation.callCount);
    });
  });

  describe('#shutDownProgram', function() {
    let context, gl;
    beforeEach(function() {
      context = {
        getProgram: function() {},
        useProgram: function() {}
      };
      gl = {
        enableVertexAttribArray: function() {},
        disableVertexAttribArray: function() {},
        vertexAttribPointer: function() {},
        uniform1f: function() {},
        uniform2fv: function() {},
        getUniformLocation: function() {},
        getAttribLocation: function() {}
      };
    });
    it('disables the attrib pointers', function() {
      sinon.spy(gl, 'getAttribLocation');
      sinon.spy(gl, 'disableVertexAttribArray');
      const locations = replay.setUpProgram(gl, context, [2, 2], 1);
      replay.shutDownProgram(gl, locations);
      expect(gl.disableVertexAttribArray.callCount).to.be(
        gl.getAttribLocation.callCount);
    });
  });
});

View File

@@ -1,76 +0,0 @@
import {apply as applyTransform} from '../../../../../src/ol/transform.js';
import Map from '../../../../../src/ol/Map.js';
import ImageLayer from '../../../../../src/ol/layer/Image.js';
import ImageSource from '../../../../../src/ol/source/Image.js';
import WebGLImageLayerRenderer from '../../../../../src/ol/renderer/webgl/ImageLayer.js';

describe('ol.renderer.webgl.ImageLayer', function() {
  describe('updateProjectionMatrix_', function() {
    let map;
    let renderer;
    let canvasWidth;
    let canvasHeight;
    let pixelRatio;
    let viewResolution;
    let viewRotation;
    let viewCenter;
    let imageExtent;
    beforeEach(function() {
      map = new Map({
        target: document.createElement('div')
      });
      const layer = new ImageLayer({
        source: new ImageSource({
          extent: [0, 0, 1, 1]
        })
      });
      renderer = new WebGLImageLayerRenderer(map.renderer_, layer);
      // input params
      canvasWidth = 512;
      canvasHeight = 256;
      pixelRatio = 2;
      viewResolution = 10;
      viewRotation = 0;
      viewCenter = [7680, 3840];
      // view extent is 5120, 2560, 10240, 5120
      // image size is 1024, 768
      // image resolution is 10
      imageExtent = [0, 0, 10240, 7680];
    });
    afterEach(function() {
      map.dispose();
    });
    it('produces a correct matrix', function() {
      renderer.updateProjectionMatrix_(canvasWidth, canvasHeight,
        pixelRatio, viewCenter, viewResolution, viewRotation, imageExtent);
      const matrix = renderer.getProjectionMatrix();
      let output = applyTransform(matrix, [-1, -1]);
      expect(output[0]).to.eql(-6);
      expect(output[1]).to.eql(-6);
      output = applyTransform(matrix, [1, -1]);
      expect(output[0]).to.eql(2);
      expect(output[1]).to.eql(-6);
      output = applyTransform(matrix, [-1, 1]);
      expect(output[0]).to.eql(-6);
      expect(output[1]).to.eql(6);
      output = applyTransform(matrix, [1, 1]);
      expect(output[0]).to.eql(2);
      expect(output[1]).to.eql(6);
      output = applyTransform(matrix, [0, 0]);
      expect(output[0]).to.eql(-2);
      expect(output[1]).to.eql(0);
    });
  });
});

View File

@@ -1,4 +1,4 @@
import _ol_webgl_Buffer_ from '../../../../src/ol/webgl/Buffer.js';
import WebGLArrayBuffer from '../../../../src/ol/webgl/Buffer';

describe('ol.webgl.Buffer', function() {
@@ -9,7 +9,7 @@ describe('ol.webgl.Buffer', function() {
    let b;
    beforeEach(function() {
      b = new _ol_webgl_Buffer_();
      b = new WebGLArrayBuffer();
    });

    it('constructs an empty instance', function() {
@@ -22,7 +22,7 @@ describe('ol.webgl.Buffer', function() {
    let b;
    beforeEach(function() {
      b = new _ol_webgl_Buffer_([0, 1, 2, 3]);
      b = new WebGLArrayBuffer([0, 1, 2, 3]);
    });

    it('constructs a populated instance', function() {
@@ -37,7 +37,7 @@ describe('ol.webgl.Buffer', function() {
    let b;
    beforeEach(function() {
      b = new _ol_webgl_Buffer_();
      b = new WebGLArrayBuffer();
    });

    describe('getArray', function() {

Some files were not shown because too many files have changed in this diff.