planning
All checks were successful
Publish To Prod / deploy_and_publish (push) Successful in 35s

This commit is contained in:
2024-10-14 09:15:30 +02:00
parent bcba00a730
commit 6e64e138e2
21059 changed files with 2317811 additions and 1 deletions

238
node_modules/ol/renderer/webgl/Layer.d.ts generated vendored Normal file
View File

@@ -0,0 +1,238 @@
// Ambient type declarations for ol/renderer/webgl/Layer; implementations live in Layer.js.
/**
 * An object holding positions both in an index and a vertex buffer.
 * @typedef {Object} BufferPositions
 * @property {number} vertexPosition Position in the vertex buffer
 * @property {number} indexPosition Position in the index buffer
 */
/**
 * Pushes a quad (two triangles) based on a point geometry
 * @param {Float32Array} instructions Array of render instructions for points.
 * @param {number} elementIndex Index from which render instructions will be read.
 * @param {Float32Array} vertexBuffer Buffer in the form of a typed array.
 * @param {Uint32Array} indexBuffer Buffer in the form of a typed array.
 * @param {number} customAttributesCount Amount of custom attributes for each element.
 * @param {BufferPositions} [bufferPositions] Buffer write positions; if not specified, positions will be set at 0.
 * @return {BufferPositions} New buffer positions where to write next
 * @property {number} vertexPosition New position in the vertex buffer where future writes should start.
 * @property {number} indexPosition New position in the index buffer where future writes should start.
 * @private
 */
export function writePointFeatureToBuffers(instructions: Float32Array, elementIndex: number, vertexBuffer: Float32Array, indexBuffer: Uint32Array, customAttributesCount: number, bufferPositions?: BufferPositions | undefined): BufferPositions;
/**
 * Returns a texture of 1x1 pixel, white
 * @private
 * @return {ImageData} Image data.
 */
export function getBlankImageData(): ImageData;
/**
 * Generates a color array based on a numerical id
 * Note: the range for each component is 0 to 1 with 256 steps
 * @param {number} id Id
 * @param {Array<number>} [opt_array] Reusable array
 * @return {Array<number>} Color array containing the encoded id
 */
export function colorEncodeId(id: number, opt_array?: number[] | undefined): Array<number>;
/**
 * Reads an id from a color-encoded array
 * Note: the expected range for each component is 0 to 1 with 256 steps.
 * @param {Array<number>} color Color array containing the encoded id
 * @return {number} Decoded id
 */
export function colorDecodeId(color: Array<number>): number;
/** Identifies the kind of message exchanged with the buffer-generation worker. */
export type WebGLWorkerMessageType = string;
export namespace WebGLWorkerMessageType {
    const GENERATE_BUFFERS: string;
}
export default WebGLLayerRenderer;
/**
 * An object holding positions both in an index and a vertex buffer.
 */
export type BufferPositions = {
    /**
     * Position in the vertex buffer
     */
    vertexPosition: number;
    /**
     * Position in the index buffer
     */
    indexPosition: number;
};
/**
 * This message will trigger the generation of a vertex and an index buffer based on the given render instructions.
 * When the buffers are generated, the worker will send a message of the same type to the main thread, with
 * the generated buffers in it.
 * Note that any additional properties present in the message *will* be sent back to the main thread.
 */
export type WebGLWorkerGenerateBuffersMessage = {
    /**
     * Message type
     */
    type: WebGLWorkerMessageType;
    /**
     * Render instructions raw binary buffer.
     */
    renderInstructions: ArrayBuffer;
    /**
     * Vertices array raw binary buffer (sent by the worker).
     */
    vertexBuffer?: ArrayBuffer | undefined;
    /**
     * Indices array raw binary buffer (sent by the worker).
     */
    indexBuffer?: ArrayBuffer | undefined;
    /**
     * Number of custom attributes in the render instructions.
     */
    customAttributesCount?: number | undefined;
};
export type PostProcessesOptions = {
    /**
     * Scale ratio; if < 1, the post process will render to a texture smaller than
     * the main canvas that will then be sampled up (useful for saving resource on blur steps).
     */
    scaleRatio?: number | undefined;
    /**
     * Vertex shader source
     */
    vertexShader?: string | undefined;
    /**
     * Fragment shader source
     */
    fragmentShader?: string | undefined;
    /**
     * Uniform definitions for the post process step
     */
    uniforms?: {
        [x: string]: import("../../webgl/Helper.js").UniformValue;
    } | undefined;
};
export type Options = {
    /**
     * Uniform definitions for the post process steps
     */
    uniforms?: {
        [x: string]: import("../../webgl/Helper.js").UniformValue;
    } | undefined;
    /**
     * Post-processes definitions
     */
    postProcesses?: PostProcessesOptions[] | undefined;
};
/**
* @typedef {Object} WebGLWorkerGenerateBuffersMessage
* This message will trigger the generation of a vertex and an index buffer based on the given render instructions.
 * When the buffers are generated, the worker will send a message of the same type to the main thread, with
 * the generated buffers in it.
 * Note that any additional properties present in the message *will* be sent back to the main thread.
 * @property {WebGLWorkerMessageType} type Message type
 * @property {ArrayBuffer} renderInstructions Render instructions raw binary buffer.
 * @property {ArrayBuffer} [vertexBuffer] Vertices array raw binary buffer (sent by the worker).
 * @property {ArrayBuffer} [indexBuffer] Indices array raw binary buffer (sent by the worker).
 * @property {number} [customAttributesCount] Number of custom attributes in the render instructions.
*/
/**
* @typedef {Object} PostProcessesOptions
* @property {number} [scaleRatio] Scale ratio; if < 1, the post process will render to a texture smaller than
* the main canvas that will then be sampled up (useful for saving resource on blur steps).
* @property {string} [vertexShader] Vertex shader source
* @property {string} [fragmentShader] Fragment shader source
* @property {Object<string,import("../../webgl/Helper").UniformValue>} [uniforms] Uniform definitions for the post process step
*/
/**
* @typedef {Object} Options
* @property {Object<string,import("../../webgl/Helper").UniformValue>} [uniforms] Uniform definitions for the post process steps
* @property {Array<PostProcessesOptions>} [postProcesses] Post-processes definitions
*/
/**
 * @classdesc
 * Base WebGL renderer class.
 * Holds all logic related to data manipulation & some common rendering logic.
 * Concrete subclasses (e.g. the points layer renderer declared in PointsLayer.d.ts)
 * implement prepareFrameInternal and the actual frame rendering.
 * @template {import("../../layer/Layer.js").default} LayerType
 * @extends {LayerRenderer<LayerType>}
 */
declare class WebGLLayerRenderer<LayerType extends import("../../layer/Layer.js").default<import("../../source/Source.js").default, LayerRenderer<any>>> extends LayerRenderer<LayerType> {
    /**
     * @param {LayerType} layer Layer.
     * @param {Options} [opt_options] Options.
     */
    constructor(layer: LayerType, opt_options?: Options | undefined);
    /**
     * The transform for viewport CSS pixels to rendered pixels. This transform is only
     * set before dispatching rendering events.
     * @private
     * @type {import("../../transform.js").Transform}
     */
    private inversePixelTransform_;
    /**
     * @private
     * @type {CanvasRenderingContext2D}
     */
    private pixelContext_;
    /**
     * Post-process definitions forwarded to the WebGL helper.
     * @private
     */
    private postProcesses_;
    /**
     * Uniform definitions forwarded to the WebGL helper.
     * @private
     */
    private uniforms_;
    /**
     * @type {WebGLHelper}
     * @protected
     */
    protected helper: WebGLHelper;
    /**
     * @param {WebGLRenderingContext} context The WebGL rendering context.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @protected
     */
    protected dispatchPreComposeEvent(context: WebGLRenderingContext, frameState: import("../../PluggableMap.js").FrameState): void;
    /**
     * @param {WebGLRenderingContext} context The WebGL rendering context.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @protected
     */
    protected dispatchPostComposeEvent(context: WebGLRenderingContext, frameState: import("../../PluggableMap.js").FrameState): void;
    /**
     * Reset options (only handles uniforms).
     * @param {Options} options Options.
     */
    reset(options: Options): void;
    /**
     * @protected
     */
    protected removeHelper(): void;
    /**
     * Hook called after the WebGL helper has been (re)created.
     * @protected
     */
    protected afterHelperCreated(): void;
    /**
     * Determine whether renderFrame should be called.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @return {boolean} Layer is ready to be rendered.
     * @protected
     */
    protected prepareFrameInternal(frameState: import("../../PluggableMap.js").FrameState): boolean;
    /**
     * @param {import("../../render/EventType.js").default} type Event type.
     * @param {WebGLRenderingContext} context The rendering context.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @private
     */
    private dispatchRenderEvent_;
    /**
     * @param {WebGLRenderingContext} context The rendering context.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @protected
     */
    protected preRender(context: WebGLRenderingContext, frameState: import("../../PluggableMap.js").FrameState): void;
    /**
     * @param {WebGLRenderingContext} context The rendering context.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @protected
     */
    protected postRender(context: WebGLRenderingContext, frameState: import("../../PluggableMap.js").FrameState): void;
}
import LayerRenderer from "../Layer.js";
import WebGLHelper from "../../webgl/Helper.js";
//# sourceMappingURL=Layer.d.ts.map

1
node_modules/ol/renderer/webgl/Layer.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"Layer.d.ts","sourceRoot":"","sources":["../../src/renderer/webgl/Layer.js"],"names":[],"mappings":"AAkWA;;;;;GAKG;AAEH;;;;;;;;;;;;GAYG;AACH,yDAXW,YAAY,gBACZ,MAAM,gBACN,YAAY,eACZ,WAAW,yBACX,MAAM,kDAEL,eAAe,CAgE1B;AAED;;;;GAIG;AACH,qCAFY,SAAS,CAUpB;AAED;;;;;;GAMG;AACH,kCAJW,MAAM,qCAEL,MAAM,MAAM,CAAC,CAWxB;AAED;;;;;GAKG;AACH,qCAHW,MAAM,MAAM,CAAC,GACZ,MAAM,CAWjB;qCAndS,MAAM;;;;;;;;;;;;oBAqVF,MAAM;;;;mBACN,MAAM;;;;;;;;;;;;UA1UN,sBAAsB;;;;wBACtB,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAPzB;;;;;;;;;;;GAWG;AAEH;;;;;;;GAOG;AAEH;;;;GAIG;AAEH;;;;;;GAMG;AACH;IACE;;;OAGG;IACH,mBAHW,SAAS,qCA0CnB;IAlCC;;;;;OAKG;IACH,+BAA+C;IAE/C;;;OAGG;IACH,sBAAyB;IAEzB;;OAEG;IACH,uBAA2C;IAE3C;;OAEG;IACH,kBAAiC;IAEjC;;;OAGG;IACH,kBAHU,WAAW,CAGV;IAQb;;;;OAIG;IACH,2CAJW,qBAAqB,cACrB,OAAO,uBAAuB,EAAE,UAAU,QAcpD;IAED;;;;OAIG;IACH,4CAJW,qBAAqB,cACrB,OAAO,uBAAuB,EAAE,UAAU,QAcpD;IAED;;;OAGG;IACH,eAFW,OAAO,QAOjB;IAED;;OAEG;IACH,+BAKC;IAqDD;;OAEG;IACH,qCAAuB;IAEvB;;;;;OAKG;IACH,2CAJW,OAAO,uBAAuB,EAAE,UAAU,GACzC,OAAO,CAKlB;IAUD;;;;;OAKG;IACH,6BAsBC;IAED;;;;OAIG;IACH,6BAJW,qBAAqB,cACrB,OAAO,uBAAuB,EAAE,UAAU,QAKpD;IAED;;;;OAIG;IACH,8BAJW,qBAAqB,cACrB,OAAO,uBAAuB,EAAE,UAAU,QAKpD;CA+DF"}

410
node_modules/ol/renderer/webgl/Layer.js generated vendored Normal file
View File

@@ -0,0 +1,410 @@
// ES5 class-inheritance helper emitted by the TypeScript compiler (tslib-style):
// copies own static members from the base onto the derived constructor and
// wires the prototype chain so `instanceof` and `_super` calls work.
var __extends = (this && this.__extends) || (function () {
    var extendStatics = function (d, b) {
        // Pick the fastest available mechanism: Object.setPrototypeOf,
        // the non-standard __proto__, or a manual copy of own properties.
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        if (typeof b !== "function" && b !== null)
            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
        extendStatics(d, b);
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
/**
* @module ol/renderer/webgl/Layer
*/
import LayerProperty from '../../layer/Property.js';
import LayerRenderer from '../Layer.js';
import RenderEvent from '../../render/Event.js';
import RenderEventType from '../../render/EventType.js';
import WebGLHelper from '../../webgl/Helper.js';
import { apply as applyTransform, compose as composeTransform, create as createTransform, } from '../../transform.js';
import { containsCoordinate } from '../../extent.js';
/**
 * Types of messages exchanged with the buffer-generation worker.
 * @enum {string}
 */
export var WebGLWorkerMessageType = {
    GENERATE_BUFFERS: 'GENERATE_BUFFERS',
};
/**
* @typedef {Object} WebGLWorkerGenerateBuffersMessage
* This message will trigger the generation of a vertex and an index buffer based on the given render instructions.
 * When the buffers are generated, the worker will send a message of the same type to the main thread, with
 * the generated buffers in it.
 * Note that any additional properties present in the message *will* be sent back to the main thread.
 * @property {WebGLWorkerMessageType} type Message type
 * @property {ArrayBuffer} renderInstructions Render instructions raw binary buffer.
 * @property {ArrayBuffer} [vertexBuffer] Vertices array raw binary buffer (sent by the worker).
 * @property {ArrayBuffer} [indexBuffer] Indices array raw binary buffer (sent by the worker).
 * @property {number} [customAttributesCount] Number of custom attributes in the render instructions.
*/
/**
* @typedef {Object} PostProcessesOptions
* @property {number} [scaleRatio] Scale ratio; if < 1, the post process will render to a texture smaller than
* the main canvas that will then be sampled up (useful for saving resource on blur steps).
* @property {string} [vertexShader] Vertex shader source
* @property {string} [fragmentShader] Fragment shader source
* @property {Object<string,import("../../webgl/Helper").UniformValue>} [uniforms] Uniform definitions for the post process step
*/
/**
* @typedef {Object} Options
* @property {Object<string,import("../../webgl/Helper").UniformValue>} [uniforms] Uniform definitions for the post process steps
* @property {Array<PostProcessesOptions>} [postProcesses] Post-processes definitions
*/
/**
 * @classdesc
 * Base WebGL renderer class.
 * Holds all logic related to data manipulation & some common rendering logic
 * @template {import("../../layer/Layer.js").default} LayerType
 * @extends {LayerRenderer<LayerType>}
 */
var WebGLLayerRenderer = /** @class */ (function (_super) {
    __extends(WebGLLayerRenderer, _super);
    /**
     * @param {LayerType} layer Layer.
     * @param {Options} [opt_options] Options.
     */
    function WebGLLayerRenderer(layer, opt_options) {
        var _this = _super.call(this, layer) || this;
        var options = opt_options || {};
        /**
         * The transform for viewport CSS pixels to rendered pixels. This transform is only
         * set before dispatching rendering events.
         * @private
         * @type {import("../../transform.js").Transform}
         */
        _this.inversePixelTransform_ = createTransform();
        /**
         * 1x1 canvas context used by getDataAtPixel; created lazily on first use.
         * @private
         * @type {CanvasRenderingContext2D}
         */
        _this.pixelContext_ = null;
        /**
         * Post-process definitions forwarded to the WebGL helper.
         * @private
         */
        _this.postProcesses_ = options.postProcesses;
        /**
         * Uniform definitions forwarded to the WebGL helper.
         * @private
         */
        _this.uniforms_ = options.uniforms;
        /**
         * WebGL helper; created (or recreated) in prepareFrame once the canvas
         * cache key of the layer group is known.
         * @type {WebGLHelper}
         * @protected
         */
        _this.helper;
        // Dispose of the helper whenever the layer is attached to / detached from a map.
        layer.addChangeListener(LayerProperty.MAP, _this.removeHelper.bind(_this));
        // Bind once so the bound methods keep a stable identity when used as listeners.
        _this.dispatchPreComposeEvent = _this.dispatchPreComposeEvent.bind(_this);
        _this.dispatchPostComposeEvent = _this.dispatchPostComposeEvent.bind(_this);
        return _this;
    }
    /**
     * @param {WebGLRenderingContext} context The WebGL rendering context.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @protected
     */
    WebGLLayerRenderer.prototype.dispatchPreComposeEvent = function (context, frameState) {
        var layer = this.getLayer();
        if (layer.hasListener(RenderEventType.PRECOMPOSE)) {
            var event_1 = new RenderEvent(RenderEventType.PRECOMPOSE, undefined, frameState, context);
            layer.dispatchEvent(event_1);
        }
    };
    /**
     * @param {WebGLRenderingContext} context The WebGL rendering context.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @protected
     */
    WebGLLayerRenderer.prototype.dispatchPostComposeEvent = function (context, frameState) {
        var layer = this.getLayer();
        if (layer.hasListener(RenderEventType.POSTCOMPOSE)) {
            var event_2 = new RenderEvent(RenderEventType.POSTCOMPOSE, undefined, frameState, context);
            layer.dispatchEvent(event_2);
        }
    };
    /**
     * Reset options (only handles uniforms).
     * @param {Options} options Options.
     */
    WebGLLayerRenderer.prototype.reset = function (options) {
        this.uniforms_ = options.uniforms;
        if (this.helper) {
            this.helper.setUniforms(this.uniforms_);
        }
    };
    /**
     * Dispose of the WebGL helper (if any) and clear the reference so that
     * prepareFrame will create a fresh one.
     * @protected
     */
    WebGLLayerRenderer.prototype.removeHelper = function () {
        if (this.helper) {
            this.helper.dispose();
            delete this.helper;
        }
    };
    /**
     * Determine whether renderFrame should be called.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @return {boolean} Layer is ready to be rendered.
     */
    WebGLLayerRenderer.prototype.prepareFrame = function (frameState) {
        if (this.getLayer().getRenderSource()) {
            var incrementGroup = true;
            var groupNumber = -1;
            var className = void 0;
            // Determine this layer's group number: consecutive WebGL layers with the
            // same class name fall in the same group and share a cached canvas.
            for (var i = 0, ii = frameState.layerStatesArray.length; i < ii; i++) {
                var layer = frameState.layerStatesArray[i].layer;
                var renderer = layer.getRenderer();
                if (!(renderer instanceof WebGLLayerRenderer)) {
                    incrementGroup = true;
                    continue;
                }
                var layerClassName = layer.getClassName();
                if (incrementGroup || layerClassName !== className) {
                    groupNumber += 1;
                    incrementGroup = false;
                }
                className = layerClassName;
                if (renderer === this) {
                    break;
                }
            }
            var canvasCacheKey = 'map/' + frameState.mapId + '/group/' + groupNumber;
            // (Re)create the helper when none exists yet or the cache key changed.
            if (!this.helper || !this.helper.canvasCacheKeyMatches(canvasCacheKey)) {
                this.removeHelper();
                this.helper = new WebGLHelper({
                    postProcesses: this.postProcesses_,
                    uniforms: this.uniforms_,
                    canvasCacheKey: canvasCacheKey,
                });
                if (className) {
                    this.helper.getCanvas().className = className;
                }
                this.afterHelperCreated();
            }
        }
        return this.prepareFrameInternal(frameState);
    };
    /**
     * Hook called after the WebGL helper has been (re)created; no-op by default.
     * @protected
     */
    WebGLLayerRenderer.prototype.afterHelperCreated = function () { };
    /**
     * Determine whether renderFrame should be called.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @return {boolean} Layer is ready to be rendered.
     * @protected
     */
    WebGLLayerRenderer.prototype.prepareFrameInternal = function (frameState) {
        return true;
    };
    /**
     * Clean up.
     */
    WebGLLayerRenderer.prototype.disposeInternal = function () {
        this.removeHelper();
        _super.prototype.disposeInternal.call(this);
    };
    /**
     * @param {import("../../render/EventType.js").default} type Event type.
     * @param {WebGLRenderingContext} context The rendering context.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @private
     */
    WebGLLayerRenderer.prototype.dispatchRenderEvent_ = function (type, context, frameState) {
        var layer = this.getLayer();
        if (layer.hasListener(type)) {
            // Refresh the CSS-pixel -> render-pixel transform before dispatching.
            composeTransform(this.inversePixelTransform_, 0, 0, frameState.pixelRatio, -frameState.pixelRatio, 0, 0, -frameState.size[1]);
            var event_3 = new RenderEvent(type, this.inversePixelTransform_, frameState, context);
            layer.dispatchEvent(event_3);
        }
    };
    /**
     * @param {WebGLRenderingContext} context The rendering context.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @protected
     */
    WebGLLayerRenderer.prototype.preRender = function (context, frameState) {
        this.dispatchRenderEvent_(RenderEventType.PRERENDER, context, frameState);
    };
    /**
     * @param {WebGLRenderingContext} context The rendering context.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @protected
     */
    WebGLLayerRenderer.prototype.postRender = function (context, frameState) {
        this.dispatchRenderEvent_(RenderEventType.POSTRENDER, context, frameState);
    };
    /**
     * @param {import("../../pixel.js").Pixel} pixel Pixel.
     * @param {import("../../PluggableMap.js").FrameState} frameState FrameState.
     * @param {number} hitTolerance Hit tolerance in pixels.
     * @return {Uint8ClampedArray|Uint8Array} The result. If there is no data at the pixel
     * location, null will be returned. If there is data, but pixel values cannot be
     * returned, an empty array will be returned.
     */
    WebGLLayerRenderer.prototype.getDataAtPixel = function (pixel, frameState, hitTolerance) {
        var renderPixel = applyTransform([frameState.pixelRatio, 0, 0, frameState.pixelRatio, 0, 0], pixel.slice());
        var gl = this.helper.getGL();
        if (!gl) {
            return null;
        }
        var layer = this.getLayer();
        var layerExtent = layer.getExtent();
        if (layerExtent) {
            var renderCoordinate = applyTransform(frameState.pixelToCoordinateTransform, pixel.slice());
            /** get only data inside of the layer extent */
            if (!containsCoordinate(layerExtent, renderCoordinate)) {
                return null;
            }
        }
        var attributes = gl.getContextAttributes();
        if (!attributes || !attributes.preserveDrawingBuffer) {
            // we assume there is data at the given pixel (although there might not be)
            return new Uint8Array();
        }
        var x = Math.round(renderPixel[0]);
        var y = Math.round(renderPixel[1]);
        var pixelContext = this.pixelContext_;
        if (!pixelContext) {
            // Lazily create a reusable 1x1 canvas to sample a single pixel.
            var pixelCanvas = document.createElement('canvas');
            pixelCanvas.width = 1;
            pixelCanvas.height = 1;
            pixelContext = pixelCanvas.getContext('2d');
            this.pixelContext_ = pixelContext;
        }
        pixelContext.clearRect(0, 0, 1, 1);
        var data;
        try {
            pixelContext.drawImage(gl.canvas, x, y, 1, 1, 0, 0, 1, 1);
            data = pixelContext.getImageData(0, 0, 1, 1).data;
        }
        catch (err) {
            // NOTE(review): 'data' is still undefined here (drawImage threw, e.g. on a
            // tainted canvas), so callers receive undefined rather than the documented
            // null — confirm this is intended.
            return data;
        }
        if (data[3] === 0) {
            return null;
        }
        return data;
    };
    return WebGLLayerRenderer;
}(LayerRenderer));
// Scratch storage reused across calls to avoid per-feature allocations.
var tmpArray_ = [];
var bufferPositions_ = { vertexPosition: 0, indexPosition: 0 };
/**
 * Writes a single vertex (x, y and the quad-corner index) at the given
 * offset of the vertex buffer.
 * @param {Float32Array} buffer Target vertex buffer.
 * @param {number} pos Write offset.
 * @param {number} x X coordinate.
 * @param {number} y Y coordinate.
 * @param {number} index Quad corner index (0 to 3).
 */
function writePointVertex(buffer, pos, x, y, index) {
    buffer[pos] = x;
    buffer[pos + 1] = y;
    buffer[pos + 2] = index;
}
/**
 * An object holding positions both in an index and a vertex buffer.
 * @typedef {Object} BufferPositions
 * @property {number} vertexPosition Position in the vertex buffer
 * @property {number} indexPosition Position in the index buffer
 */
/**
 * Pushes a quad (two triangles) based on a point geometry.
 * Four vertices are written (one per quad corner), each consisting of x, y,
 * the corner index and any custom attributes; six indices describe the two
 * triangles (0,1,3) and (1,2,3).
 * Note: the returned object is a module-level singleton reused on every call.
 * @param {Float32Array} instructions Array of render instructions for points.
 * @param {number} elementIndex Index from which render instructions will be read.
 * @param {Float32Array} vertexBuffer Buffer in the form of a typed array.
 * @param {Uint32Array} indexBuffer Buffer in the form of a typed array.
 * @param {number} customAttributesCount Amount of custom attributes for each element.
 * @param {BufferPositions} [bufferPositions] Buffer write positions; if not specified, positions will be set at 0.
 * @return {BufferPositions} New buffer positions where to write next
 * @private
 */
export function writePointFeatureToBuffers(instructions, elementIndex, vertexBuffer, indexBuffer, customAttributesCount, bufferPositions) {
    // Every vertex starts with x, y and the corner index...
    var baseVertexAttrsCount = 3;
    // ...while every instruction entry starts with x and y.
    var baseInstructionsCount = 2;
    var stride = baseVertexAttrsCount + customAttributesCount;
    var x = instructions[elementIndex];
    var y = instructions[elementIndex + 1];
    // Copy the feature's custom numerical attributes into the scratch array.
    var customAttrs = tmpArray_;
    customAttrs.length = customAttributesCount;
    for (var i = 0; i < customAttributesCount; i++) {
        customAttrs[i] = instructions[elementIndex + baseInstructionsCount + i];
    }
    var vPos = bufferPositions ? bufferPositions.vertexPosition : 0;
    var iPos = bufferPositions ? bufferPositions.indexPosition : 0;
    var baseIndex = vPos / stride;
    // Push the four quad corners (standard attributes first, then custom ones).
    for (var corner = 0; corner < 4; corner++) {
        writePointVertex(vertexBuffer, vPos, x, y, corner);
        if (customAttrs.length) {
            vertexBuffer.set(customAttrs, vPos + baseVertexAttrsCount);
        }
        vPos += stride;
    }
    // Two triangles forming the quad: (0,1,3) and (1,2,3).
    indexBuffer[iPos++] = baseIndex;
    indexBuffer[iPos++] = baseIndex + 1;
    indexBuffer[iPos++] = baseIndex + 3;
    indexBuffer[iPos++] = baseIndex + 1;
    indexBuffer[iPos++] = baseIndex + 2;
    indexBuffer[iPos++] = baseIndex + 3;
    bufferPositions_.vertexPosition = vPos;
    bufferPositions_.indexPosition = iPos;
    return bufferPositions_;
}
/**
 * Returns a texture of 1x1 pixel, white
 * @private
 * @return {ImageData} Image data.
 */
export function getBlankImageData() {
    var context = document.createElement('canvas').getContext('2d');
    var imageData = context.createImageData(1, 1);
    // Fill the single pixel with opaque white (RGBA).
    imageData.data.set([255, 255, 255, 255]);
    return imageData;
}
/**
 * Generates a color array based on a numerical id
 * Note: the range for each component is 0 to 1 with 256 steps
 * @param {number} id Id
 * @param {Array<number>} [opt_array] Reusable array
 * @return {Array<number>} Color array containing the encoded id
 */
export function colorEncodeId(id, opt_array) {
    var result = opt_array || [];
    var radix = 256;
    var divide = radix - 1;
    // Split the id into four base-256 "digits" (most significant in slot 0),
    // each normalized to the [0, 1] range.
    result[0] = Math.floor(id / (radix * radix * radix)) / divide;
    result[1] = (Math.floor(id / (radix * radix)) % radix) / divide;
    result[2] = (Math.floor(id / radix) % radix) / divide;
    result[3] = (id % radix) / divide;
    return result;
}
/**
 * Reads an id from a color-encoded array
 * Note: the expected range for each component is 0 to 1 with 256 steps.
 * @param {Array<number>} color Color array containing the encoded id
 * @return {number} Decoded id
 */
export function colorDecodeId(color) {
    var radix = 256;
    var mult = radix - 1;
    // Reassemble the id from the four components; each component is rounded
    // independently, mirroring how it was encoded.
    var weights = [radix * radix * radix, radix * radix, radix, 1];
    var id = 0;
    for (var i = 0; i < 4; i++) {
        id += Math.round(color[i] * weights[i] * mult);
    }
    return id;
}
export default WebGLLayerRenderer;
//# sourceMappingURL=Layer.js.map

1
node_modules/ol/renderer/webgl/Layer.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

324
node_modules/ol/renderer/webgl/PointsLayer.d.ts generated vendored Normal file
View File

@@ -0,0 +1,324 @@
// Ambient type declarations for ol/renderer/webgl/PointsLayer; implementations live in PointsLayer.js.
export default WebGLPointsLayerRenderer;
/**
 * A description of a custom attribute to be passed on to the GPU, with a value different
 * for each feature.
 */
export type CustomAttribute = {
    /**
     * Attribute name.
     */
    name: string;
    /**
     * This callback computes the numerical value of the
     * attribute for a given feature (properties are available as 2nd arg for quicker access).
     */
    callback: (arg0: import("../../Feature").default, arg1: {
        [x: string]: any;
    }) => number;
};
/**
 * Object that holds a reference to a feature, its geometry and properties. Used to optimize
 * rebuildBuffers by accessing these objects quicker.
 */
export type FeatureCacheItem = {
    /**
     * Feature
     */
    feature: import("../../Feature").default;
    /**
     * Feature properties
     */
    properties: {
        [x: string]: any;
    };
    /**
     * Feature geometry
     */
    geometry: import("../../geom").Geometry;
};
export type Options = {
    /**
     * A CSS class name to set to the canvas element.
     */
    className?: string | undefined;
    /**
     * These attributes will be read from the features in the source and then
     * passed to the GPU. The `name` property of each attribute will serve as its identifier:
     * * In the vertex shader as an `attribute` by prefixing it with `a_`
     * * In the fragment shader as a `varying` by prefixing it with `v_`
     * Please note that these can only be numerical values.
     */
    attributes?: CustomAttribute[] | undefined;
    /**
     * Vertex shader source, mandatory.
     */
    vertexShader: string;
    /**
     * Fragment shader source, mandatory.
     */
    fragmentShader: string;
    /**
     * Vertex shader source for hit detection rendering.
     */
    hitVertexShader?: string | undefined;
    /**
     * Fragment shader source for hit detection rendering.
     */
    hitFragmentShader?: string | undefined;
    /**
     * Uniform definitions for the post process steps
     * Please note that `u_texture` is reserved for the main texture slot and `u_opacity` is reserved for the layer opacity.
     */
    uniforms?: {
        [x: string]: import("../../webgl/Helper.js").UniformValue;
    } | undefined;
    /**
     * Post-processes definitions
     */
    postProcesses?: import("./Layer.js").PostProcessesOptions[] | undefined;
};
/**
* @typedef {Object} CustomAttribute A description of a custom attribute to be passed on to the GPU, with a value different
* for each feature.
* @property {string} name Attribute name.
* @property {function(import("../../Feature").default, Object<string, *>):number} callback This callback computes the numerical value of the
* attribute for a given feature (properties are available as 2nd arg for quicker access).
*/
/**
* @typedef {Object} FeatureCacheItem Object that holds a reference to a feature, its geometry and properties. Used to optimize
* rebuildBuffers by accessing these objects quicker.
* @property {import("../../Feature").default} feature Feature
* @property {Object<string, *>} properties Feature properties
* @property {import("../../geom").Geometry} geometry Feature geometry
*/
/**
* @typedef {Object} Options
* @property {string} [className='ol-layer'] A CSS class name to set to the canvas element.
* @property {Array<CustomAttribute>} [attributes] These attributes will be read from the features in the source and then
* passed to the GPU. The `name` property of each attribute will serve as its identifier:
* * In the vertex shader as an `attribute` by prefixing it with `a_`
* * In the fragment shader as a `varying` by prefixing it with `v_`
* Please note that these can only be numerical values.
* @property {string} vertexShader Vertex shader source, mandatory.
* @property {string} fragmentShader Fragment shader source, mandatory.
* @property {string} [hitVertexShader] Vertex shader source for hit detection rendering.
* @property {string} [hitFragmentShader] Fragment shader source for hit detection rendering.
* @property {Object<string,import("../../webgl/Helper").UniformValue>} [uniforms] Uniform definitions for the post process steps
* Please note that `u_texture` is reserved for the main texture slot and `u_opacity` is reserved for the layer opacity.
* @property {Array<import("./Layer").PostProcessesOptions>} [postProcesses] Post-processes definitions
*/
/**
* @classdesc
* WebGL vector renderer optimized for points.
* All features will be rendered as quads (two triangles forming a square). New data will be flushed to the GPU
* every time the vector source changes.
*
* You need to provide vertex and fragment shaders for rendering. This can be done using
* {@link module:ol/webgl/ShaderBuilder~ShaderBuilder} utilities. These shaders shall expect a `a_position` attribute
* containing the screen-space projected center of the quad, as well as a `a_index` attribute
* whose value (0, 1, 2 or 3) indicates which quad vertex is currently getting processed (see structure below).
*
* To include variable attributes in the shaders, you need to declare them using the `attributes` property of
* the options object like so:
* ```js
* new WebGLPointsLayerRenderer(layer, {
* attributes: [
* {
* name: 'size',
* callback: function(feature) {
* // compute something with the feature
* }
* },
* {
* name: 'weight',
* callback: function(feature) {
* // compute something with the feature
* }
* },
* ],
* vertexShader:
* // shader using attribute a_weight and a_size
* fragmentShader:
* // shader using varying v_weight and v_size
* ```
*
* To enable hit detection, you must as well provide dedicated shaders using the `hitVertexShader`
* and `hitFragmentShader` properties. These shall expect the `a_hitColor` attribute to contain
* the final color that will have to be output for hit detection to work.
*
* The following uniform is used for the main texture: `u_texture`.
* The following uniform is used for the layer opacity: `u_opacity`.
*
* Please note that the main shader output should have premultiplied alpha, otherwise visual anomalies may occur.
*
* Points are rendered as quads with the following structure:
*
* ```
* (u0, v1) (u1, v1)
* [3]----------[2]
* |` |
* | ` |
* | ` |
* | ` |
* | ` |
* | ` |
* [0]----------[1]
* (u0, v0) (u1, v0)
* ```
*
* This uses {@link module:ol/webgl/Helper~WebGLHelper} internally.
*
* @api
*/
declare class WebGLPointsLayerRenderer extends WebGLLayerRenderer<any> {
    /**
     * @param {import("../../layer/Layer.js").default} layer Layer.
     * @param {Options} options Options.
     */
    constructor(layer: import("../../layer/Layer.js").default, options: Options);
    /** Last seen revision of the vector source; used to detect source changes. */
    sourceRevision_: number;
    /** Vertex buffer holding the quad vertices for the main render pass. */
    verticesBuffer_: WebGLArrayBuffer;
    /** Vertex buffer used by the hit-detection render pass. */
    hitVerticesBuffer_: WebGLArrayBuffer;
    /** Index buffer bound by both the main and the hit-detection passes. */
    indicesBuffer_: WebGLArrayBuffer;
    /**
     * @private
     */
    private vertexShader_;
    /**
     * @private
     */
    private fragmentShader_;
    /**
     * @type {WebGLProgram}
     * @private
     */
    private program_;
    /**
     * @type {boolean}
     * @private
     */
    private hitDetectionEnabled_;
    /**
     * @private
     */
    private hitVertexShader_;
    /**
     * @private
     */
    private hitFragmentShader_;
    /**
     * @type {WebGLProgram}
     * @private
     */
    private hitProgram_;
    /**
     * A list of attributes used by the renderer. By default only the position and
     * index of the vertex (0 to 3) are required.
     * @type {Array<import('../../webgl/Helper.js').AttributeDescription>}
     */
    attributes: Array<import('../../webgl/Helper.js').AttributeDescription>;
    /**
     * A list of attributes used for hit detection.
     * @type {Array<import('../../webgl/Helper.js').AttributeDescription>}
     */
    hitDetectionAttributes: Array<import('../../webgl/Helper.js').AttributeDescription>;
    /** Custom attributes (name + value callback) supplied via the constructor options. */
    customAttributes: CustomAttribute[];
    /** Extent rendered during the previous frame; compared against the current frame's extent to decide whether buffers must be rebuilt. */
    previousExtent_: import("../../extent.js").Extent;
    /**
     * This transform is updated on every frame and is the composition of:
     * - invert of the world->screen transform that was used when rebuilding buffers (see `this.renderTransform_`)
     * - current world->screen transform
     * @type {import("../../transform.js").Transform}
     * @private
     */
    private currentTransform_;
    /**
     * This transform is updated when buffers are rebuilt and converts world space coordinates to screen space
     * @type {import("../../transform.js").Transform}
     * @private
     */
    private renderTransform_;
    /**
     * @type {import("../../transform.js").Transform}
     * @private
     */
    private invertRenderTransform_;
    /**
     * @type {Float32Array}
     * @private
     */
    private renderInstructions_;
    /**
     * These instructions are used for hit detection
     * @type {Float32Array}
     * @private
     */
    private hitRenderInstructions_;
    /**
     * @type {WebGLRenderTarget}
     * @private
     */
    private hitRenderTarget_;
    /**
     * Keep track of latest message sent to worker
     * @type {number}
     * @private
     */
    private generateBuffersRun_;
    /** Web worker that generates the vertex/index buffers off the main thread. */
    worker_: Worker;
    /**
     * This object will be updated when the source changes. Key is uid.
     * @type {Object<string, FeatureCacheItem>}
     * @private
     */
    private featureCache_;
    /**
     * Amount of features in the cache.
     * @type {number}
     * @private
     */
    private featureCount_;
    /** Listener keys for source add/change/remove/clear events; removed on dispose. */
    sourceListenKeys_: import("../../events.js").EventsKey[];
    /**
     * @param {import("../../source/Vector.js").VectorSourceEvent} event Event.
     * @private
     */
    private handleSourceFeatureAdded_;
    /**
     * @param {import("../../source/Vector.js").VectorSourceEvent} event Event.
     * @private
     */
    private handleSourceFeatureChanged_;
    /**
     * @param {import("../../source/Vector.js").VectorSourceEvent} event Event.
     * @private
     */
    private handleSourceFeatureDelete_;
    /**
     * @private
     */
    private handleSourceFeatureClear_;
    /**
     * Render the layer.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @return {HTMLElement} The rendered element.
     */
    renderFrame(frameState: import("../../PluggableMap.js").FrameState): HTMLElement;
    /**
     * Rebuild internal webgl buffers based on current view extent; costly, should not be called too much
     * @param {import("../../PluggableMap").FrameState} frameState Frame state.
     * @private
     */
    private rebuildBuffers_;
    /**
     * Render the hit detection data to the corresponding render target
     * @param {import("../../PluggableMap.js").FrameState} frameState current frame state
     * @param {number} startWorld the world to render in the first iteration
     * @param {number} endWorld the last world to render
     * @param {number} worldWidth the width of the worlds being rendered
     */
    renderHitDetection(frameState: import("../../PluggableMap.js").FrameState, startWorld: number, endWorld: number, worldWidth: number): void;
}
import WebGLLayerRenderer from "./Layer.js";
import WebGLArrayBuffer from "../../webgl/Buffer.js";
//# sourceMappingURL=PointsLayer.d.ts.map

1
node_modules/ol/renderer/webgl/PointsLayer.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"PointsLayer.d.ts","sourceRoot":"","sources":["../../src/renderer/webgl/PointsLayer.js"],"names":[],"mappings":";;;;;;;;;UA+Bc,MAAM;;;;;qBACG,OAAO,eAAe,EAAE,OAAO;YAAS,MAAM;UAAM,MAAM;;;;;;;;;;aAOnE,OAAO,eAAe,EAAE,OAAO;;;;;YACxB,MAAM;;;;;cACb,OAAO,YAAY,EAAE,QAAQ;;;;;;;;;;;;;;;;;;kBAW7B,MAAM;;;;oBACN,MAAM;;;;;;;;;;;;;;;;;;;;;AAzBpB;;;;;;GAMG;AAEH;;;;;;GAMG;AAEH;;;;;;;;;;;;;;;GAeG;AAEH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8DG;AACH;IACE;;;OAGG;IACH,mBAHW,OAAO,sBAAsB,EAAE,OAAO,WACtC,OAAO,EAwQjB;IA1PC,wBAAyB;IAEzB,kCAAuE;IACvE,qCAA0E;IAC1E,iCAGC;IAED;;OAEG;IACH,sBAAyC;IAEzC;;OAEG;IACH,wBAA6C;IAE7C;;;OAGG;IACH,iBAAa;IAEb;;;OAGG;IACH,6BACqE;IAErE;;OAEG;IACH,yBAA+C;IAE/C;;OAEG;IACH,2BAAmD;IAEnD;;;OAGG;IACH,oBAAgB;IAYhB;;;;OAIG;IACH,YAFU,MAAM,OAAO,uBAAuB,EAAE,oBAAoB,CAAC,CAa3C;IAE1B;;;OAGG;IACH,wBAFU,MAAM,OAAO,uBAAuB,EAAE,oBAAoB,CAAC,CAuB3C;IAE1B,oCAAoE;IAEpE,kDAAoC;IAEpC;;;;;;OAMG;IACH,0BAAkD;IAElD;;;;OAIG;IACH,yBAAyC;IAEzC;;;OAGG;IACH,+BAA+C;IAE/C;;;OAGG;IACH,4BAA8C;IAE9C;;;;OAIG;IACH,+BAAiD;IAEjD;;;OAGG;IACH,yBAAqB;IAErB;;;;OAIG;IACH,4BAA4B;IAE5B,gBAAkC;IA4ClC;;;;OAIG;IACH,sBAAuB;IAEvB;;;;OAIG;IACH,sBAAsB;IAGtB,yDAyBC;IA6BH;;;OAGG;IACH,kCAQC;IAED;;;OAGG;IACH,oCAOC;IAED;;;OAGG;IACH,mCAIC;IAED;;OAEG;IACH,kCAGC;IAED;;;;OAIG;IACH,wBAHW,OAAO,uBAAuB,EAAE,UAAU,GACzC,WAAW,CAmDtB;IA6CD;;;;OAIG;IACH,wBA4GC;IA0CD;;;;;;OAMG;IACH,+BALW,OAAO,uBAAuB,EAAE,UAAU,cAC1C,MAAM,YACN,MAAM,cACN,MAAM,QAmChB;CAcF"}

623
node_modules/ol/renderer/webgl/PointsLayer.js generated vendored Normal file
View File

@@ -0,0 +1,623 @@
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
/**
* @module ol/renderer/webgl/PointsLayer
*/
import BaseVector from '../../layer/BaseVector.js';
import VectorEventType from '../../source/VectorEventType.js';
import ViewHint from '../../ViewHint.js';
import WebGLArrayBuffer from '../../webgl/Buffer.js';
import WebGLLayerRenderer, { WebGLWorkerMessageType, colorDecodeId, colorEncodeId, } from './Layer.js';
import WebGLRenderTarget from '../../webgl/RenderTarget.js';
import { ARRAY_BUFFER, DYNAMIC_DRAW, ELEMENT_ARRAY_BUFFER } from '../../webgl.js';
import { AttributeType, DefaultUniform } from '../../webgl/Helper.js';
import { apply as applyTransform, create as createTransform, makeInverse as makeInverseTransform, multiply as multiplyTransform, translate as translateTransform, } from '../../transform.js';
import { assert } from '../../asserts.js';
import { buffer, createEmpty, equals, getWidth } from '../../extent.js';
import { create as createWebGLWorker } from '../../worker/webgl.js';
import { getUid } from '../../util.js';
import { listen, unlistenByKey } from '../../events.js';
/**
* @typedef {Object} CustomAttribute A description of a custom attribute to be passed on to the GPU, with a value different
* for each feature.
* @property {string} name Attribute name.
* @property {function(import("../../Feature").default, Object<string, *>):number} callback This callback computes the numerical value of the
* attribute for a given feature (properties are available as 2nd arg for quicker access).
*/
/**
* @typedef {Object} FeatureCacheItem Object that holds a reference to a feature, its geometry and properties. Used to optimize
* rebuildBuffers by accessing these objects quicker.
* @property {import("../../Feature").default} feature Feature
* @property {Object<string, *>} properties Feature properties
* @property {import("../../geom").Geometry} geometry Feature geometry
*/
/**
* @typedef {Object} Options
* @property {string} [className='ol-layer'] A CSS class name to set to the canvas element.
* @property {Array<CustomAttribute>} [attributes] These attributes will be read from the features in the source and then
* passed to the GPU. The `name` property of each attribute will serve as its identifier:
* * In the vertex shader as an `attribute` by prefixing it with `a_`
* * In the fragment shader as a `varying` by prefixing it with `v_`
* Please note that these can only be numerical values.
* @property {string} vertexShader Vertex shader source, mandatory.
* @property {string} fragmentShader Fragment shader source, mandatory.
* @property {string} [hitVertexShader] Vertex shader source for hit detection rendering.
* @property {string} [hitFragmentShader] Fragment shader source for hit detection rendering.
* @property {Object<string,import("../../webgl/Helper").UniformValue>} [uniforms] Uniform definitions for the post process steps
* Please note that `u_texture` is reserved for the main texture slot and `u_opacity` is reserved for the layer opacity.
* @property {Array<import("./Layer").PostProcessesOptions>} [postProcesses] Post-processes definitions
*/
/**
* @classdesc
* WebGL vector renderer optimized for points.
* All features will be rendered as quads (two triangles forming a square). New data will be flushed to the GPU
* every time the vector source changes.
*
* You need to provide vertex and fragment shaders for rendering. This can be done using
* {@link module:ol/webgl/ShaderBuilder~ShaderBuilder} utilities. These shaders shall expect a `a_position` attribute
* containing the screen-space projected center of the quad, as well as a `a_index` attribute
* whose value (0, 1, 2 or 3) indicates which quad vertex is currently getting processed (see structure below).
*
* To include variable attributes in the shaders, you need to declare them using the `attributes` property of
* the options object like so:
* ```js
* new WebGLPointsLayerRenderer(layer, {
* attributes: [
* {
* name: 'size',
* callback: function(feature) {
* // compute something with the feature
* }
* },
* {
* name: 'weight',
* callback: function(feature) {
* // compute something with the feature
* }
* },
* ],
* vertexShader:
* // shader using attribute a_weight and a_size
* fragmentShader:
* // shader using varying v_weight and v_size
* ```
*
* To enable hit detection, you must as well provide dedicated shaders using the `hitVertexShader`
* and `hitFragmentShader` properties. These shall expect the `a_hitColor` attribute to contain
* the final color that will have to be output for hit detection to work.
*
* The following uniform is used for the main texture: `u_texture`.
* The following uniform is used for the layer opacity: `u_opacity`.
*
* Please note that the main shader output should have premultiplied alpha, otherwise visual anomalies may occur.
*
* Points are rendered as quads with the following structure:
*
* ```
* (u0, v1) (u1, v1)
* [3]----------[2]
* |` |
* | ` |
* | ` |
* | ` |
* | ` |
* | ` |
* [0]----------[1]
* (u0, v0) (u1, v0)
* ```
*
* This uses {@link module:ol/webgl/Helper~WebGLHelper} internally.
*
* @api
*/
var WebGLPointsLayerRenderer = /** @class */ (function (_super) {
__extends(WebGLPointsLayerRenderer, _super);
/**
* @param {import("../../layer/Layer.js").default} layer Layer.
* @param {Options} options Options.
*/
function WebGLPointsLayerRenderer(layer, options) {
var _this = this;
var uniforms = options.uniforms || {};
var projectionMatrixTransform = createTransform();
uniforms[DefaultUniform.PROJECTION_MATRIX] = projectionMatrixTransform;
_this = _super.call(this, layer, {
uniforms: uniforms,
postProcesses: options.postProcesses,
}) || this;
_this.ready = false;
_this.sourceRevision_ = -1;
_this.verticesBuffer_ = new WebGLArrayBuffer(ARRAY_BUFFER, DYNAMIC_DRAW);
_this.hitVerticesBuffer_ = new WebGLArrayBuffer(ARRAY_BUFFER, DYNAMIC_DRAW);
_this.indicesBuffer_ = new WebGLArrayBuffer(ELEMENT_ARRAY_BUFFER, DYNAMIC_DRAW);
/**
* @private
*/
_this.vertexShader_ = options.vertexShader;
/**
* @private
*/
_this.fragmentShader_ = options.fragmentShader;
/**
* @type {WebGLProgram}
* @private
*/
_this.program_;
/**
* @type {boolean}
* @private
*/
_this.hitDetectionEnabled_ =
options.hitFragmentShader && options.hitVertexShader ? true : false;
/**
* @private
*/
_this.hitVertexShader_ = options.hitVertexShader;
/**
* @private
*/
_this.hitFragmentShader_ = options.hitFragmentShader;
/**
* @type {WebGLProgram}
* @private
*/
_this.hitProgram_;
var customAttributes = options.attributes
? options.attributes.map(function (attribute) {
return {
name: 'a_' + attribute.name,
size: 1,
type: AttributeType.FLOAT,
};
})
: [];
/**
* A list of attributes used by the renderer. By default only the position and
* index of the vertex (0 to 3) are required.
* @type {Array<import('../../webgl/Helper.js').AttributeDescription>}
*/
_this.attributes = [
{
name: 'a_position',
size: 2,
type: AttributeType.FLOAT,
},
{
name: 'a_index',
size: 1,
type: AttributeType.FLOAT,
},
].concat(customAttributes);
/**
* A list of attributes used for hit detection.
* @type {Array<import('../../webgl/Helper.js').AttributeDescription>}
*/
_this.hitDetectionAttributes = [
{
name: 'a_position',
size: 2,
type: AttributeType.FLOAT,
},
{
name: 'a_index',
size: 1,
type: AttributeType.FLOAT,
},
{
name: 'a_hitColor',
size: 4,
type: AttributeType.FLOAT,
},
{
name: 'a_featureUid',
size: 1,
type: AttributeType.FLOAT,
},
].concat(customAttributes);
_this.customAttributes = options.attributes ? options.attributes : [];
_this.previousExtent_ = createEmpty();
/**
* This transform is updated on every frame and is the composition of:
* - invert of the world->screen transform that was used when rebuilding buffers (see `this.renderTransform_`)
* - current world->screen transform
* @type {import("../../transform.js").Transform}
* @private
*/
_this.currentTransform_ = projectionMatrixTransform;
/**
* This transform is updated when buffers are rebuilt and converts world space coordinates to screen space
* @type {import("../../transform.js").Transform}
* @private
*/
_this.renderTransform_ = createTransform();
/**
* @type {import("../../transform.js").Transform}
* @private
*/
_this.invertRenderTransform_ = createTransform();
/**
* @type {Float32Array}
* @private
*/
_this.renderInstructions_ = new Float32Array(0);
/**
* These instructions are used for hit detection
* @type {Float32Array}
* @private
*/
_this.hitRenderInstructions_ = new Float32Array(0);
/**
* @type {WebGLRenderTarget}
* @private
*/
_this.hitRenderTarget_;
/**
* Keep track of latest message sent to worker
* @type {number}
* @private
*/
_this.generateBuffersRun_ = 0;
_this.worker_ = createWebGLWorker();
_this.worker_.addEventListener('message',
/**
* @param {*} event Event.
* @this {WebGLPointsLayerRenderer}
*/
function (event) {
var received = event.data;
if (received.type === WebGLWorkerMessageType.GENERATE_BUFFERS) {
var projectionTransform = received.projectionTransform;
if (received.hitDetection) {
this.hitVerticesBuffer_.fromArrayBuffer(received.vertexBuffer);
this.helper.flushBufferData(this.hitVerticesBuffer_);
}
else {
this.verticesBuffer_.fromArrayBuffer(received.vertexBuffer);
this.helper.flushBufferData(this.verticesBuffer_);
}
this.indicesBuffer_.fromArrayBuffer(received.indexBuffer);
this.helper.flushBufferData(this.indicesBuffer_);
this.renderTransform_ = projectionTransform;
makeInverseTransform(this.invertRenderTransform_, this.renderTransform_);
if (received.hitDetection) {
this.hitRenderInstructions_ = new Float32Array(event.data.renderInstructions);
}
else {
this.renderInstructions_ = new Float32Array(event.data.renderInstructions);
if (received.generateBuffersRun === this.generateBuffersRun_) {
this.ready = true;
}
}
this.getLayer().changed();
}
}.bind(_this));
/**
* This object will be updated when the source changes. Key is uid.
* @type {Object<string, FeatureCacheItem>}
* @private
*/
_this.featureCache_ = {};
/**
* Amount of features in the cache.
* @type {number}
* @private
*/
_this.featureCount_ = 0;
var source = _this.getLayer().getSource();
_this.sourceListenKeys_ = [
listen(source, VectorEventType.ADDFEATURE, _this.handleSourceFeatureAdded_, _this),
listen(source, VectorEventType.CHANGEFEATURE, _this.handleSourceFeatureChanged_, _this),
listen(source, VectorEventType.REMOVEFEATURE, _this.handleSourceFeatureDelete_, _this),
listen(source, VectorEventType.CLEAR, _this.handleSourceFeatureClear_, _this),
];
source.forEachFeature(function (feature) {
this.featureCache_[getUid(feature)] = {
feature: feature,
properties: feature.getProperties(),
geometry: feature.getGeometry(),
};
this.featureCount_++;
}.bind(_this));
return _this;
}
WebGLPointsLayerRenderer.prototype.afterHelperCreated = function () {
this.program_ = this.helper.getProgram(this.fragmentShader_, this.vertexShader_);
if (this.hitDetectionEnabled_) {
this.hitProgram_ = this.helper.getProgram(this.hitFragmentShader_, this.hitVertexShader_);
this.hitRenderTarget_ = new WebGLRenderTarget(this.helper);
}
};
/**
* @param {import("../../source/Vector.js").VectorSourceEvent} event Event.
* @private
*/
WebGLPointsLayerRenderer.prototype.handleSourceFeatureAdded_ = function (event) {
var feature = event.feature;
this.featureCache_[getUid(feature)] = {
feature: feature,
properties: feature.getProperties(),
geometry: feature.getGeometry(),
};
this.featureCount_++;
};
/**
* @param {import("../../source/Vector.js").VectorSourceEvent} event Event.
* @private
*/
WebGLPointsLayerRenderer.prototype.handleSourceFeatureChanged_ = function (event) {
var feature = event.feature;
this.featureCache_[getUid(feature)] = {
feature: feature,
properties: feature.getProperties(),
geometry: feature.getGeometry(),
};
};
/**
* @param {import("../../source/Vector.js").VectorSourceEvent} event Event.
* @private
*/
WebGLPointsLayerRenderer.prototype.handleSourceFeatureDelete_ = function (event) {
var feature = event.feature;
delete this.featureCache_[getUid(feature)];
this.featureCount_--;
};
/**
* @private
*/
WebGLPointsLayerRenderer.prototype.handleSourceFeatureClear_ = function () {
this.featureCache_ = {};
this.featureCount_ = 0;
};
/**
* Render the layer.
* @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
* @return {HTMLElement} The rendered element.
*/
WebGLPointsLayerRenderer.prototype.renderFrame = function (frameState) {
var gl = this.helper.getGL();
this.preRender(gl, frameState);
var projection = frameState.viewState.projection;
var layer = this.getLayer();
var vectorSource = layer.getSource();
// FIXME fix hit detection isn't reliable when rendering multiple worlds
var multiWorld = vectorSource.getWrapX() && projection.canWrapX();
var projectionExtent = projection.getExtent();
var extent = frameState.extent;
var worldWidth = multiWorld ? getWidth(projectionExtent) : null;
var endWorld = multiWorld
? Math.ceil((extent[2] - projectionExtent[2]) / worldWidth) + 1
: 1;
var startWorld = multiWorld
? Math.floor((extent[0] - projectionExtent[0]) / worldWidth)
: 0;
var world = startWorld;
var renderCount = this.indicesBuffer_.getSize();
do {
// apply the current projection transform with the invert of the one used to fill buffers
this.helper.makeProjectionTransform(frameState, this.currentTransform_);
translateTransform(this.currentTransform_, world * worldWidth, 0);
multiplyTransform(this.currentTransform_, this.invertRenderTransform_);
this.helper.applyUniforms(frameState);
this.helper.drawElements(0, renderCount);
} while (++world < endWorld);
this.helper.finalizeDraw(frameState, this.dispatchPreComposeEvent, this.dispatchPostComposeEvent);
var canvas = this.helper.getCanvas();
if (this.hitDetectionEnabled_) {
this.renderHitDetection(frameState, startWorld, endWorld, worldWidth);
this.hitRenderTarget_.clearCachedData();
}
this.postRender(gl, frameState);
return canvas;
};
/**
* Determine whether renderFrame should be called.
* @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
* @return {boolean} Layer is ready to be rendered.
*/
WebGLPointsLayerRenderer.prototype.prepareFrameInternal = function (frameState) {
var layer = this.getLayer();
var vectorSource = layer.getSource();
var viewState = frameState.viewState;
var viewNotMoving = !frameState.viewHints[ViewHint.ANIMATING] &&
!frameState.viewHints[ViewHint.INTERACTING];
var extentChanged = !equals(this.previousExtent_, frameState.extent);
var sourceChanged = this.sourceRevision_ < vectorSource.getRevision();
if (sourceChanged) {
this.sourceRevision_ = vectorSource.getRevision();
}
if (viewNotMoving && (extentChanged || sourceChanged)) {
var projection = viewState.projection;
var resolution = viewState.resolution;
var renderBuffer = layer instanceof BaseVector ? layer.getRenderBuffer() : 0;
var extent = buffer(frameState.extent, renderBuffer * resolution);
vectorSource.loadFeatures(extent, resolution, projection);
this.rebuildBuffers_(frameState);
this.previousExtent_ = frameState.extent.slice();
}
this.helper.useProgram(this.program_);
this.helper.prepareDraw(frameState);
// write new data
this.helper.bindBuffer(this.verticesBuffer_);
this.helper.bindBuffer(this.indicesBuffer_);
this.helper.enableAttributes(this.attributes);
return true;
};
/**
* Rebuild internal webgl buffers based on current view extent; costly, should not be called too much
* @param {import("../../PluggableMap").FrameState} frameState Frame state.
* @private
*/
WebGLPointsLayerRenderer.prototype.rebuildBuffers_ = function (frameState) {
// saves the projection transform for the current frame state
var projectionTransform = createTransform();
this.helper.makeProjectionTransform(frameState, projectionTransform);
// here we anticipate the amount of render instructions that we well generate
// this can be done since we know that for normal render we only have x, y as base instructions,
// and x, y, r, g, b, a and featureUid for hit render instructions
// and we also know the amount of custom attributes to append to these
var totalInstructionsCount = (2 + this.customAttributes.length) * this.featureCount_;
if (!this.renderInstructions_ ||
this.renderInstructions_.length !== totalInstructionsCount) {
this.renderInstructions_ = new Float32Array(totalInstructionsCount);
}
if (this.hitDetectionEnabled_) {
var totalHitInstructionsCount = (7 + this.customAttributes.length) * this.featureCount_;
if (!this.hitRenderInstructions_ ||
this.hitRenderInstructions_.length !== totalHitInstructionsCount) {
this.hitRenderInstructions_ = new Float32Array(totalHitInstructionsCount);
}
}
// loop on features to fill the buffer
var featureCache, geometry;
var tmpCoords = [];
var tmpColor = [];
var renderIndex = 0;
var hitIndex = 0;
var hitColor;
for (var featureUid in this.featureCache_) {
featureCache = this.featureCache_[featureUid];
geometry = /** @type {import("../../geom").Point} */ (featureCache.geometry);
if (!geometry || geometry.getType() !== 'Point') {
continue;
}
tmpCoords[0] = geometry.getFlatCoordinates()[0];
tmpCoords[1] = geometry.getFlatCoordinates()[1];
applyTransform(projectionTransform, tmpCoords);
hitColor = colorEncodeId(hitIndex + 6, tmpColor);
this.renderInstructions_[renderIndex++] = tmpCoords[0];
this.renderInstructions_[renderIndex++] = tmpCoords[1];
// for hit detection, the feature uid is saved in the opacity value
// and the index of the opacity value is encoded in the color values
if (this.hitDetectionEnabled_) {
this.hitRenderInstructions_[hitIndex++] = tmpCoords[0];
this.hitRenderInstructions_[hitIndex++] = tmpCoords[1];
this.hitRenderInstructions_[hitIndex++] = hitColor[0];
this.hitRenderInstructions_[hitIndex++] = hitColor[1];
this.hitRenderInstructions_[hitIndex++] = hitColor[2];
this.hitRenderInstructions_[hitIndex++] = hitColor[3];
this.hitRenderInstructions_[hitIndex++] = Number(featureUid);
}
// pushing custom attributes
var value = void 0;
for (var j = 0; j < this.customAttributes.length; j++) {
value = this.customAttributes[j].callback(featureCache.feature, featureCache.properties);
this.renderInstructions_[renderIndex++] = value;
if (this.hitDetectionEnabled_) {
this.hitRenderInstructions_[hitIndex++] = value;
}
}
}
/** @type {import('./Layer').WebGLWorkerGenerateBuffersMessage} */
var message = {
type: WebGLWorkerMessageType.GENERATE_BUFFERS,
renderInstructions: this.renderInstructions_.buffer,
customAttributesCount: this.customAttributes.length,
};
// additional properties will be sent back as-is by the worker
message['projectionTransform'] = projectionTransform;
message['generateBuffersRun'] = ++this.generateBuffersRun_;
this.ready = false;
this.worker_.postMessage(message, [this.renderInstructions_.buffer]);
this.renderInstructions_ = null;
/** @type {import('./Layer').WebGLWorkerGenerateBuffersMessage} */
if (this.hitDetectionEnabled_) {
var hitMessage = {
type: WebGLWorkerMessageType.GENERATE_BUFFERS,
renderInstructions: this.hitRenderInstructions_.buffer,
customAttributesCount: 5 + this.customAttributes.length,
};
hitMessage['projectionTransform'] = projectionTransform;
hitMessage['hitDetection'] = true;
this.worker_.postMessage(hitMessage, [
this.hitRenderInstructions_.buffer,
]);
this.hitRenderInstructions_ = null;
}
};
/**
* @param {import("../../coordinate.js").Coordinate} coordinate Coordinate.
* @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
* @param {number} hitTolerance Hit tolerance in pixels.
* @param {import("../vector.js").FeatureCallback<T>} callback Feature callback.
* @param {Array<import("../Map.js").HitMatch<T>>} matches The hit detected matches with tolerance.
* @return {T|undefined} Callback result.
* @template T
*/
WebGLPointsLayerRenderer.prototype.forEachFeatureAtCoordinate = function (coordinate, frameState, hitTolerance, callback, matches) {
assert(this.hitDetectionEnabled_, 66);
if (!this.hitRenderInstructions_) {
return undefined;
}
var pixel = applyTransform(frameState.coordinateToPixelTransform, coordinate.slice());
var data = this.hitRenderTarget_.readPixel(pixel[0] / 2, pixel[1] / 2);
var color = [data[0] / 255, data[1] / 255, data[2] / 255, data[3] / 255];
var index = colorDecodeId(color);
var opacity = this.hitRenderInstructions_[index];
var uid = Math.floor(opacity).toString();
var source = this.getLayer().getSource();
var feature = source.getFeatureByUid(uid);
if (feature) {
return callback(feature, this.getLayer(), null);
}
return undefined;
};
/**
* Render the hit detection data to the corresponding render target
* @param {import("../../PluggableMap.js").FrameState} frameState current frame state
* @param {number} startWorld the world to render in the first iteration
* @param {number} endWorld the last world to render
* @param {number} worldWidth the width of the worlds being rendered
*/
WebGLPointsLayerRenderer.prototype.renderHitDetection = function (frameState, startWorld, endWorld, worldWidth) {
// skip render entirely if vertex buffers not ready/generated yet
if (!this.hitVerticesBuffer_.getSize()) {
return;
}
var world = startWorld;
this.hitRenderTarget_.setSize([
Math.floor(frameState.size[0] / 2),
Math.floor(frameState.size[1] / 2),
]);
this.helper.useProgram(this.hitProgram_);
this.helper.prepareDrawToRenderTarget(frameState, this.hitRenderTarget_, true);
this.helper.bindBuffer(this.hitVerticesBuffer_);
this.helper.bindBuffer(this.indicesBuffer_);
this.helper.enableAttributes(this.hitDetectionAttributes);
do {
this.helper.makeProjectionTransform(frameState, this.currentTransform_);
translateTransform(this.currentTransform_, world * worldWidth, 0);
multiplyTransform(this.currentTransform_, this.invertRenderTransform_);
this.helper.applyUniforms(frameState);
var renderCount = this.indicesBuffer_.getSize();
this.helper.drawElements(0, renderCount);
} while (++world < endWorld);
};
/**
* Clean up.
*/
WebGLPointsLayerRenderer.prototype.disposeInternal = function () {
this.worker_.terminate();
this.layer_ = null;
this.sourceListenKeys_.forEach(function (key) {
unlistenByKey(key);
});
this.sourceListenKeys_ = null;
_super.prototype.disposeInternal.call(this);
};
return WebGLPointsLayerRenderer;
}(WebGLLayerRenderer));
export default WebGLPointsLayerRenderer;
//# sourceMappingURL=PointsLayer.js.map

1
node_modules/ol/renderer/webgl/PointsLayer.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

191
node_modules/ol/renderer/webgl/TileLayer.d.ts generated vendored Normal file
View File

@@ -0,0 +1,191 @@
/** Names of the shader uniforms used by the WebGL tile layer renderer. */
export namespace Uniforms {
    const TILE_TEXTURE_ARRAY: string;
    const TILE_TRANSFORM: string;
    const TRANSITION_ALPHA: string;
    const DEPTH: string;
    const TEXTURE_PIXEL_WIDTH: string;
    const TEXTURE_PIXEL_HEIGHT: string;
    const TEXTURE_RESOLUTION: string;
    const TEXTURE_ORIGIN_X: string;
    const TEXTURE_ORIGIN_Y: string;
    const RENDER_EXTENT: string;
    const RESOLUTION: string;
    const ZOOM: string;
}
/** Names of the shader attributes used by the WebGL tile layer renderer. */
export namespace Attributes {
    const TEXTURE_COORD: string;
}
export default WebGLTileLayerRenderer;
/** Constructor options for {@link WebGLTileLayerRenderer}. */
export type Options = {
    /**
     * Vertex shader source.
     */
    vertexShader: string;
    /**
     * Fragment shader source.
     */
    fragmentShader: string;
    /**
     * Additional uniforms
     * made available to shaders.
     */
    uniforms?: {
        [x: string]: import("../../webgl/Helper.js").UniformValue;
    } | undefined;
    /**
     * Palette textures.
     */
    paletteTextures?: import("../../webgl/PaletteTexture.js").default[] | undefined;
    /**
     * The texture cache size.
     */
    cacheSize?: number | undefined;
};
export type LayerType = import("../../layer/WebGLTile.js").default;
/**
* @typedef {Object} Options
* @property {string} vertexShader Vertex shader source.
* @property {string} fragmentShader Fragment shader source.
* @property {Object<string, import("../../webgl/Helper").UniformValue>} [uniforms] Additional uniforms
* made available to shaders.
* @property {Array<import("../../webgl/PaletteTexture.js").default>} [paletteTextures] Palette textures.
* @property {number} [cacheSize=512] The texture cache size.
*/
/**
* @typedef {import("../../layer/WebGLTile.js").default} LayerType
*/
/**
* @classdesc
* WebGL renderer for tile layers.
* @extends {WebGLLayerRenderer<LayerType>}
* @api
*/
declare class WebGLTileLayerRenderer extends WebGLLayerRenderer<import("../../layer/WebGLTile.js").default> {
    /**
     * @param {LayerType} tileLayer Tile layer.
     * @param {Options} options Options.
     */
    constructor(tileLayer: LayerType, options: Options);
    /**
     * The last call to `renderFrame` was completed with all tiles loaded
     * @type {boolean}
     */
    renderComplete: boolean;
    /**
     * This transform converts texture coordinates to screen coordinates.
     * @type {import("../../transform.js").Transform}
     * @private
     */
    private tileTransform_;
    /**
     * @type {Array<number>}
     * @private
     */
    private tempMat4_;
    /**
     * @type {import("../../TileRange.js").default}
     * @private
     */
    private tempTileRange_;
    /**
     * @type {import("../../tilecoord.js").TileCoord}
     * @private
     */
    private tempTileCoord_;
    /**
     * @type {import("../../size.js").Size}
     * @private
     */
    private tempSize_;
    /**
     * @type {WebGLProgram}
     * @private
     */
    private program_;
    /**
     * @private
     */
    private vertexShader_;
    /**
     * @private
     */
    private fragmentShader_;
    /**
     * Tiles are rendered as a quad with the following structure:
     *
     * [P3]---------[P2]
     * |` |
     * | ` B |
     * | ` |
     * | ` |
     * | A ` |
     * | ` |
     * [P0]---------[P1]
     *
     * Triangle A: P0, P1, P3
     * Triangle B: P1, P2, P3
     *
     * @private
     */
    private indices_;
    /**
     * @type {import("../../structs/LRUCache.js").default<import("../../webgl/TileTexture.js").default>}
     * @private
     */
    private tileTextureCache_;
    /**
     * @type {Array<import("../../webgl/PaletteTexture.js").default>}
     * @private
     */
    private paletteTextures_;
    /**
     * @private
     * @type {import("../../PluggableMap.js").FrameState|null}
     */
    private frameState_;
    /**
     * @param {Options} options Options.
     */
    reset(options: Options): void;
    /**
     * @param {import("../../webgl/TileTexture").TileType} tile Tile.
     * @return {boolean} Tile is drawable.
     * @private
     */
    private isDrawableTile_;
    /**
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @param {import("../../extent.js").Extent} extent The extent to be rendered.
     * @param {number} initialZ The zoom level.
     * @param {Object<number, Array<TileTexture>>} tileTexturesByZ Lookup of tile textures by zoom level, to be populated.
     */
    enqueueTiles(frameState: import("../../PluggableMap.js").FrameState, extent: import("../../extent.js").Extent, initialZ: number, tileTexturesByZ: {
        [x: number]: Array<TileTexture>;
    }): void;
    /**
     * Render the layer.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @return {HTMLElement} The rendered element.
     */
    renderFrame(frameState: import("../../PluggableMap.js").FrameState): HTMLElement;
    /**
     * @param {import("../../pixel.js").Pixel} pixel Pixel.
     * @return {Uint8ClampedArray|Uint8Array|Float32Array|DataView} Data at the pixel location.
     */
    getData(pixel: import("../../pixel.js").Pixel): Uint8ClampedArray | Uint8Array | Float32Array | DataView;
    /**
     * Look for tiles covering the provided tile coordinate at an alternate
     * zoom level. Loaded tiles will be added to the provided tile texture lookup.
     * @param {import("../../tilegrid/TileGrid.js").default} tileGrid The tile grid.
     * @param {import("../../tilecoord.js").TileCoord} tileCoord The target tile coordinate.
     * @param {number} altZ The alternate zoom level.
     * @param {Object<number, Array<import("../../webgl/TileTexture.js").default>>} tileTexturesByZ Lookup of
     * tile textures by zoom level.
     * @return {boolean} The tile coordinate is covered by loaded tiles at the alternate zoom level.
     * @private
     */
    private findAltTiles_;
}
import WebGLLayerRenderer from "./Layer.js";
import TileTexture from "../../webgl/TileTexture.js";
//# sourceMappingURL=TileLayer.d.ts.map

1
node_modules/ol/renderer/webgl/TileLayer.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"TileLayer.d.ts","sourceRoot":"","sources":["../../src/renderer/webgl/TileLayer.js"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;kBAiIc,MAAM;;;;oBACN,MAAM;;;;;;;;;;;;;;;;;wBAQP,OAAO,0BAA0B,EAAE,OAAO;AAXvD;;;;;;;;GAQG;AAEH;;GAEG;AAEH;;;;;GAKG;AACH;IACE;;;OAGG;IACH,uBAHW,SAAS,WACT,OAAO,EAmGjB;IA5FC;;;OAGG;IACH,gBAFU,OAAO,CAEU;IAE3B;;;;OAIG;IACH,uBAAuC;IAEvC;;;OAGG;IACH,kBAA6B;IAE7B;;;OAGG;IACH,uBAA+C;IAE/C;;;OAGG;IACH,uBAA8C;IAE9C;;;OAGG;IACH,kBAAuB;IAEvB;;;OAGG;IACH,iBAAa;IAEb;;OAEG;IACH,sBAAyC;IAEzC;;OAEG;IACH,wBAA6C;IAE7C;;;;;;;;;;;;;;;;OAgBG;IACH,iBAAuE;IAKvE;;;OAGG;IACH,0BAAgD;IAEhD;;;OAGG;IACH,yBAAqD;IAErD;;;OAGG;IACH,oBAAuB;IAGzB;;OAEG;IACH,eAFW,OAAO,QAgBjB;IAWD;;;;OAIG;IACH,wBASC;IAoBD;;;;;OAKG;IACH,yBALW,OAAO,uBAAuB,EAAE,UAAU,UAC1C,OAAO,iBAAiB,EAAE,MAAM,YAChC,MAAM;YACC,MAAM,GAAE,MAAM,WAAW,CAAC;aA6F3C;IAED;;;;OAIG;IACH,wBAHW,OAAO,uBAAuB,EAAE,UAAU,GACzC,WAAW,CA4QtB;IAED;;;OAGG;IACH,eAHW,OAAO,gBAAgB,EAAE,KAAK,GAC7B,iBAAiB,GAAC,UAAU,GAAC,YAAY,GAAC,QAAQ,CAqF7D;IAED;;;;;;;;;;OAUG;IACH,sBA+BC;CA+BF"}

641
node_modules/ol/renderer/webgl/TileLayer.js generated vendored Normal file
View File

@@ -0,0 +1,641 @@
// TypeScript's emitted `__extends` helper: wires up prototype inheritance for
// the ES5 class pattern used below. Generated code — do not edit by hand.
var __extends = (this && this.__extends) || (function () {
    var extendStatics = function (d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        if (typeof b !== "function" && b !== null)
            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
        extendStatics(d, b);
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
/**
* @module ol/renderer/webgl/TileLayer
*/
import LRUCache from '../../structs/LRUCache.js';
import TileRange from '../../TileRange.js';
import TileState from '../../TileState.js';
import TileTexture from '../../webgl/TileTexture.js';
import WebGLArrayBuffer from '../../webgl/Buffer.js';
import WebGLLayerRenderer from './Layer.js';
import { AttributeType } from '../../webgl/Helper.js';
import { ELEMENT_ARRAY_BUFFER, STATIC_DRAW } from '../../webgl.js';
import { apply as applyTransform, create as createTransform, reset as resetTransform, rotate as rotateTransform, scale as scaleTransform, translate as translateTransform, } from '../../transform.js';
import { boundingExtent, containsCoordinate, getIntersection, isEmpty, } from '../../extent.js';
import { create as createMat4, fromTransform as mat4FromTransform, } from '../../vec/mat4.js';
import { createOrUpdate as createTileCoord, getKey as getTileCoordKey, } from '../../tilecoord.js';
import { fromUserExtent } from '../../proj.js';
import { getUid } from '../../util.js';
import { numberSafeCompareFunction } from '../../array.js';
import { toSize } from '../../size.js';
// Names of the uniforms made available to the tile shaders.
export var Uniforms = {
    TILE_TEXTURE_ARRAY: 'u_tileTextures',
    TILE_TRANSFORM: 'u_tileTransform',
    TRANSITION_ALPHA: 'u_transitionAlpha',
    DEPTH: 'u_depth',
    TEXTURE_PIXEL_WIDTH: 'u_texturePixelWidth',
    TEXTURE_PIXEL_HEIGHT: 'u_texturePixelHeight',
    TEXTURE_RESOLUTION: 'u_textureResolution',
    TEXTURE_ORIGIN_X: 'u_textureOriginX',
    TEXTURE_ORIGIN_Y: 'u_textureOriginY',
    RENDER_EXTENT: 'u_renderExtent',
    RESOLUTION: 'u_resolution',
    ZOOM: 'u_zoom',
};
// Names of the vertex attributes made available to the tile shaders.
export var Attributes = {
    TEXTURE_COORD: 'a_textureCoord',
};
/**
 * @type {Array<import('../../webgl/Helper.js').AttributeDescription>}
 */
var attributeDescriptions = [
    {
        name: Attributes.TEXTURE_COORD,
        size: 2,
        type: AttributeType.FLOAT,
    },
];
/**
 * Shared empty options object passed to `expireCache` after each frame.
 * @type {Object<string, boolean>}
 */
var empty = {};
/**
 * Map a zoom level onto a depth value in the half-open range [-1, 1);
 * higher zoom levels produce larger depth values.
 * @param {number} z A zoom level.
 * @return {number} A depth value.
 */
function depthForZ(z) {
    var shifted = z + 1;
    return 2 * (1 - 1 / shifted) - 1;
}
/**
* Add a tile texture to the lookup.
* @param {Object<number, Array<import("../../webgl/TileTexture.js").default>>} tileTexturesByZ Lookup of
* tile textures by zoom level.
* @param {import("../../webgl/TileTexture.js").default} tileTexture A tile texture.
* @param {number} z The zoom level.
*/
function addTileTextureToLookup(tileTexturesByZ, tileTexture, z) {
if (!(z in tileTexturesByZ)) {
tileTexturesByZ[z] = [];
}
tileTexturesByZ[z].push(tileTexture);
}
/**
 * Clip the frame extent to the layer extent (if any) and, for sources that
 * do not wrap horizontally, to the tile grid extent.
 * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
 * @param {import("../../extent.js").Extent} extent The frame extent.
 * @return {import("../../extent.js").Extent} Frame extent intersected with layer extents.
 */
function getRenderExtent(frameState, extent) {
    var state = frameState.layerStatesArray[frameState.layerIndex];
    var projection = frameState.viewState.projection;
    var result = extent;
    if (state.extent) {
        result = getIntersection(result, fromUserExtent(state.extent, projection));
    }
    var source = /** @type {import("../../source/Tile.js").default} */ (state.layer.getRenderSource());
    if (!source.getWrapX()) {
        var gridExtent = source.getTileGridForProjection(projection).getExtent();
        if (gridExtent) {
            result = getIntersection(result, gridExtent);
        }
    }
    return result;
}
function getCacheKey(source, tileCoord) {
return "".concat(source.getKey(), ",").concat(getTileCoordKey(tileCoord));
}
/**
* @typedef {Object} Options
* @property {string} vertexShader Vertex shader source.
* @property {string} fragmentShader Fragment shader source.
* @property {Object<string, import("../../webgl/Helper").UniformValue>} [uniforms] Additional uniforms
* made available to shaders.
* @property {Array<import("../../webgl/PaletteTexture.js").default>} [paletteTextures] Palette textures.
* @property {number} [cacheSize=512] The texture cache size.
*/
/**
* @typedef {import("../../layer/WebGLTile.js").default} LayerType
*/
/**
* @classdesc
* WebGL renderer for tile layers.
* @extends {WebGLLayerRenderer<LayerType>}
* @api
*/
var WebGLTileLayerRenderer = /** @class */ (function (_super) {
    __extends(WebGLTileLayerRenderer, _super);
    /**
     * @param {LayerType} tileLayer Tile layer.
     * @param {Options} options Options.
     */
    function WebGLTileLayerRenderer(tileLayer, options) {
        var _this = _super.call(this, tileLayer, {
            uniforms: options.uniforms,
        }) || this;
        /**
         * The last call to `renderFrame` was completed with all tiles loaded
         * @type {boolean}
         */
        _this.renderComplete = false;
        /**
         * This transform converts texture coordinates to screen coordinates.
         * @type {import("../../transform.js").Transform}
         * @private
         */
        _this.tileTransform_ = createTransform();
        /**
         * Scratch 4x4 matrix reused when uploading the tile transform uniform.
         * @type {Array<number>}
         * @private
         */
        _this.tempMat4_ = createMat4();
        /**
         * Scratch tile range reused across frames to avoid allocations.
         * @type {import("../../TileRange.js").default}
         * @private
         */
        _this.tempTileRange_ = new TileRange(0, 0, 0, 0);
        /**
         * Scratch tile coordinate reused across frames to avoid allocations.
         * @type {import("../../tilecoord.js").TileCoord}
         * @private
         */
        _this.tempTileCoord_ = createTileCoord(0, 0, 0);
        /**
         * Scratch size reused across frames to avoid allocations.
         * @type {import("../../size.js").Size}
         * @private
         */
        _this.tempSize_ = [0, 0];
        /**
         * Compiled in `afterHelperCreated` / `reset` once a helper is available.
         * @type {WebGLProgram}
         * @private
         */
        _this.program_;
        /**
         * @private
         */
        _this.vertexShader_ = options.vertexShader;
        /**
         * @private
         */
        _this.fragmentShader_ = options.fragmentShader;
        /**
         * Tiles are rendered as a quad with the following structure:
         *
         *  [P3]---------[P2]
         *   |`           |
         *   |  `     B   |
         *   |    `       |
         *   |      `     |
         *   |   A    `   |
         *   |          ` |
         *  [P0]---------[P1]
         *
         * Triangle A: P0, P1, P3
         * Triangle B: P1, P2, P3
         *
         * @private
         */
        _this.indices_ = new WebGLArrayBuffer(ELEMENT_ARRAY_BUFFER, STATIC_DRAW);
        _this.indices_.fromArray([0, 1, 3, 1, 2, 3]);
        var cacheSize = options.cacheSize !== undefined ? options.cacheSize : 512;
        /**
         * LRU cache of tile textures, keyed by source revision and tile coordinate.
         * @type {import("../../structs/LRUCache.js").default<import("../../webgl/TileTexture.js").default>}
         * @private
         */
        _this.tileTextureCache_ = new LRUCache(cacheSize);
        /**
         * @type {Array<import("../../webgl/PaletteTexture.js").default>}
         * @private
         */
        _this.paletteTextures_ = options.paletteTextures || [];
        /**
         * The frame state from the most recent `renderFrame` call (used by `getData`).
         * @private
         * @type {import("../../PluggableMap.js").FrameState|null}
         */
        _this.frameState_ = null;
        return _this;
    }
    /**
     * Replace the shaders, uniforms and palette textures with new options.
     * @param {Options} options Options.
     */
    WebGLTileLayerRenderer.prototype.reset = function (options) {
        _super.prototype.reset.call(this, {
            uniforms: options.uniforms,
        });
        this.vertexShader_ = options.vertexShader;
        this.fragmentShader_ = options.fragmentShader;
        this.paletteTextures_ = options.paletteTextures || [];
        if (this.helper) {
            this.program_ = this.helper.getProgram(this.fragmentShader_, this.vertexShader_);
        }
    };
    /**
     * Compile the shader program and upload the static quad index buffer
     * once a WebGL helper is available.
     */
    WebGLTileLayerRenderer.prototype.afterHelperCreated = function () {
        this.program_ = this.helper.getProgram(this.fragmentShader_, this.vertexShader_);
        this.helper.flushBufferData(this.indices_);
    };
    /**
     * @param {import("../../webgl/TileTexture").TileType} tile Tile.
     * @return {boolean} Tile is drawable.
     * @private
     */
    WebGLTileLayerRenderer.prototype.isDrawableTile_ = function (tile) {
        var tileLayer = this.getLayer();
        var tileState = tile.getState();
        var useInterimTilesOnError = tileLayer.getUseInterimTilesOnError();
        return (tileState == TileState.LOADED ||
            tileState == TileState.EMPTY ||
            (tileState == TileState.ERROR && !useInterimTilesOnError));
    };
    /**
     * Determine whether renderFrame should be called.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @return {boolean} Layer is ready to be rendered.
     */
    WebGLTileLayerRenderer.prototype.prepareFrameInternal = function (frameState) {
        var layer = this.getLayer();
        var source = layer.getRenderSource();
        if (!source) {
            return false;
        }
        if (isEmpty(getRenderExtent(frameState, frameState.extent))) {
            return false;
        }
        return source.getState() === 'ready';
    };
    /**
     * Create (or reuse cached) tile textures for all tiles covering the extent
     * at `initialZ` and, for preloading, at lower zoom levels, and enqueue the
     * idle ones for loading.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @param {import("../../extent.js").Extent} extent The extent to be rendered.
     * @param {number} initialZ The zoom level.
     * @param {Object<number, Array<TileTexture>>} tileTexturesByZ Lookup of tile textures by zoom level.
     */
    WebGLTileLayerRenderer.prototype.enqueueTiles = function (frameState, extent, initialZ, tileTexturesByZ) {
        var viewState = frameState.viewState;
        var tileLayer = this.getLayer();
        var tileSource = tileLayer.getRenderSource();
        var tileGrid = tileSource.getTileGridForProjection(viewState.projection);
        var gutter = tileSource.getGutterForProjection(viewState.projection);
        var tileSourceKey = getUid(tileSource);
        if (!(tileSourceKey in frameState.wantedTiles)) {
            frameState.wantedTiles[tileSourceKey] = {};
        }
        var wantedTiles = frameState.wantedTiles[tileSourceKey];
        var tileTextureCache = this.tileTextureCache_;
        // Walk from the target zoom level down to the preload limit.
        var minZ = Math.max(initialZ - tileLayer.getPreload(), tileGrid.getMinZoom(), tileLayer.getMinZoom());
        for (var z = initialZ; z >= minZ; --z) {
            var tileRange = tileGrid.getTileRangeForExtentAndZ(extent, z, this.tempTileRange_);
            var tileResolution = tileGrid.getResolution(z);
            for (var x = tileRange.minX; x <= tileRange.maxX; ++x) {
                for (var y = tileRange.minY; y <= tileRange.maxY; ++y) {
                    var tileCoord = createTileCoord(z, x, y, this.tempTileCoord_);
                    var cacheKey = getCacheKey(tileSource, tileCoord);
                    /** @type {TileTexture} */
                    var tileTexture = void 0;
                    /** @type {import("../../webgl/TileTexture").TileType} */
                    var tile = void 0;
                    if (tileTextureCache.containsKey(cacheKey)) {
                        tileTexture = tileTextureCache.get(cacheKey);
                        tile = tileTexture.tile;
                    }
                    // A stale texture (from a previous source revision) gets a fresh tile.
                    if (!tileTexture || tileTexture.tile.key !== tileSource.getKey()) {
                        tile = tileSource.getTile(z, x, y, frameState.pixelRatio, viewState.projection);
                        if (!tileTexture) {
                            tileTexture = new TileTexture({
                                tile: tile,
                                grid: tileGrid,
                                helper: this.helper,
                                gutter: gutter,
                            });
                            tileTextureCache.set(cacheKey, tileTexture);
                        }
                        else {
                            if (this.isDrawableTile_(tile)) {
                                tileTexture.setTile(tile);
                            }
                            else {
                                var interimTile =
                                /** @type {import("../../webgl/TileTexture").TileType} */ (tile.getInterimTile());
                                tileTexture.setTile(interimTile);
                            }
                        }
                    }
                    addTileTextureToLookup(tileTexturesByZ, tileTexture, z);
                    var tileQueueKey = tile.getKey();
                    wantedTiles[tileQueueKey] = true;
                    if (tile.getState() === TileState.IDLE) {
                        if (!frameState.tileQueue.isKeyQueued(tileQueueKey)) {
                            frameState.tileQueue.enqueue([
                                tile,
                                tileSourceKey,
                                tileGrid.getTileCoordCenter(tileCoord),
                                tileResolution,
                            ]);
                        }
                    }
                }
            }
        }
    };
    /**
     * Render the layer.
     * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
     * @return {HTMLElement} The rendered element.
     */
    WebGLTileLayerRenderer.prototype.renderFrame = function (frameState) {
        this.frameState_ = frameState;
        // Assume completion; cleared below when any tile is still loading or transitioning.
        this.renderComplete = true;
        var gl = this.helper.getGL();
        this.preRender(gl, frameState);
        var viewState = frameState.viewState;
        var tileLayer = this.getLayer();
        var tileSource = tileLayer.getRenderSource();
        var tileGrid = tileSource.getTileGridForProjection(viewState.projection);
        var gutter = tileSource.getGutterForProjection(viewState.projection);
        var extent = getRenderExtent(frameState, frameState.extent);
        var z = tileGrid.getZForResolution(viewState.resolution, tileSource.zDirection);
        /**
         * @type {Object<number, Array<import("../../webgl/TileTexture.js").default>>}
         */
        var tileTexturesByZ = {};
        // When a target view state is available (e.g. during an animation),
        // enqueue tiles for it as well.
        if (frameState.nextExtent) {
            var targetZ = tileGrid.getZForResolution(viewState.nextResolution, tileSource.zDirection);
            var nextExtent = getRenderExtent(frameState, frameState.nextExtent);
            this.enqueueTiles(frameState, nextExtent, targetZ, tileTexturesByZ);
        }
        this.enqueueTiles(frameState, extent, z, tileTexturesByZ);
        /**
         * A lookup of alpha values for tiles at the target rendering resolution
         * for tiles that are in transition.  If a tile coord key is absent from
         * this lookup, the tile should be rendered at alpha 1.
         * @type {Object<string, number>}
         */
        var alphaLookup = {};
        var uid = getUid(this);
        var time = frameState.time;
        var blend = false;
        // look for cached tiles to use if a target tile is not ready
        var tileTextures = tileTexturesByZ[z];
        for (var i = 0, ii = tileTextures.length; i < ii; ++i) {
            var tileTexture = tileTextures[i];
            var tile = tileTexture.tile;
            var tileCoord = tile.tileCoord;
            if (tileTexture.loaded) {
                var alpha = tile.getAlpha(uid, time);
                if (alpha === 1) {
                    // no need to look for alt tiles
                    tile.endTransition(uid);
                    continue;
                }
                blend = true;
                var tileCoordKey = getTileCoordKey(tileCoord);
                alphaLookup[tileCoordKey] = alpha;
            }
            this.renderComplete = false;
            // first look for child tiles (at z + 1)
            var coveredByChildren = this.findAltTiles_(tileGrid, tileCoord, z + 1, tileTexturesByZ);
            if (coveredByChildren) {
                continue;
            }
            // next look for parent tiles
            var minZoom = tileGrid.getMinZoom();
            for (var parentZ = z - 1; parentZ >= minZoom; --parentZ) {
                var coveredByParent = this.findAltTiles_(tileGrid, tileCoord, parentZ, tileTexturesByZ);
                if (coveredByParent) {
                    break;
                }
            }
        }
        this.helper.useProgram(this.program_);
        this.helper.prepareDraw(frameState, !blend);
        // Draw each zoom level's tiles, sorted numerically by z.
        var zs = Object.keys(tileTexturesByZ)
            .map(Number)
            .sort(numberSafeCompareFunction);
        var centerX = viewState.center[0];
        var centerY = viewState.center[1];
        for (var j = 0, jj = zs.length; j < jj; ++j) {
            var tileZ = zs[j];
            var tileResolution = tileGrid.getResolution(tileZ);
            var tileSize = toSize(tileGrid.getTileSize(tileZ), this.tempSize_);
            var tileOrigin = tileGrid.getOrigin(tileZ);
            var tileWidthWithGutter = tileSize[0] + 2 * gutter;
            var tileHeightWithGutter = tileSize[1] + 2 * gutter;
            var aspectRatio = tileWidthWithGutter / tileHeightWithGutter;
            var centerI = (centerX - tileOrigin[0]) / (tileSize[0] * tileResolution);
            var centerJ = (tileOrigin[1] - centerY) / (tileSize[1] * tileResolution);
            var tileScale = viewState.resolution / tileResolution;
            var depth = depthForZ(tileZ);
            var tileTextures_1 = tileTexturesByZ[tileZ];
            for (var i = 0, ii = tileTextures_1.length; i < ii; ++i) {
                var tileTexture = tileTextures_1[i];
                if (!tileTexture.loaded) {
                    continue;
                }
                var tile = tileTexture.tile;
                var tileCoord = tile.tileCoord;
                var tileCoordKey = getTileCoordKey(tileCoord);
                var tileCenterI = tileCoord[1];
                var tileCenterJ = tileCoord[2];
                // Build the texture-coordinate -> clip-space transform for this tile.
                // NOTE(review): the y scale below uses tileWidthWithGutter; the
                // subsequent 1 / aspectRatio scale converts it to tile-height units,
                // keeping rotation isotropic — confirm against upstream source.
                resetTransform(this.tileTransform_);
                scaleTransform(this.tileTransform_, 2 / ((frameState.size[0] * tileScale) / tileWidthWithGutter), -2 / ((frameState.size[1] * tileScale) / tileWidthWithGutter));
                rotateTransform(this.tileTransform_, viewState.rotation);
                scaleTransform(this.tileTransform_, 1, 1 / aspectRatio);
                translateTransform(this.tileTransform_, (tileSize[0] * (tileCenterI - centerI) - gutter) /
                    tileWidthWithGutter, (tileSize[1] * (tileCenterJ - centerJ) - gutter) /
                    tileHeightWithGutter);
                this.helper.setUniformMatrixValue(Uniforms.TILE_TRANSFORM, mat4FromTransform(this.tempMat4_, this.tileTransform_));
                this.helper.bindBuffer(tileTexture.coords);
                this.helper.bindBuffer(this.indices_);
                this.helper.enableAttributes(attributeDescriptions);
                // Bind the tile's textures to consecutive texture units.
                var textureSlot = 0;
                while (textureSlot < tileTexture.textures.length) {
                    var textureProperty = 'TEXTURE' + textureSlot;
                    var uniformName = "".concat(Uniforms.TILE_TEXTURE_ARRAY, "[").concat(textureSlot, "]");
                    gl.activeTexture(gl[textureProperty]);
                    gl.bindTexture(gl.TEXTURE_2D, tileTexture.textures[textureSlot]);
                    gl.uniform1i(this.helper.getUniformLocation(uniformName), textureSlot);
                    ++textureSlot;
                }
                // Palette textures follow in the remaining texture units.
                for (var paletteIndex = 0; paletteIndex < this.paletteTextures_.length; ++paletteIndex) {
                    var paletteTexture = this.paletteTextures_[paletteIndex];
                    gl.activeTexture(gl['TEXTURE' + textureSlot]);
                    var texture = paletteTexture.getTexture(gl);
                    gl.bindTexture(gl.TEXTURE_2D, texture);
                    gl.uniform1i(this.helper.getUniformLocation(paletteTexture.name), textureSlot);
                    ++textureSlot;
                }
                var alpha = tileCoordKey in alphaLookup ? alphaLookup[tileCoordKey] : 1;
                if (alpha < 1) {
                    // keep animating while opacity transitions are in progress
                    frameState.animate = true;
                }
                this.helper.setUniformFloatValue(Uniforms.TRANSITION_ALPHA, alpha);
                this.helper.setUniformFloatValue(Uniforms.DEPTH, depth);
                this.helper.setUniformFloatValue(Uniforms.TEXTURE_PIXEL_WIDTH, tileWidthWithGutter);
                this.helper.setUniformFloatValue(Uniforms.TEXTURE_PIXEL_HEIGHT, tileHeightWithGutter);
                this.helper.setUniformFloatValue(Uniforms.TEXTURE_RESOLUTION, tileResolution);
                this.helper.setUniformFloatValue(Uniforms.TEXTURE_ORIGIN_X, tileOrigin[0] +
                    tileCenterI * tileSize[0] * tileResolution -
                    gutter * tileResolution);
                this.helper.setUniformFloatValue(Uniforms.TEXTURE_ORIGIN_Y, tileOrigin[1] -
                    tileCenterJ * tileSize[1] * tileResolution +
                    gutter * tileResolution);
                // With a gutter, clip rendering to the tile extent so gutter pixels
                // never show.
                var gutterExtent = extent;
                if (gutter > 0) {
                    gutterExtent = tileGrid.getTileCoordExtent(tileCoord);
                    getIntersection(gutterExtent, extent, gutterExtent);
                }
                this.helper.setUniformFloatVec4(Uniforms.RENDER_EXTENT, gutterExtent);
                this.helper.setUniformFloatValue(Uniforms.RESOLUTION, viewState.resolution);
                this.helper.setUniformFloatValue(Uniforms.ZOOM, viewState.zoom);
                this.helper.drawElements(0, this.indices_.getSize());
            }
        }
        this.helper.finalizeDraw(frameState, this.dispatchPreComposeEvent, this.dispatchPostComposeEvent);
        var canvas = this.helper.getCanvas();
        // Expire surplus entries from the LRU texture cache.
        var tileTextureCache = this.tileTextureCache_;
        while (tileTextureCache.canExpireCache()) {
            var tileTexture = tileTextureCache.pop();
            tileTexture.dispose();
        }
        // TODO: let the renderers manage their own cache instead of managing the source cache
        /**
         * Here we unconditionally expire the source cache since the renderer maintains
         * its own cache.
         * @param {import("../../PluggableMap.js").default} map Map.
         * @param {import("../../PluggableMap.js").FrameState} frameState Frame state.
         */
        var postRenderFunction = function (map, frameState) {
            tileSource.expireCache(frameState.viewState.projection, empty);
        };
        frameState.postRenderFunctions.push(postRenderFunction);
        this.postRender(gl, frameState);
        return canvas;
    };
    /**
     * Read back pixel data from the cached tile texture covering the pixel,
     * or null when no loaded texture is available.
     * @param {import("../../pixel.js").Pixel} pixel Pixel.
     * @return {Uint8ClampedArray|Uint8Array|Float32Array|DataView} Data at the pixel location.
     */
    WebGLTileLayerRenderer.prototype.getData = function (pixel) {
        var gl = this.helper.getGL();
        if (!gl) {
            return null;
        }
        var frameState = this.frameState_;
        if (!frameState) {
            return null;
        }
        var layer = this.getLayer();
        var coordinate = applyTransform(frameState.pixelToCoordinateTransform, pixel.slice());
        var viewState = frameState.viewState;
        var layerExtent = layer.getExtent();
        if (layerExtent) {
            if (!containsCoordinate(fromUserExtent(layerExtent, viewState.projection), coordinate)) {
                return null;
            }
        }
        // determine last source suitable for rendering at coordinate
        var sources = layer.getSources(boundingExtent([coordinate]), viewState.resolution);
        var i, source, tileGrid;
        for (i = sources.length - 1; i >= 0; --i) {
            source = sources[i];
            if (source.getState() === 'ready') {
                tileGrid = source.getTileGridForProjection(viewState.projection);
                if (source.getWrapX()) {
                    break;
                }
                var gridExtent = tileGrid.getExtent();
                if (!gridExtent || containsCoordinate(gridExtent, coordinate)) {
                    break;
                }
            }
        }
        if (i < 0) {
            return null;
        }
        // Walk from the current zoom level down, returning data from the first
        // loaded tile texture that covers the coordinate.
        var tileTextureCache = this.tileTextureCache_;
        for (var z = tileGrid.getZForResolution(viewState.resolution); z >= tileGrid.getMinZoom(); --z) {
            var tileCoord = tileGrid.getTileCoordForCoordAndZ(coordinate, z);
            var cacheKey = getCacheKey(source, tileCoord);
            if (!tileTextureCache.containsKey(cacheKey)) {
                continue;
            }
            var tileTexture = tileTextureCache.get(cacheKey);
            if (!tileTexture.loaded) {
                continue;
            }
            var tileOrigin = tileGrid.getOrigin(z);
            var tileSize = toSize(tileGrid.getTileSize(z));
            var tileResolution = tileGrid.getResolution(z);
            var col = (coordinate[0] - tileOrigin[0]) / tileResolution -
                tileCoord[1] * tileSize[0];
            var row = (tileOrigin[1] - coordinate[1]) / tileResolution -
                tileCoord[2] * tileSize[1];
            return tileTexture.getPixelData(col, row);
        }
        return null;
    };
    /**
     * Look for tiles covering the provided tile coordinate at an alternate
     * zoom level.  Loaded tiles will be added to the provided tile texture lookup.
     * @param {import("../../tilegrid/TileGrid.js").default} tileGrid The tile grid.
     * @param {import("../../tilecoord.js").TileCoord} tileCoord The target tile coordinate.
     * @param {number} altZ The alternate zoom level.
     * @param {Object<number, Array<import("../../webgl/TileTexture.js").default>>} tileTexturesByZ Lookup of
     * tile textures by zoom level.
     * @return {boolean} The tile coordinate is covered by loaded tiles at the alternate zoom level.
     * @private
     */
    WebGLTileLayerRenderer.prototype.findAltTiles_ = function (tileGrid, tileCoord, altZ, tileTexturesByZ) {
        var tileRange = tileGrid.getTileRangeForTileCoordAndZ(tileCoord, altZ, this.tempTileRange_);
        if (!tileRange) {
            return false;
        }
        var covered = true;
        var tileTextureCache = this.tileTextureCache_;
        var source = this.getLayer().getRenderSource();
        for (var x = tileRange.minX; x <= tileRange.maxX; ++x) {
            for (var y = tileRange.minY; y <= tileRange.maxY; ++y) {
                var cacheKey = getCacheKey(source, [altZ, x, y]);
                var loaded = false;
                if (tileTextureCache.containsKey(cacheKey)) {
                    var tileTexture = tileTextureCache.get(cacheKey);
                    if (tileTexture.loaded) {
                        addTileTextureToLookup(tileTexturesByZ, tileTexture, altZ);
                        loaded = true;
                    }
                }
                if (!loaded) {
                    covered = false;
                }
            }
        }
        return covered;
    };
    /**
     * Dispose of all cached tile textures before the helper goes away.
     */
    WebGLTileLayerRenderer.prototype.removeHelper = function () {
        if (this.helper) {
            var tileTextureCache = this.tileTextureCache_;
            tileTextureCache.forEach(function (tileTexture) { return tileTexture.dispose(); });
            tileTextureCache.clear();
        }
        _super.prototype.removeHelper.call(this);
    };
    /**
     * Clean up.
     */
    WebGLTileLayerRenderer.prototype.disposeInternal = function () {
        var helper = this.helper;
        if (helper) {
            var gl = helper.getGL();
            gl.deleteProgram(this.program_);
            delete this.program_;
            helper.deleteBuffer(this.indices_);
        }
        _super.prototype.disposeInternal.call(this);
        delete this.indices_;
        delete this.tileTextureCache_;
        delete this.frameState_;
    };
    return WebGLTileLayerRenderer;
}(WebGLLayerRenderer));
export default WebGLTileLayerRenderer;
//# sourceMappingURL=TileLayer.js.map

1
node_modules/ol/renderer/webgl/TileLayer.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long