planning
All checks were successful
Publish To Prod / deploy_and_publish (push) Successful in 35s

This commit is contained in:
2024-10-14 09:15:30 +02:00
parent bcba00a730
commit 6e64e138e2
21059 changed files with 2317811 additions and 1 deletions

143
node_modules/ol/src/webgl/Buffer.js generated vendored Normal file
View File

@@ -0,0 +1,143 @@
/**
* @module ol/webgl/Buffer
*/
import {
ARRAY_BUFFER,
DYNAMIC_DRAW,
ELEMENT_ARRAY_BUFFER,
STATIC_DRAW,
STREAM_DRAW,
} from '../webgl.js';
import {assert} from '../asserts.js';
/**
 * Used to describe the intended usage for the data: `STATIC_DRAW`, `STREAM_DRAW`
 * or `DYNAMIC_DRAW`.
 * @enum {number}
 */
export const BufferUsage = {
  // Re-export the raw WebGL usage constants under the enum namespace.
  STATIC_DRAW,
  STREAM_DRAW,
  DYNAMIC_DRAW,
};
/**
 * @classdesc
 * Object used to store an array of data as well as usage information for that data.
 * Stores typed arrays internally, either Float32Array or Uint16/32Array depending on
 * the buffer type (ARRAY_BUFFER or ELEMENT_ARRAY_BUFFER) and available extensions.
 *
 * To populate the array, you can either use:
 * * A size using `#ofSize(size)`
 * * An `ArrayBuffer` object using `#fromArrayBuffer(buffer)`
 * * A plain array using `#fromArray(array)`
 *
 * Note:
 * See the documentation of [WebGLRenderingContext.bufferData](https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/bufferData)
 * for more info on buffer usage.
 * @api
 */
class WebGLArrayBuffer {
  /**
   * @param {number} type Buffer type, either ARRAY_BUFFER or ELEMENT_ARRAY_BUFFER.
   * @param {number} [opt_usage] Intended usage, either `STATIC_DRAW`, `STREAM_DRAW` or `DYNAMIC_DRAW`.
   * Default is `STATIC_DRAW`.
   */
  constructor(type, opt_usage) {
    /**
     * Underlying typed array; stays null until one of `ofSize`, `fromArray`
     * or `fromArrayBuffer` is called.
     * @private
     * @type {Float32Array|Uint32Array}
     */
    this.array = null;

    /**
     * @private
     * @type {number}
     */
    this.type = type;

    // only the two buffer binding targets are supported (assertion code 62)
    assert(type === ARRAY_BUFFER || type === ELEMENT_ARRAY_BUFFER, 62);

    /**
     * @private
     * @type {number}
     */
    this.usage = opt_usage !== undefined ? opt_usage : BufferUsage.STATIC_DRAW;
  }

  /**
   * Populates the buffer with an array of the given size (all values will be zeroes).
   * @param {number} size Array size
   */
  ofSize(size) {
    this.array = new (getArrayClassForType(this.type))(size);
  }

  /**
   * Populates the buffer from a plain numerical array (values are copied into
   * a new typed array matching the buffer type).
   * @param {Array<number>} array Numerical array
   */
  fromArray(array) {
    const arrayClass = getArrayClassForType(this.type);
    // `TypedArray.from` may be absent on very old engines; fall back to the constructor
    this.array = arrayClass.from
      ? arrayClass.from(array)
      : new arrayClass(array);
  }

  /**
   * Populates the buffer with a raw binary array buffer.
   * @param {ArrayBuffer} buffer Raw binary buffer to populate the array with. Note that this buffer must have been
   * initialized for the same typed array class.
   */
  fromArrayBuffer(buffer) {
    this.array = new (getArrayClassForType(this.type))(buffer);
  }

  /**
   * @return {number} Buffer type.
   */
  getType() {
    return this.type;
  }

  /**
   * Will return null if the buffer was not initialized
   * @return {Float32Array|Uint32Array} Array.
   */
  getArray() {
    return this.array;
  }

  /**
   * @return {number} Usage.
   */
  getUsage() {
    return this.usage;
  }

  /**
   * Will return 0 if the buffer is not initialized
   * @return {number} Array size
   */
  getSize() {
    return this.array ? this.array.length : 0;
  }
}
/**
 * Returns a typed array constructor based on the given buffer type
 * @param {number} type Buffer type, either ARRAY_BUFFER or ELEMENT_ARRAY_BUFFER.
 * @return {Float32ArrayConstructor|Uint32ArrayConstructor} The typed array class to use for this buffer.
 */
export function getArrayClassForType(type) {
  // Element (index) buffers hold unsigned integer indices; every other
  // buffer type (including unknown ones) falls back to floats.
  return type === ELEMENT_ARRAY_BUFFER ? Uint32Array : Float32Array;
}
export default WebGLArrayBuffer;

11
node_modules/ol/src/webgl/ContextEventType.js generated vendored Normal file
View File

@@ -0,0 +1,11 @@
/**
* @module ol/webgl/ContextEventType
*/
/**
 * DOM event types dispatched by a canvas when its WebGL rendering context is
 * lost or restored.
 * @enum {string}
 */
export default {
  // Fired when the browser loses the WebGL context (e.g. GPU reset or resource pressure).
  LOST: 'webglcontextlost',
  // Fired when the context becomes usable again; GL resources must be recreated.
  RESTORED: 'webglcontextrestored',
};

1136
node_modules/ol/src/webgl/Helper.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

50
node_modules/ol/src/webgl/PaletteTexture.js generated vendored Normal file
View File

@@ -0,0 +1,50 @@
/**
* @module ol/webgl/PaletteTexture
*/
class PaletteTexture {
  /**
   * @param {string} name The name of the texture.
   * @param {Uint8Array} data The texture data.
   */
  constructor(name, data) {
    this.name = name;
    this.data = data;

    /**
     * Lazily created GL texture; see `getTexture`.
     * @type {WebGLTexture}
     * @private
     */
    this.texture_ = null;
  }

  /**
   * Creates (on first call), uploads and caches a one-row RGBA texture holding
   * the palette data. Subsequent calls return the cached texture.
   * @param {WebGLRenderingContext} gl Rendering context.
   * @return {WebGLTexture} The texture.
   */
  getTexture(gl) {
    if (this.texture_) {
      return this.texture_;
    }
    const paletteTexture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, paletteTexture);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    // one texel per palette entry: width is the number of RGBA tuples, height is 1
    gl.texImage2D(
      gl.TEXTURE_2D,
      0,
      gl.RGBA,
      this.data.length / 4,
      1,
      0,
      gl.RGBA,
      gl.UNSIGNED_BYTE,
      this.data
    );
    this.texture_ = paletteTexture;
    return this.texture_;
  }
}
export default PaletteTexture;

406
node_modules/ol/src/webgl/PostProcessingPass.js generated vendored Normal file
View File

@@ -0,0 +1,406 @@
/**
* @module ol/webgl/PostProcessingPass
*/
import {getUid} from '../util.js';
// Default vertex shader for the post-processing pass: passes the full-screen
// quad straight through and derives texture / screen-pixel coordinates from
// the clip-space positions.
const DEFAULT_VERTEX_SHADER = `
precision mediump float;
attribute vec2 a_position;
varying vec2 v_texCoord;
varying vec2 v_screenCoord;
uniform vec2 u_screenSize;
void main() {
v_texCoord = a_position * 0.5 + 0.5;
v_screenCoord = v_texCoord * u_screenSize;
gl_Position = vec4(a_position, 0.0, 1.0);
}
`;
// Default fragment shader: samples the previous pass's texture and scales it
// by the layer opacity (input colors are expected to be premultiplied).
const DEFAULT_FRAGMENT_SHADER = `
precision mediump float;
uniform sampler2D u_image;
uniform float u_opacity;
varying vec2 v_texCoord;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord) * u_opacity;
}
`;
/**
* @typedef {Object} Options
* @property {WebGLRenderingContext} webGlContext WebGL context; mandatory.
* @property {number} [scaleRatio] Scale ratio; if < 1, the post process will render to a texture smaller than
* the main canvas that will then be sampled up (useful for saving resource on blur steps).
* @property {string} [vertexShader] Vertex shader source
* @property {string} [fragmentShader] Fragment shader source
* @property {Object<string,import("./Helper").UniformValue>} [uniforms] Uniform definitions for the post process step
*/
/**
* @typedef {Object} UniformInternalDescription
* @property {import("./Helper").UniformValue} value Value
* @property {number} location Location
* @property {WebGLTexture} [texture] Texture
* @private
*/
/**
* @classdesc
* This class is used to define Post Processing passes with custom shaders and uniforms.
* This is used internally by {@link module:ol/webgl/Helper~WebGLHelper}.
*
* Please note that the final output on the DOM canvas is expected to have premultiplied alpha, which means that
* a pixel which is 100% red with an opacity of 50% must have a color of (r=0.5, g=0, b=0, a=0.5).
* Failing to provide pixel colors with premultiplied alpha will result in render anomalies.
*
* The default post-processing pass does *not* multiply color values with alpha value, it expects color values to be
* premultiplied.
*
* Default shaders are shown hereafter:
*
* * Vertex shader:
*
* ```
* precision mediump float;
*
* attribute vec2 a_position;
* varying vec2 v_texCoord;
* varying vec2 v_screenCoord;
*
* uniform vec2 u_screenSize;
*
* void main() {
* v_texCoord = a_position * 0.5 + 0.5;
* v_screenCoord = v_texCoord * u_screenSize;
* gl_Position = vec4(a_position, 0.0, 1.0);
* }
* ```
*
* * Fragment shader:
*
* ```
* precision mediump float;
*
* uniform sampler2D u_image;
* uniform float u_opacity;
*
* varying vec2 v_texCoord;
*
* void main() {
* gl_FragColor = texture2D(u_image, v_texCoord) * u_opacity;
* }
* ```
*
* @api
*/
class WebGLPostProcessingPass {
  /**
   * @param {Options} options Options.
   */
  constructor(options) {
    /**
     * @private
     * @type {WebGLRenderingContext}
     */
    this.gl_ = options.webGlContext;
    const gl = this.gl_;

    /**
     * The render target texture is sized to `drawingBuffer * scaleRatio`.
     * @private
     * @type {number}
     */
    this.scaleRatio_ = options.scaleRatio || 1;

    /**
     * @private
     * @type {WebGLTexture}
     */
    this.renderTargetTexture_ = gl.createTexture();

    /**
     * Size of the render target texture; null until the first `init` call.
     * @private
     * @type {Array<number>}
     */
    this.renderTargetTextureSize_ = null;

    /**
     * @private
     * @type {WebGLFramebuffer}
     */
    this.frameBuffer_ = gl.createFramebuffer();

    // compile the program for the frame buffer
    // TODO: make compilation errors show up
    const vertexShader = gl.createShader(gl.VERTEX_SHADER);
    gl.shaderSource(
      vertexShader,
      options.vertexShader || DEFAULT_VERTEX_SHADER
    );
    gl.compileShader(vertexShader);
    const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
    gl.shaderSource(
      fragmentShader,
      options.fragmentShader || DEFAULT_FRAGMENT_SHADER
    );
    gl.compileShader(fragmentShader);
    this.renderTargetProgram_ = gl.createProgram();
    gl.attachShader(this.renderTargetProgram_, vertexShader);
    gl.attachShader(this.renderTargetProgram_, fragmentShader);
    gl.linkProgram(this.renderTargetProgram_);

    // bind the vertices buffer for the frame buffer:
    // two triangles covering the whole clip space
    this.renderTargetVerticesBuffer_ = gl.createBuffer();
    const verticesArray = [-1, -1, 1, -1, -1, 1, 1, -1, 1, 1, -1, 1];
    gl.bindBuffer(gl.ARRAY_BUFFER, this.renderTargetVerticesBuffer_);
    gl.bufferData(
      gl.ARRAY_BUFFER,
      new Float32Array(verticesArray),
      gl.STATIC_DRAW
    );

    // look up the locations used by the default shaders
    this.renderTargetAttribLocation_ = gl.getAttribLocation(
      this.renderTargetProgram_,
      'a_position'
    );
    this.renderTargetUniformLocation_ = gl.getUniformLocation(
      this.renderTargetProgram_,
      'u_screenSize'
    );
    this.renderTargetOpacityLocation_ = gl.getUniformLocation(
      this.renderTargetProgram_,
      'u_opacity'
    );
    this.renderTargetTextureLocation_ = gl.getUniformLocation(
      this.renderTargetProgram_,
      'u_image'
    );

    /**
     * Holds info about custom uniforms used in the post processing pass
     * @type {Array<UniformInternalDescription>}
     * @private
     */
    this.uniforms_ = [];
    if (options.uniforms) {
      for (const name of Object.keys(options.uniforms)) {
        this.uniforms_.push({
          value: options.uniforms[name],
          location: gl.getUniformLocation(this.renderTargetProgram_, name),
        });
      }
    }
  }

  /**
   * Get the WebGL rendering context
   * @return {WebGLRenderingContext} The rendering context.
   * @api
   */
  getGL() {
    return this.gl_;
  }

  /**
   * Initialize the render target texture of the post process, make sure it is at the
   * right size and bind it as a render target for the next draw calls.
   * The last step to be initialized will be the one where the primitives are rendered.
   * @param {import("../PluggableMap.js").FrameState} frameState current frame state
   * @api
   */
  init(frameState) {
    const gl = this.getGL();
    const textureSize = [
      gl.drawingBufferWidth * this.scaleRatio_,
      gl.drawingBufferHeight * this.scaleRatio_,
    ];

    // rendering goes to this pass's own buffer
    gl.bindFramebuffer(gl.FRAMEBUFFER, this.getFrameBuffer());
    gl.viewport(0, 0, textureSize[0], textureSize[1]);

    // if size has changed: reallocate the render target texture and re-attach it
    if (
      !this.renderTargetTextureSize_ ||
      this.renderTargetTextureSize_[0] !== textureSize[0] ||
      this.renderTargetTextureSize_[1] !== textureSize[1]
    ) {
      this.renderTargetTextureSize_ = textureSize;

      // create a new texture
      const level = 0;
      const internalFormat = gl.RGBA;
      const border = 0;
      const format = gl.RGBA;
      const type = gl.UNSIGNED_BYTE;
      const data = null; // allocate only, no initial pixels
      gl.bindTexture(gl.TEXTURE_2D, this.renderTargetTexture_);
      gl.texImage2D(
        gl.TEXTURE_2D,
        level,
        internalFormat,
        textureSize[0],
        textureSize[1],
        border,
        format,
        type,
        data
      );
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

      // bind the texture to the framebuffer
      gl.framebufferTexture2D(
        gl.FRAMEBUFFER,
        gl.COLOR_ATTACHMENT0,
        gl.TEXTURE_2D,
        this.renderTargetTexture_,
        0
      );
    }
  }

  /**
   * Render to the next postprocessing pass (or to the canvas if final pass).
   * @param {import("../PluggableMap.js").FrameState} frameState current frame state
   * @param {WebGLPostProcessingPass} [nextPass] Next pass, optional
   * @param {function(WebGLRenderingContext, import("../PluggableMap.js").FrameState):void} [preCompose] Called before composing.
   * @param {function(WebGLRenderingContext, import("../PluggableMap.js").FrameState):void} [postCompose] Called after composing.
   * @api
   */
  apply(frameState, nextPass, preCompose, postCompose) {
    const gl = this.getGL();
    const size = frameState.size;

    gl.bindFramebuffer(
      gl.FRAMEBUFFER,
      nextPass ? nextPass.getFrameBuffer() : null
    );
    gl.activeTexture(gl.TEXTURE0);
    gl.bindTexture(gl.TEXTURE_2D, this.renderTargetTexture_);

    if (!nextPass) {
      // clear the canvas if we are the first to render to it
      // and preserveDrawingBuffer is true
      const canvasId = getUid(gl.canvas);
      if (!frameState.renderTargets[canvasId]) {
        const attributes = gl.getContextAttributes();
        if (attributes && attributes.preserveDrawingBuffer) {
          gl.clearColor(0.0, 0.0, 0.0, 0.0);
          gl.clear(gl.COLOR_BUFFER_BIT);
        }
        frameState.renderTargets[canvasId] = true;
      }
    }

    // premultiplied-alpha blending (see classdesc)
    gl.enable(gl.BLEND);
    gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
    gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);

    gl.bindBuffer(gl.ARRAY_BUFFER, this.renderTargetVerticesBuffer_);
    gl.useProgram(this.renderTargetProgram_);
    gl.enableVertexAttribArray(this.renderTargetAttribLocation_);
    gl.vertexAttribPointer(
      this.renderTargetAttribLocation_,
      2,
      gl.FLOAT,
      false,
      0,
      0
    );
    gl.uniform2f(this.renderTargetUniformLocation_, size[0], size[1]);
    gl.uniform1i(this.renderTargetTextureLocation_, 0);

    const opacity = frameState.layerStatesArray[frameState.layerIndex].opacity;
    gl.uniform1f(this.renderTargetOpacityLocation_, opacity);

    this.applyUniforms(frameState);

    if (preCompose) {
      preCompose(gl, frameState);
    }
    gl.drawArrays(gl.TRIANGLES, 0, 6);
    if (postCompose) {
      postCompose(gl, frameState);
    }
  }

  /**
   * @return {WebGLFramebuffer} Frame buffer
   * @api
   */
  getFrameBuffer() {
    return this.frameBuffer_;
  }

  /**
   * Sets the custom uniforms based on what was given in the constructor.
   * @param {import("../PluggableMap.js").FrameState} frameState Frame state.
   * @private
   */
  applyUniforms(frameState) {
    const gl = this.getGL();

    let value;
    let textureSlot = 1; // slot 0 is reserved for the input image texture
    this.uniforms_.forEach(function (uniform) {
      value =
        typeof uniform.value === 'function'
          ? uniform.value(frameState)
          : uniform.value;

      // apply value based on type
      if (value instanceof HTMLCanvasElement || value instanceof ImageData) {
        // create a texture & put data
        if (!uniform.texture) {
          uniform.texture = gl.createTexture();
        }
        gl.activeTexture(gl[`TEXTURE${textureSlot}`]);
        gl.bindTexture(gl.TEXTURE_2D, uniform.texture);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        if (value instanceof ImageData) {
          // 9-argument overload: (target, level, internalformat, width, height,
          // border, format, type, pixels) — the previous argument order here
          // was invalid (format and dimensions were swapped)
          gl.texImage2D(
            gl.TEXTURE_2D,
            0,
            gl.RGBA,
            value.width,
            value.height,
            0,
            gl.RGBA,
            gl.UNSIGNED_BYTE,
            new Uint8Array(value.data)
          );
        } else {
          // 6-argument DOM-source overload
          gl.texImage2D(
            gl.TEXTURE_2D,
            0,
            gl.RGBA,
            gl.RGBA,
            gl.UNSIGNED_BYTE,
            value
          );
        }

        // fill texture slots sequentially
        gl.uniform1i(uniform.location, textureSlot++);
      } else if (Array.isArray(value)) {
        switch (value.length) {
          case 2:
            gl.uniform2f(uniform.location, value[0], value[1]);
            return;
          case 3:
            gl.uniform3f(uniform.location, value[0], value[1], value[2]);
            return;
          case 4:
            gl.uniform4f(
              uniform.location,
              value[0],
              value[1],
              value[2],
              value[3]
            );
            return;
          default:
            return;
        }
      } else if (typeof value === 'number') {
        gl.uniform1f(uniform.location, value);
      }
    });
  }
}
export default WebGLPostProcessingPass;

187
node_modules/ol/src/webgl/RenderTarget.js generated vendored Normal file
View File

@@ -0,0 +1,187 @@
/**
* A wrapper class to simplify rendering to a texture instead of the final canvas
* @module ol/webgl/RenderTarget
*/
import {equals} from '../array.js';
// for pixel color reading; shared scratch buffer, reused across calls
// to avoid per-read allocations (callers must copy if they keep the result)
const tmpArray4 = new Uint8Array(4);

/**
 * @classdesc
 * This class is a wrapper around the association of both a `WebGLTexture` and a `WebGLFramebuffer` instances,
 * simplifying initialization and binding for rendering.
 * @api
 */
class WebGLRenderTarget {
  /**
   * @param {import("./Helper.js").default} helper WebGL helper; mandatory.
   * @param {Array<number>} [opt_size] Expected size of the render target texture; note: this can be changed later on.
   */
  constructor(helper, opt_size) {
    /**
     * @private
     * @type {import("./Helper.js").default}
     */
    this.helper_ = helper;
    const gl = helper.getGL();

    /**
     * @private
     * @type {WebGLTexture}
     */
    this.texture_ = gl.createTexture();

    /**
     * @private
     * @type {WebGLFramebuffer}
     */
    this.framebuffer_ = gl.createFramebuffer();

    /**
     * @type {Array<number>}
     * @private
     */
    this.size_ = opt_size || [1, 1];

    /**
     * Cached framebuffer contents; reallocated on every size change.
     * @type {Uint8Array}
     * @private
     */
    this.data_ = new Uint8Array(0);

    /**
     * When true, the next `readAll`/`readPixel` triggers a (costly) readPixels download.
     * @type {boolean}
     * @private
     */
    this.dataCacheDirty_ = true;

    this.updateSize_();
  }

  /**
   * Changes the size of the render target texture. Note: will do nothing if the size
   * is already the same.
   * @param {Array<number>} size Expected size of the render target texture
   * @api
   */
  setSize(size) {
    if (equals(size, this.size_)) {
      return;
    }
    this.size_[0] = size[0];
    this.size_[1] = size[1];
    this.updateSize_();
  }

  /**
   * Returns the size of the render target texture
   * @return {Array<number>} Size of the render target texture
   * @api
   */
  getSize() {
    return this.size_;
  }

  /**
   * This will cause following calls to `#readAll` or `#readPixel` to download the content of the
   * render target into memory, which is an expensive operation.
   * This content will be kept in cache but should be cleared after each new render.
   * @api
   */
  clearCachedData() {
    this.dataCacheDirty_ = true;
  }

  /**
   * Returns the full content of the frame buffer as a series of r, g, b, a components
   * in the 0-255 range (unsigned byte).
   * @return {Uint8Array} Integer array of color values
   * @api
   */
  readAll() {
    if (this.dataCacheDirty_) {
      const size = this.size_;
      const gl = this.helper_.getGL();

      gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer_);
      gl.readPixels(
        0,
        0,
        size[0],
        size[1],
        gl.RGBA,
        gl.UNSIGNED_BYTE,
        this.data_
      );
      this.dataCacheDirty_ = false;
    }
    return this.data_;
  }

  /**
   * Reads one pixel of the frame buffer as an array of r, g, b, a components
   * in the 0-255 range (unsigned byte).
   * If x and/or y are outside of existing data, an array filled with 0 is returned.
   * @param {number} x Pixel coordinate
   * @param {number} y Pixel coordinate
   * @return {Uint8Array} Integer array with one color value (4 components)
   * @api
   */
  readPixel(x, y) {
    // both upper bounds are exclusive: a pixel at x == width does not exist.
    // (The x check previously used `>`, which let x == width read a pixel
    // from the wrong row.)
    if (x < 0 || y < 0 || x >= this.size_[0] || y >= this.size_[1]) {
      tmpArray4[0] = 0;
      tmpArray4[1] = 0;
      tmpArray4[2] = 0;
      tmpArray4[3] = 0;
      return tmpArray4;
    }

    this.readAll();
    // rows in the GL framebuffer are bottom-up, hence the y flip
    const index =
      Math.floor(x) + (this.size_[1] - Math.floor(y) - 1) * this.size_[0];
    tmpArray4[0] = this.data_[index * 4];
    tmpArray4[1] = this.data_[index * 4 + 1];
    tmpArray4[2] = this.data_[index * 4 + 2];
    tmpArray4[3] = this.data_[index * 4 + 3];
    return tmpArray4;
  }

  /**
   * @return {WebGLTexture} Texture to render to
   */
  getTexture() {
    return this.texture_;
  }

  /**
   * @return {WebGLFramebuffer} Frame buffer of the render target
   */
  getFramebuffer() {
    return this.framebuffer_;
  }

  /**
   * Reallocates the texture and the cached data buffer, and re-attaches the
   * texture to the framebuffer, for the current `size_`.
   * @private
   */
  updateSize_() {
    const size = this.size_;
    const gl = this.helper_.getGL();

    this.texture_ = this.helper_.createTexture(size, null, this.texture_);

    gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer_);
    gl.viewport(0, 0, size[0], size[1]);
    gl.framebufferTexture2D(
      gl.FRAMEBUFFER,
      gl.COLOR_ATTACHMENT0,
      gl.TEXTURE_2D,
      this.texture_,
      0
    );

    this.data_ = new Uint8Array(size[0] * size[1] * 4);
  }
}
export default WebGLRenderTarget;

592
node_modules/ol/src/webgl/ShaderBuilder.js generated vendored Normal file
View File

@@ -0,0 +1,592 @@
/**
* Classes and utilities for generating shaders from literal style objects
* @module ol/webgl/ShaderBuilder
*/
import {
ValueTypes,
expressionToGlsl,
getStringNumberEquivalent,
uniformNameForVariable,
} from '../style/expressions.js';
/**
* @typedef {Object} VaryingDescription
* @property {string} name Varying name, as will be declared in the header.
* @property {string} type Varying type, either `float`, `vec2`, `vec4`...
* @property {string} expression Expression which will be assigned to the varying in the vertex shader, and
* passed on to the fragment shader.
*/
/**
* @classdesc
* This class implements a classic builder pattern for generating many different types of shaders.
* Methods can be chained, e. g.:
*
* ```js
* const shader = new ShaderBuilder()
* .addVarying('v_width', 'float', 'a_width')
* .addUniform('u_time')
* .setColorExpression('...')
* .setSizeExpression('...')
* .outputSymbolFragmentShader();
* ```
*/
export class ShaderBuilder {
  constructor() {
    /**
     * Uniforms; these will be declared in the header (should include the type).
     * @type {Array<string>}
     * @private
     */
    this.uniforms = [];

    /**
     * Attributes; these will be declared in the header (should include the type).
     * @type {Array<string>}
     * @private
     */
    this.attributes = [];

    /**
     * Varyings with a name, a type and an expression.
     * @type {Array<VaryingDescription>}
     * @private
     */
    this.varyings = [];

    /**
     * @type {string}
     * @private
     */
    this.sizeExpression = 'vec2(1.0)';

    /**
     * @type {string}
     * @private
     */
    this.rotationExpression = '0.0';

    /**
     * @type {string}
     * @private
     */
    this.offsetExpression = 'vec2(0.0)';

    /**
     * @type {string}
     * @private
     */
    this.colorExpression = 'vec4(1.0)';

    /**
     * @type {string}
     * @private
     */
    this.texCoordExpression = 'vec4(0.0, 0.0, 1.0, 1.0)';

    /**
     * @type {string}
     * @private
     */
    this.discardExpression = 'false';

    /**
     * @type {boolean}
     * @private
     */
    this.rotateWithView = false;
  }

  /**
   * Adds a uniform accessible in both fragment and vertex shaders.
   * The given name should include a type, such as `sampler2D u_texture`.
   * @param {string} name Uniform name
   * @return {ShaderBuilder} the builder object
   */
  addUniform(name) {
    this.uniforms.push(name);
    return this;
  }

  /**
   * Adds an attribute accessible in the vertex shader, read from the geometry buffer.
   * The given name should include a type, such as `vec2 a_position`.
   * @param {string} name Attribute name
   * @return {ShaderBuilder} the builder object
   */
  addAttribute(name) {
    this.attributes.push(name);
    return this;
  }

  /**
   * Adds a varying defined in the vertex shader and accessible from the fragment shader.
   * The type and expression of the varying have to be specified separately.
   * @param {string} name Varying name
   * @param {'float'|'vec2'|'vec3'|'vec4'} type Type
   * @param {string} expression Expression used to assign a value to the varying.
   * @return {ShaderBuilder} the builder object
   */
  addVarying(name, type, expression) {
    this.varyings.push({
      name: name,
      type: type,
      expression: expression,
    });
    return this;
  }

  /**
   * Sets an expression to compute the size of the shape.
   * This expression can use all the uniforms and attributes available
   * in the vertex shader, and should evaluate to a `vec2` value.
   * @param {string} expression Size expression
   * @return {ShaderBuilder} the builder object
   */
  setSizeExpression(expression) {
    this.sizeExpression = expression;
    return this;
  }

  /**
   * Sets an expression to compute the rotation of the shape.
   * This expression can use all the uniforms and attributes available
   * in the vertex shader, and should evaluate to a `float` value in radians.
   * @param {string} expression Rotation expression
   * @return {ShaderBuilder} the builder object
   */
  setRotationExpression(expression) {
    this.rotationExpression = expression;
    return this;
  }

  /**
   * Sets an expression to compute the offset of the symbol from the point center.
   * This expression can use all the uniforms and attributes available
   * in the vertex shader, and should evaluate to a `vec2` value.
   * Note: will only be used for point geometry shaders.
   * @param {string} expression Offset expression
   * @return {ShaderBuilder} the builder object
   */
  setSymbolOffsetExpression(expression) {
    this.offsetExpression = expression;
    return this;
  }

  /**
   * Sets an expression to compute the color of the shape.
   * This expression can use all the uniforms, varyings and attributes available
   * in the fragment shader, and should evaluate to a `vec4` value.
   * @param {string} expression Color expression
   * @return {ShaderBuilder} the builder object
   */
  setColorExpression(expression) {
    this.colorExpression = expression;
    return this;
  }

  /**
   * Sets an expression to compute the texture coordinates of the vertices.
   * This expression can use all the uniforms and attributes available
   * in the vertex shader, and should evaluate to a `vec4` value.
   * @param {string} expression Texture coordinate expression
   * @return {ShaderBuilder} the builder object
   */
  setTextureCoordinateExpression(expression) {
    this.texCoordExpression = expression;
    return this;
  }

  /**
   * Sets an expression to determine whether a fragment (pixel) should be discarded,
   * i.e. not drawn at all.
   * This expression can use all the uniforms, varyings and attributes available
   * in the fragment shader, and should evaluate to a `bool` value (it will be
   * used in an `if` statement)
   * @param {string} expression Fragment discard expression
   * @return {ShaderBuilder} the builder object
   */
  setFragmentDiscardExpression(expression) {
    this.discardExpression = expression;
    return this;
  }

  /**
   * Sets whether the symbols should rotate with the view or stay aligned with the map.
   * Note: will only be used for point geometry shaders.
   * @param {boolean} rotateWithView Rotate with view
   * @return {ShaderBuilder} the builder object
   */
  setSymbolRotateWithView(rotateWithView) {
    this.rotateWithView = rotateWithView;
    return this;
  }

  /**
   * @return {string} Previously set size expression
   */
  getSizeExpression() {
    return this.sizeExpression;
  }

  /**
   * @return {string} Previously set rotation expression
   */
  getRotationExpression() {
    return this.rotationExpression;
  }

  /**
   * @return {string} Previously set symbol offset expression
   */
  getOffsetExpression() {
    return this.offsetExpression;
  }

  /**
   * @return {string} Previously set color expression
   */
  getColorExpression() {
    return this.colorExpression;
  }

  /**
   * @return {string} Previously set texture coordinate expression
   */
  getTextureCoordinateExpression() {
    return this.texCoordExpression;
  }

  /**
   * @return {string} Previously set fragment discard expression
   */
  getFragmentDiscardExpression() {
    return this.discardExpression;
  }

  /**
   * Generates a symbol vertex shader from the builder parameters,
   * intended to be used on point geometries.
   *
   * Three uniforms are hardcoded in all shaders: `u_projectionMatrix`, `u_offsetScaleMatrix`,
   * `u_offsetRotateMatrix`, `u_time`.
   *
   * The following attributes are hardcoded and expected to be present in the vertex buffers:
   * `vec2 a_position`, `float a_index` (being the index of the vertex in the quad, 0 to 3).
   *
   * The following varyings are hardcoded and gives the coordinate of the pixel both in the quad and on the texture:
   * `vec2 v_quadCoord`, `vec2 v_texCoord`
   *
   * @param {boolean} [forHitDetection] If true, the shader will be modified to include hit detection variables
   * (namely, hit color with encoded feature id).
   * @return {string} The full shader as a string.
   */
  getSymbolVertexShader(forHitDetection) {
    const offsetMatrix = this.rotateWithView
      ? 'u_offsetScaleMatrix * u_offsetRotateMatrix'
      : 'u_offsetScaleMatrix';

    let attributes = this.attributes;
    let varyings = this.varyings;

    if (forHitDetection) {
      // hit detection renders the feature id as a color; transmit it as a varying
      attributes = attributes.concat('vec4 a_hitColor');
      varyings = varyings.concat({
        name: 'v_hitColor',
        type: 'vec4',
        expression: 'a_hitColor',
      });
    }

    return `precision mediump float;
uniform mat4 u_projectionMatrix;
uniform mat4 u_offsetScaleMatrix;
uniform mat4 u_offsetRotateMatrix;
uniform float u_time;
uniform float u_zoom;
uniform float u_resolution;
${this.uniforms
  .map(function (uniform) {
    return 'uniform ' + uniform + ';';
  })
  .join('\n')}
attribute vec2 a_position;
attribute float a_index;
${attributes
  .map(function (attribute) {
    return 'attribute ' + attribute + ';';
  })
  .join('\n')}
varying vec2 v_texCoord;
varying vec2 v_quadCoord;
${varyings
  .map(function (varying) {
    return 'varying ' + varying.type + ' ' + varying.name + ';';
  })
  .join('\n')}
void main(void) {
mat4 offsetMatrix = ${offsetMatrix};
vec2 halfSize = ${this.sizeExpression} * 0.5;
vec2 offset = ${this.offsetExpression};
float angle = ${this.rotationExpression};
float offsetX;
float offsetY;
if (a_index == 0.0) {
offsetX = (offset.x - halfSize.x) * cos(angle) + (offset.y - halfSize.y) * sin(angle);
offsetY = (offset.y - halfSize.y) * cos(angle) - (offset.x - halfSize.x) * sin(angle);
} else if (a_index == 1.0) {
offsetX = (offset.x + halfSize.x) * cos(angle) + (offset.y - halfSize.y) * sin(angle);
offsetY = (offset.y - halfSize.y) * cos(angle) - (offset.x + halfSize.x) * sin(angle);
} else if (a_index == 2.0) {
offsetX = (offset.x + halfSize.x) * cos(angle) + (offset.y + halfSize.y) * sin(angle);
offsetY = (offset.y + halfSize.y) * cos(angle) - (offset.x + halfSize.x) * sin(angle);
} else {
offsetX = (offset.x - halfSize.x) * cos(angle) + (offset.y + halfSize.y) * sin(angle);
offsetY = (offset.y + halfSize.y) * cos(angle) - (offset.x - halfSize.x) * sin(angle);
}
vec4 offsets = offsetMatrix * vec4(offsetX, offsetY, 0.0, 0.0);
gl_Position = u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets;
vec4 texCoord = ${this.texCoordExpression};
float u = a_index == 0.0 || a_index == 3.0 ? texCoord.s : texCoord.p;
float v = a_index == 2.0 || a_index == 3.0 ? texCoord.t : texCoord.q;
v_texCoord = vec2(u, v);
u = a_index == 0.0 || a_index == 3.0 ? 0.0 : 1.0;
v = a_index == 2.0 || a_index == 3.0 ? 0.0 : 1.0;
v_quadCoord = vec2(u, v);
${varyings
  .map(function (varying) {
    return ' ' + varying.name + ' = ' + varying.expression + ';';
  })
  .join('\n')}
}`;
  }

  /**
   * Generates a symbol fragment shader from the builder parameters,
   * intended to be used on point geometries.
   *
   * Expects the following varyings to be transmitted by the vertex shader:
   * `vec2 v_quadCoord`, `vec2 v_texCoord`
   *
   * @param {boolean} [forHitDetection] If true, the shader will be modified to include hit detection variables
   * (namely, hit color with encoded feature id).
   * @return {string} The full shader as a string.
   */
  getSymbolFragmentShader(forHitDetection) {
    // for hit detection, replace the output color with the hit color,
    // discarding nearly-transparent fragments
    const hitDetectionBypass = forHitDetection
      ? ' if (gl_FragColor.a < 0.1) { discard; } gl_FragColor = v_hitColor;'
      : '';

    let varyings = this.varyings;

    if (forHitDetection) {
      varyings = varyings.concat({
        name: 'v_hitColor',
        type: 'vec4',
        expression: 'a_hitColor',
      });
    }

    return `precision mediump float;
uniform float u_time;
uniform float u_zoom;
uniform float u_resolution;
${this.uniforms
  .map(function (uniform) {
    return 'uniform ' + uniform + ';';
  })
  .join('\n')}
varying vec2 v_texCoord;
varying vec2 v_quadCoord;
${varyings
  .map(function (varying) {
    return 'varying ' + varying.type + ' ' + varying.name + ';';
  })
  .join('\n')}
void main(void) {
if (${this.discardExpression}) { discard; }
gl_FragColor = ${this.colorExpression};
gl_FragColor.rgb *= gl_FragColor.a;
${hitDetectionBypass}
}`;
  }
}
/**
* @typedef {Object} StyleParseResult
* @property {ShaderBuilder} builder Shader builder pre-configured according to a given style
* @property {Object<string,import("./Helper").UniformValue>} uniforms Uniform definitions.
* @property {Array<import("../renderer/webgl/PointsLayer").CustomAttribute>} attributes Attribute descriptions.
*/
/**
 * Parses a {@link import("../style/literal").LiteralStyle} object and returns a {@link ShaderBuilder}
 * object that has been configured according to the given style, as well as `attributes` and `uniforms`
 * arrays to be fed to the `WebGLPointsRenderer` class.
 *
 * Also returns `uniforms` and `attributes` properties as expected by the
 * {@link module:ol/renderer/webgl/PointsLayer~WebGLPointsLayerRenderer}.
 *
 * @param {import("../style/literal").LiteralStyle} style Literal style.
 * @return {StyleParseResult} Result containing shader params, attributes and uniforms.
 */
export function parseLiteralStyle(style) {
  const symbStyle = style.symbol;
  // Apply defaults for the optional symbol properties.
  const size = symbStyle.size !== undefined ? symbStyle.size : 1;
  const color = symbStyle.color || 'white';
  const texCoord = symbStyle.textureCoord || [0, 0, 1, 1];
  const offset = symbStyle.offset || [0, 0];
  const opacity = symbStyle.opacity !== undefined ? symbStyle.opacity : 1;
  const rotation = symbStyle.rotation !== undefined ? symbStyle.rotation : 0;
  // NOTE: parsing contexts are mutated by expressionToGlsl (attributes,
  // variables and string literals encountered are registered on them), so the
  // order of the expressionToGlsl calls below is significant.
  /**
   * @type {import("../style/expressions.js").ParsingContext}
   */
  const vertContext = {
    inFragmentShader: false,
    variables: [],
    attributes: [],
    stringLiteralsMap: {},
    functions: {},
  };
  const parsedSize = expressionToGlsl(
    vertContext,
    size,
    ValueTypes.NUMBER_ARRAY | ValueTypes.NUMBER
  );
  const parsedOffset = expressionToGlsl(
    vertContext,
    offset,
    ValueTypes.NUMBER_ARRAY
  );
  const parsedTexCoord = expressionToGlsl(
    vertContext,
    texCoord,
    ValueTypes.NUMBER_ARRAY
  );
  const parsedRotation = expressionToGlsl(
    vertContext,
    rotation,
    ValueTypes.NUMBER
  );
  // The fragment context shares the variables and string-literal map with the
  // vertex context so numeric encodings of strings stay consistent, but tracks
  // its own attributes (those need varyings, handled further down).
  /**
   * @type {import("../style/expressions.js").ParsingContext}
   */
  const fragContext = {
    inFragmentShader: true,
    variables: vertContext.variables,
    attributes: [],
    stringLiteralsMap: vertContext.stringLiteralsMap,
    functions: {},
  };
  const parsedColor = expressionToGlsl(fragContext, color, ValueTypes.COLOR);
  const parsedOpacity = expressionToGlsl(
    fragContext,
    opacity,
    ValueTypes.NUMBER
  );
  // GLSL expression multiplied into the alpha channel; shapes other than
  // 'square'/'image' override it with a smoothstep-based mask.
  let opacityFilter = '1.0';
  // Size re-parsed in the fragment context: the mask below needs the size
  // available in the fragment shader.
  const visibleSize = `vec2(${expressionToGlsl(
    fragContext,
    size,
    ValueTypes.NUMBER_ARRAY | ValueTypes.NUMBER
  )}).x`;
  switch (symbStyle.symbolType) {
    case 'square':
      break;
    case 'image':
      break;
    // taken from https://thebookofshaders.com/07/
    case 'circle':
      opacityFilter = `(1.0-smoothstep(1.-4./${visibleSize},1.,dot(v_quadCoord-.5,v_quadCoord-.5)*4.))`;
      break;
    case 'triangle':
      const st = '(v_quadCoord*2.-1.)';
      const a = `(atan(${st}.x,${st}.y))`;
      opacityFilter = `(1.0-smoothstep(.5-3./${visibleSize},.5,cos(floor(.5+${a}/2.094395102)*2.094395102-${a})*length(${st})))`;
      break;
    default:
      throw new Error('Unexpected symbol type: ' + symbStyle.symbolType);
  }
  const builder = new ShaderBuilder()
    .setSizeExpression(`vec2(${parsedSize})`)
    .setRotationExpression(parsedRotation)
    .setSymbolOffsetExpression(parsedOffset)
    .setTextureCoordinateExpression(parsedTexCoord)
    .setSymbolRotateWithView(!!symbStyle.rotateWithView)
    .setColorExpression(
      `vec4(${parsedColor}.rgb, ${parsedColor}.a * ${parsedOpacity} * ${opacityFilter})`
    );
  if (style.filter) {
    // The filter keeps matching features; the shader discards, so negate it.
    const parsedFilter = expressionToGlsl(
      fragContext,
      style.filter,
      ValueTypes.BOOLEAN
    );
    builder.setFragmentDiscardExpression(`!${parsedFilter}`);
  }
  /** @type {Object<string,import("../webgl/Helper").UniformValue>} */
  const uniforms = {};
  // define one uniform per variable
  fragContext.variables.forEach(function (varName) {
    const uniformName = uniformNameForVariable(varName);
    builder.addUniform(`float ${uniformName}`);
    // The uniform value callback reads style.variables at render time, so
    // variable changes are picked up without re-parsing the style.
    uniforms[uniformName] = function () {
      if (!style.variables || style.variables[varName] === undefined) {
        throw new Error(
          `The following variable is missing from the style: ${varName}`
        );
      }
      let value = style.variables[varName];
      if (typeof value === 'string') {
        value = getStringNumberEquivalent(vertContext, value);
      }
      return value !== undefined ? value : -9999999; // to avoid matching with the first string literal
    };
  });
  if (symbStyle.symbolType === 'image' && symbStyle.src) {
    // Load the symbol image and sample it in the color expression.
    const texture = new Image();
    texture.crossOrigin =
      symbStyle.crossOrigin === undefined ? 'anonymous' : symbStyle.crossOrigin;
    texture.src = symbStyle.src;
    builder
      .addUniform('sampler2D u_texture')
      .setColorExpression(
        builder.getColorExpression() + ' * texture2D(u_texture, v_texCoord)'
      );
    uniforms['u_texture'] = texture;
  }
  // for each feature attribute used in the fragment shader, define a varying that will be used to pass data
  // from the vertex to the fragment shader, as well as an attribute in the vertex shader (if not already present)
  fragContext.attributes.forEach(function (attrName) {
    if (vertContext.attributes.indexOf(attrName) === -1) {
      vertContext.attributes.push(attrName);
    }
    builder.addVarying(`v_${attrName}`, 'float', `a_${attrName}`);
  });
  // for each feature attribute used in the vertex shader, define an attribute in the vertex shader.
  vertContext.attributes.forEach(function (attrName) {
    builder.addAttribute(`float a_${attrName}`);
  });
  return {
    builder: builder,
    attributes: vertContext.attributes.map(function (attributeName) {
      return {
        name: attributeName,
        // Reads the attribute value from the feature properties; strings are
        // encoded as numbers via the shared string-literal map.
        callback: function (feature, props) {
          let value = props[attributeName];
          if (typeof value === 'string') {
            value = getStringNumberEquivalent(vertContext, value);
          }
          return value !== undefined ? value : -9999999; // to avoid matching with the first string literal
        },
      };
    }),
    uniforms: uniforms,
  };
}

410
node_modules/ol/src/webgl/TileTexture.js generated vendored Normal file
View File

@@ -0,0 +1,410 @@
/**
* @module ol/webgl/TileTexture
*/
import DataTile from '../DataTile.js';
import EventTarget from '../events/Target.js';
import EventType from '../events/EventType.js';
import ImageTile from '../ImageTile.js';
import ReprojTile from '../reproj/Tile.js';
import TileState from '../TileState.js';
import WebGLArrayBuffer from './Buffer.js';
import {ARRAY_BUFFER, STATIC_DRAW} from '../webgl.js';
import {toSize} from '../size.js';
/**
 * Binds the texture to the TEXTURE_2D target and applies the standard tile
 * texture parameters: clamp-to-edge wrapping on both axes and a min/mag
 * filter derived from the `interpolate` flag.
 * @param {WebGLRenderingContext} gl The WebGL context.
 * @param {WebGLTexture} texture The texture.
 * @param {boolean} interpolate Interpolate when resampling.
 */
function bindAndConfigure(gl, texture, interpolate) {
  const filter = interpolate ? gl.LINEAR : gl.NEAREST;
  gl.bindTexture(gl.TEXTURE_2D, texture);
  const parameters = [
    [gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE],
    [gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE],
    [gl.TEXTURE_MIN_FILTER, filter],
    [gl.TEXTURE_MAG_FILTER, filter],
  ];
  for (const [pname, value] of parameters) {
    gl.texParameteri(gl.TEXTURE_2D, pname, value);
  }
}
/**
 * Configures the texture for tile use and uploads the image to it as RGBA
 * unsigned bytes.
 * @param {WebGLRenderingContext} gl The WebGL context.
 * @param {WebGLTexture} texture The texture.
 * @param {HTMLImageElement|HTMLCanvasElement|HTMLVideoElement} image The image.
 * @param {boolean} interpolate Interpolate when resampling.
 */
function uploadImageTexture(gl, texture, image, interpolate) {
  bindAndConfigure(gl, texture, interpolate);
  const target = gl.TEXTURE_2D;
  gl.texImage2D(target, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
}
/**
 * Uploads raw band data to the given texture.  The texture format is chosen
 * from the band count (1 band = LUMINANCE … 4 bands = RGBA) and the unpack
 * alignment is chosen to match the row stride of the data.
 * @param {import("./Helper.js").default} helper The WebGL helper.
 * @param {WebGLTexture} texture The texture.
 * @param {import("../DataTile.js").Data} data The pixel data.
 * @param {import("../size.js").Size} size The pixel size.
 * @param {number} bandCount The band count.
 * @param {boolean} interpolate Interpolate when resampling.
 */
function uploadDataTexture(
  helper,
  texture,
  data,
  size,
  bandCount,
  interpolate
) {
  const gl = helper.getGL();

  // Float data requires the float texture extensions; linear filtering of
  // float textures is only possible when OES_texture_float_linear is present.
  let textureType;
  let canInterpolate;
  if (data instanceof Float32Array) {
    textureType = gl.FLOAT;
    helper.getExtension('OES_texture_float');
    const extension = helper.getExtension('OES_texture_float_linear');
    canInterpolate = extension !== null;
  } else {
    textureType = gl.UNSIGNED_BYTE;
    canInterpolate = true;
  }
  bindAndConfigure(gl, texture, interpolate && canInterpolate);

  // Pick the largest unpack alignment that divides the row stride.
  const bytesPerRow = data.byteLength / size[1];
  let unpackAlignment = 1;
  for (const candidate of [8, 4, 2]) {
    if (bytesPerRow % candidate === 0) {
      unpackAlignment = candidate;
      break;
    }
  }

  const formatForBandCount = [
    gl.LUMINANCE,
    gl.LUMINANCE_ALPHA,
    gl.RGB,
    gl.RGBA,
  ];
  const format = formatForBandCount[bandCount - 1];
  if (format === undefined) {
    throw new Error(`Unsupported number of bands: ${bandCount}`);
  }

  // Temporarily override the unpack alignment for the upload, then restore it.
  const oldUnpackAlignment = gl.getParameter(gl.UNPACK_ALIGNMENT);
  gl.pixelStorei(gl.UNPACK_ALIGNMENT, unpackAlignment);
  gl.texImage2D(
    gl.TEXTURE_2D,
    0,
    format,
    size[0],
    size[1],
    0,
    format,
    textureType,
    data
  );
  gl.pixelStorei(gl.UNPACK_ALIGNMENT, oldUnpackAlignment);
}
/**
* @type {CanvasRenderingContext2D}
*/
let pixelContext = null;
/**
 * Creates the shared 1x1 canvas 2d context used by getPixelData() to read a
 * single pixel back from image tiles, and stores it in `pixelContext`.
 */
function createPixelContext() {
  const singlePixelCanvas = document.createElement('canvas');
  singlePixelCanvas.width = 1;
  singlePixelCanvas.height = 1;
  pixelContext = singlePixelCanvas.getContext('2d');
}
/**
* @typedef {import("../DataTile.js").default|ImageTile|ReprojTile} TileType
*/
/**
* @typedef {Object} Options
* @property {TileType} tile The tile.
* @property {import("../tilegrid/TileGrid.js").default} grid Tile grid.
* @property {import("../webgl/Helper.js").default} helper WebGL helper.
* @property {number} [gutter=0] The size in pixels of the gutter around image tiles to ignore.
*/
class TileTexture extends EventTarget {
  /**
   * @param {Options} options The tile texture options.
   */
  constructor(options) {
    super();

    /**
     * @type {TileType}
     */
    this.tile;

    /**
     * One texture per group of up to 4 bands (a single RGBA texture for
     * image tiles).
     * @type {Array<WebGLTexture>}
     */
    this.textures = [];

    // Bound once so the same function reference can later be removed in
    // setTile() and disposeInternal().
    this.handleTileChange_ = this.handleTileChange_.bind(this);

    /**
     * Size of the tile in render space (excludes the gutter).
     * @type {import("../size.js").Size}
     * @private
     */
    this.renderSize_ = toSize(
      options.grid.getTileSize(options.tile.tileCoord[0])
    );

    /**
     * @type {number}
     * @private
     */
    this.gutter_ = options.gutter || 0;

    /**
     * Number of bands in the tile data; NaN until a tile has been uploaded.
     * @type {number}
     */
    this.bandCount = NaN;

    /**
     * @type {import("../webgl/Helper.js").default}
     * @private
     */
    this.helper_ = options.helper;

    /**
     * Whether the current tile is in the LOADED state (kept up to date by
     * setTile() and handleTileChange_()).
     * @type {boolean}
     */
    this.loaded = false;

    // Static texture coordinates for the tile quad.
    const coords = new WebGLArrayBuffer(ARRAY_BUFFER, STATIC_DRAW);
    coords.fromArray([
      0, // P0
      1,
      1, // P1
      1,
      1, // P2
      0,
      0, // P3
      0,
    ]);
    this.helper_.flushBufferData(coords);

    /**
     * @type {WebGLArrayBuffer}
     */
    this.coords = coords;

    this.setTile(options.tile);
  }

  /**
   * Sets the tile to render.  If the tile is already loaded its data is
   * uploaded immediately; otherwise a change listener triggers the upload
   * once loading completes.
   * @param {TileType} tile Tile.
   */
  setTile(tile) {
    if (tile !== this.tile) {
      if (this.tile) {
        this.tile.removeEventListener(EventType.CHANGE, this.handleTileChange_);
      }
      this.tile = tile;
      this.textures.length = 0;
      this.loaded = tile.getState() === TileState.LOADED;
      if (this.loaded) {
        this.uploadTile_();
      } else {
        if (tile instanceof ImageTile) {
          const image = tile.getImage();
          if (image instanceof Image && !image.crossOrigin) {
            // allow reading pixel data back from the image later
            image.crossOrigin = 'anonymous';
          }
        }
        tile.addEventListener(EventType.CHANGE, this.handleTileChange_);
      }
    }
  }

  /**
   * Uploads the tile data to one or more textures.  Image tiles become a
   * single RGBA texture; data tiles are de-interleaved into
   * `ceil(bandCount / 4)` textures of up to 4 bands each.
   * @private
   */
  uploadTile_() {
    const helper = this.helper_;
    const gl = helper.getGL();
    const tile = this.tile;

    if (tile instanceof ImageTile || tile instanceof ReprojTile) {
      const texture = gl.createTexture();
      this.textures.push(texture);
      this.bandCount = 4;
      uploadImageTexture(gl, texture, tile.getImage(), tile.interpolate);
      return;
    }

    const sourceTileSize = tile.getSize();
    const pixelSize = [
      sourceTileSize[0] + 2 * this.gutter_,
      sourceTileSize[1] + 2 * this.gutter_,
    ];
    const data = tile.getData();
    const isFloat = data instanceof Float32Array;
    const pixelCount = pixelSize[0] * pixelSize[1];

    const DataType = isFloat ? Float32Array : Uint8Array;
    const bytesPerElement = DataType.BYTES_PER_ELEMENT;
    const bytesPerRow = data.byteLength / pixelSize[1];

    // Band count is derived from the row stride, so rows may carry padding.
    this.bandCount = Math.floor(bytesPerRow / bytesPerElement / pixelSize[0]);
    const textureCount = Math.ceil(this.bandCount / 4);

    if (textureCount === 1) {
      const texture = gl.createTexture();
      this.textures.push(texture);
      uploadDataTexture(
        helper,
        texture,
        data,
        pixelSize,
        this.bandCount,
        tile.interpolate
      );
      return;
    }

    const textureDataArrays = new Array(textureCount);
    for (let textureIndex = 0; textureIndex < textureCount; ++textureIndex) {
      const texture = gl.createTexture();
      this.textures.push(texture);

      // The last texture holds the remaining 1 to 4 bands.  Note that
      // `this.bandCount % 4` would incorrectly yield 0 bands here whenever
      // the band count is a multiple of 4 (e.g. 8 bands), so map the
      // remainder into the 1..4 range instead.
      const bandCount =
        textureIndex < textureCount - 1 ? 4 : ((this.bandCount - 1) % 4) + 1;
      textureDataArrays[textureIndex] = new DataType(pixelCount * bandCount);
    }

    // De-interleave the source data into the per-texture arrays.
    let dataIndex = 0;
    let rowOffset = 0;
    const colCount = pixelSize[0] * this.bandCount;
    for (let rowIndex = 0; rowIndex < pixelSize[1]; ++rowIndex) {
      for (let colIndex = 0; colIndex < colCount; ++colIndex) {
        const dataValue = data[rowOffset + colIndex];

        const pixelIndex = Math.floor(dataIndex / this.bandCount);
        const bandIndex = colIndex % this.bandCount;
        const textureIndex = Math.floor(bandIndex / 4);
        const textureData = textureDataArrays[textureIndex];
        const bandCount = textureData.length / pixelCount;
        const textureBandIndex = bandIndex % 4;
        textureData[pixelIndex * bandCount + textureBandIndex] = dataValue;

        ++dataIndex;
      }
      // Advance by the row stride (in elements), not colCount, in case rows
      // are padded.
      rowOffset += bytesPerRow / bytesPerElement;
    }

    for (let textureIndex = 0; textureIndex < textureCount; ++textureIndex) {
      const texture = this.textures[textureIndex];
      const textureData = textureDataArrays[textureIndex];
      const bandCount = textureData.length / pixelCount;
      uploadDataTexture(
        helper,
        texture,
        textureData,
        pixelSize,
        bandCount,
        tile.interpolate
      );
    }
  }

  /**
   * Uploads the tile once it reaches the LOADED state and notifies listeners.
   * @private
   */
  handleTileChange_() {
    if (this.tile.getState() === TileState.LOADED) {
      this.loaded = true;
      this.uploadTile_();
      this.dispatchEvent(EventType.CHANGE);
    }
  }

  /**
   * Releases the WebGL resources and removes the tile change listener.
   */
  disposeInternal() {
    const gl = this.helper_.getGL();
    this.helper_.deleteBuffer(this.coords);
    for (let i = 0; i < this.textures.length; ++i) {
      gl.deleteTexture(this.textures[i]);
    }
    this.tile.removeEventListener(EventType.CHANGE, this.handleTileChange_);
  }

  /**
   * Get data for a pixel.  If the tile is not loaded, null is returned.
   * @param {number} renderCol The column index (in rendered tile space).
   * @param {number} renderRow The row index (in rendered tile space).
   * @return {import("../DataTile.js").Data|null} The data.
   */
  getPixelData(renderCol, renderRow) {
    if (!this.loaded) {
      return null;
    }

    const renderWidth = this.renderSize_[0];
    const renderHeight = this.renderSize_[1];
    const gutter = this.gutter_;

    if (this.tile instanceof DataTile) {
      const sourceSize = this.tile.getSize();
      const sourceWidthWithoutGutter = sourceSize[0];
      const sourceHeightWithoutGutter = sourceSize[1];
      const sourceWidth = sourceWidthWithoutGutter + 2 * gutter;
      const sourceHeight = sourceHeightWithoutGutter + 2 * gutter;

      // Map render space to source space, then offset past the gutter.
      const sourceCol =
        gutter +
        Math.floor(sourceWidthWithoutGutter * (renderCol / renderWidth));
      const sourceRow =
        gutter +
        Math.floor(sourceHeightWithoutGutter * (renderRow / renderHeight));

      const data = this.tile.getData();
      if (data instanceof DataView) {
        const bytesPerPixel = data.byteLength / (sourceWidth * sourceHeight);
        const offset = bytesPerPixel * (sourceRow * sourceWidth + sourceCol);
        const buffer = data.buffer.slice(offset, offset + bytesPerPixel);
        return new DataView(buffer);
      }

      const offset = this.bandCount * (sourceRow * sourceWidth + sourceCol);
      return data.slice(offset, offset + this.bandCount);
    }

    // Image tile: read the pixel back through a shared 1x1 canvas.
    if (!pixelContext) {
      createPixelContext();
    }
    pixelContext.clearRect(0, 0, 1, 1);

    const image = this.tile.getImage();
    const sourceWidth = image.width;
    const sourceHeight = image.height;
    const sourceWidthWithoutGutter = sourceWidth - 2 * gutter;
    const sourceHeightWithoutGutter = sourceHeight - 2 * gutter;
    const sourceCol =
      gutter + Math.floor(sourceWidthWithoutGutter * (renderCol / renderWidth));
    const sourceRow =
      gutter +
      Math.floor(sourceHeightWithoutGutter * (renderRow / renderHeight));

    let data;
    try {
      pixelContext.drawImage(image, sourceCol, sourceRow, 1, 1, 0, 0, 1, 1);
      data = pixelContext.getImageData(0, 0, 1, 1).data;
    } catch (err) {
      // drawImage/getImageData can throw (e.g. tainted canvas); drop the
      // context so it gets recreated on the next call.
      pixelContext = null;
      return null;
    }
    return data;
  }
}
export default TileTexture;