planning
All checks were successful
Publish To Prod / deploy_and_publish (push) Successful in 35s

This commit is contained in:
2024-10-14 09:15:30 +02:00
parent bcba00a730
commit 6e64e138e2
21059 changed files with 2317811 additions and 1 deletions

View File

@@ -0,0 +1,4 @@
/**
 * Base class for TIFF block decoders. Subclasses implement `decodeBlock`;
 * `decode` runs it and additionally reverses the TIFF predictor step when
 * `fileDirectory.Predictor` is set to a value other than 1.
 */
export default class BaseDecoder {
decode(fileDirectory: any, buffer: any): Promise<any>;
}
//# sourceMappingURL=basedecoder.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"basedecoder.d.ts","sourceRoot":"","sources":["../../dist-module/compression/basedecoder.js"],"names":[],"mappings":"AAEA;IACE,sDAeC;CACF"}

View File

@@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const predictor_js_1 = require("../predictor.js");
/**
 * Abstract base class for compression decoders. Subclasses provide
 * `decodeBlock(buffer)`; `decode` invokes it and, when the file directory
 * declares a predictor other than 1, undoes the predictor transform.
 */
class BaseDecoder {
    /**
     * Decode a single compressed block and reverse any TIFF predictor.
     * @param {object} fileDirectory IFD carrying Predictor/tile/strip tags
     * @param {ArrayBuffer} buffer raw compressed block bytes
     * @returns {Promise<any>} decoded (and de-predicted) data
     */
    async decode(fileDirectory, buffer) {
        const decoded = await this.decodeBlock(buffer);
        const predictor = fileDirectory.Predictor || 1;
        if (predictor === 1) {
            return decoded;
        }
        // Tiled files carry no StripOffsets; choose dimensions accordingly.
        const tiled = !fileDirectory.StripOffsets;
        const width = tiled ? fileDirectory.TileWidth : fileDirectory.ImageWidth;
        const height = tiled
            ? fileDirectory.TileLength
            : (fileDirectory.RowsPerStrip || fileDirectory.ImageLength);
        return (0, predictor_js_1.applyPredictor)(decoded, predictor, width, height, fileDirectory.BitsPerSample, fileDirectory.PlanarConfiguration);
    }
}
exports.default = BaseDecoder;
//# sourceMappingURL=basedecoder.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"basedecoder.js","sourceRoot":"","sources":["../../dist-module/compression/basedecoder.js"],"names":[],"mappings":";;AAAA,kDAAiD;AAEjD,MAAqB,WAAW;IAC9B,KAAK,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM;QAChC,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QAC/C,MAAM,SAAS,GAAG,aAAa,CAAC,SAAS,IAAI,CAAC,CAAC;QAC/C,IAAI,SAAS,KAAK,CAAC,EAAE;YACnB,MAAM,OAAO,GAAG,CAAC,aAAa,CAAC,YAAY,CAAC;YAC5C,MAAM,SAAS,GAAG,OAAO,CAAC,CAAC,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC,CAAC,aAAa,CAAC,UAAU,CAAC;YAC/E,MAAM,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,aAAa,CAAC,UAAU,CAAC,CAAC,CAAC,CACtD,aAAa,CAAC,YAAY,IAAI,aAAa,CAAC,WAAW,CACxD,CAAC;YACF,OAAO,IAAA,6BAAc,EACnB,OAAO,EAAE,SAAS,EAAE,SAAS,EAAE,UAAU,EAAE,aAAa,CAAC,aAAa,EACtE,aAAa,CAAC,mBAAmB,CAClC,CAAC;SACH;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;CACF;AAjBD,8BAiBC"}

View File

@@ -0,0 +1,5 @@
/** Decoder for DEFLATE-compressed blocks (TIFF Compression tags 8 and 32946). */
export default class DeflateDecoder extends BaseDecoder {
decodeBlock(buffer: any): any;
}
import BaseDecoder from "./basedecoder.js";
//# sourceMappingURL=deflate.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"deflate.d.ts","sourceRoot":"","sources":["../../dist-module/compression/deflate.js"],"names":[],"mappings":"AAGA;IACE,8BAEC;CACF"}

14
node_modules/geotiff/dist-node/compression/deflate.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const pako_1 = require("pako");
const basedecoder_js_1 = __importDefault(require("./basedecoder.js"));
/** Decoder for DEFLATE-compressed TIFF blocks (Compression 8 / 32946). */
class DeflateDecoder extends basedecoder_js_1.default {
    /**
     * Inflate one compressed block.
     * @param {ArrayBuffer} buffer zlib/deflate-compressed bytes
     * @returns {ArrayBuffer} decompressed bytes
     */
    decodeBlock(buffer) {
        const compressed = new Uint8Array(buffer);
        const inflated = (0, pako_1.inflate)(compressed);
        return inflated.buffer;
    }
}
exports.default = DeflateDecoder;
//# sourceMappingURL=deflate.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"deflate.js","sourceRoot":"","sources":["../../dist-module/compression/deflate.js"],"names":[],"mappings":";;;;;AAAA,+BAA+B;AAC/B,sEAA2C;AAE3C,MAAqB,cAAe,SAAQ,wBAAW;IACrD,WAAW,CAAC,MAAM;QAChB,OAAO,IAAA,cAAO,EAAC,IAAI,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;IAChD,CAAC;CACF;AAJD,iCAIC"}

View File

@@ -0,0 +1,3 @@
/** Register a lazy decoder import for one or more TIFF Compression tag values. */
export function addDecoder(cases: any, importFn: any): void;
/** Look up and instantiate the decoder registered for `fileDirectory.Compression`. */
export function getDecoder(fileDirectory: any): Promise<any>;
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../dist-module/compression/index.js"],"names":[],"mappings":"AAEA,4DAKC;AAED,6DAOC"}

51
node_modules/geotiff/dist-node/compression/index.js generated vendored Normal file
View File

@@ -0,0 +1,51 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getDecoder = exports.addDecoder = void 0;
// Maps TIFF Compression tag values to lazy decoder import functions.
const registry = new Map();
/**
 * Register a decoder importer under one or several Compression tag values.
 * @param {number|number[]} cases tag value(s) to associate
 * @param {Function} importFn lazy importer resolving to a decoder class
 */
function addDecoder(cases, importFn) {
    const keys = Array.isArray(cases) ? cases : [cases];
    for (const key of keys) {
        registry.set(key, importFn);
    }
}
exports.addDecoder = addDecoder;
/**
 * Instantiate the decoder registered for the directory's Compression tag.
 * @param {object} fileDirectory IFD whose Compression value selects a decoder
 * @returns {Promise<object>} a constructed decoder instance
 * @throws {Error} when no decoder is registered for the tag value
 */
async function getDecoder(fileDirectory) {
    const compression = fileDirectory.Compression;
    const importFn = registry.get(compression);
    if (!importFn) {
        throw new Error(`Unknown compression method identifier: ${compression}`);
    }
    const Decoder = await importFn();
    return new Decoder(fileDirectory);
}
exports.getDecoder = getDecoder;
// Add default decoders to registry (end-user may override with other implementations)
// Tag values map to modules: 1 = raw, 5 = LZW, 7 = JPEG, 8/32946 = deflate,
// 32773 = PackBits, 34887 = LERC, 50001 = webimage.
addDecoder([undefined, 1], () => Promise.resolve().then(() => __importStar(require('./raw.js'))).then((m) => m.default));
addDecoder(5, () => Promise.resolve().then(() => __importStar(require('./lzw.js'))).then((m) => m.default));
addDecoder(6, () => {
// Tag 6 (old-style JPEG) is deliberately rejected rather than decoded.
throw new Error('old style JPEG compression is not supported.');
});
addDecoder(7, () => Promise.resolve().then(() => __importStar(require('./jpeg.js'))).then((m) => m.default));
addDecoder([8, 32946], () => Promise.resolve().then(() => __importStar(require('./deflate.js'))).then((m) => m.default));
addDecoder(32773, () => Promise.resolve().then(() => __importStar(require('./packbits.js'))).then((m) => m.default));
addDecoder(34887, () => Promise.resolve().then(() => __importStar(require('./lerc.js'))).then((m) => m.default));
addDecoder(50001, () => Promise.resolve().then(() => __importStar(require('./webimage.js'))).then((m) => m.default));
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../dist-module/compression/index.js"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAE,CAAC;AAE3B,SAAgB,UAAU,CAAC,KAAK,EAAE,QAAQ;IACxC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QACzB,KAAK,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,wCAAwC;KAC1D;IACD,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AAClD,CAAC;AALD,gCAKC;AAEM,KAAK,UAAU,UAAU,CAAC,aAAa;IAC5C,MAAM,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;IACzD,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,KAAK,CAAC,0CAA0C,aAAa,CAAC,WAAW,EAAE,CAAC,CAAC;KACxF;IACD,MAAM,OAAO,GAAG,MAAM,QAAQ,EAAE,CAAC;IACjC,OAAO,IAAI,OAAO,CAAC,aAAa,CAAC,CAAC;AACpC,CAAC;AAPD,gCAOC;AAED,sFAAsF;AACtF,UAAU,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,kDAAO,UAAU,IAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAC5E,UAAU,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,kDAAO,UAAU,IAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAC/D,UAAU,CAAC,CAAC,EAAE,GAAG,EAAE;IACjB,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC,CAAC;AAClE,CAAC,CAAC,CAAC;AACH,UAAU,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,kDAAO,WAAW,IAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAChE,UAAU,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,GAAG,EAAE,CAAC,kDAAO,cAAc,IAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAC5E,UAAU,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,kDAAO,eAAe,IAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AACxE,UAAU,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,kDAAO,WAAW,IAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AACpE,UAAU,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,kDAAO,eAAe,IAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC"}

36
node_modules/geotiff/dist-node/compression/jpeg.d.ts generated vendored Normal file
View File

@@ -0,0 +1,36 @@
/** Decoder for JPEG-compressed blocks (TIFF Compression tag 7). */
export default class JpegDecoder extends BaseDecoder {
constructor(fileDirectory: any);
reader: JpegStreamReader;
decodeBlock(buffer: any): ArrayBufferLike;
}
import BaseDecoder from "./basedecoder.js";
/**
 * Streaming JPEG marker-segment parser and baseline/progressive decoder.
 * Collects JFIF/Adobe metadata, quantization and Huffman tables, frames,
 * and produces interleaved 8-bit samples via `getResult`.
 */
declare class JpegStreamReader {
jfif: {
version: {
major: any;
minor: any;
};
densityUnits: any;
xDensity: number;
yDensity: number;
thumbWidth: any;
thumbHeight: any;
thumbData: any;
} | null;
adobe: {
version: any;
flags0: number;
flags1: number;
transformCode: any;
} | null;
quantizationTables: any[];
huffmanTablesAC: any[];
huffmanTablesDC: any[];
resetFrames(): void;
frames: any[] | undefined;
parse(data: any): void;
resetInterval: number | undefined;
getResult(): Uint8Array;
}
export {};
//# sourceMappingURL=jpeg.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"jpeg.d.ts","sourceRoot":"","sources":["../../dist-module/compression/jpeg.js"],"names":[],"mappings":"AAk3BA;IACE,gCAMC;IAJC,yBAAoC;IAMtC,0CAIC;CACF;;AAxUD;IAEI;;;;;;;;;;;aAAgB;IAChB;;;;;aAAiB;IAEjB,0BAA4B;IAC5B,uBAAyB;IACzB,uBAAyB;IAI3B,oBAEC;IADC,0BAAgB;IAGlB,uBA0PC;IA5CO,kCAAiC;IA8CzC,wBA4CC;CACF"}

853
node_modules/geotiff/dist-node/compression/jpeg.js generated vendored Normal file
View File

@@ -0,0 +1,853 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const basedecoder_js_1 = __importDefault(require("./basedecoder.js"));
/* -*- tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- /
/* vim: set shiftwidth=2 tabstop=2 autoindent cindent expandtab: */
/*
Copyright 2011 notmasteryet
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// - The JPEG specification can be found in the ITU CCITT Recommendation T.81
// (www.w3.org/Graphics/JPEG/itu-t81.pdf)
// - The JFIF specification can be found in the JPEG File Interchange Format
// (www.w3.org/Graphics/JPEG/jfif3.pdf)
// - The Adobe Application-Specific JPEG markers in the Supporting the DCT Filters
// in PostScript Level 2, Technical Note #5116
// (partners.adobe.com/public/developer/en/ps/sdk/5116.DCT_Filter.pdf)
// Zig-zag scan order: maps scan position to the natural (row-major)
// coefficient index within an 8x8 DCT block (ITU-T T.81, Figure A.6).
const dctZigZag = new Int32Array([
0,
1, 8,
16, 9, 2,
3, 10, 17, 24,
32, 25, 18, 11, 4,
5, 12, 19, 26, 33, 40,
48, 41, 34, 27, 20, 13, 6,
7, 14, 21, 28, 35, 42, 49, 56,
57, 50, 43, 36, 29, 22, 15,
23, 30, 37, 44, 51, 58,
59, 52, 45, 38, 31,
39, 46, 53, 60,
61, 54, 47,
55, 62,
63,
]);
// Fixed-point IDCT constants scaled by 4096 (2^12), used by quantizeAndInverse.
const dctCos1 = 4017; // cos(pi/16)
const dctSin1 = 799; // sin(pi/16)
const dctCos3 = 3406; // cos(3*pi/16)
const dctSin3 = 2276; // sin(3*pi/16)
const dctCos6 = 1567; // cos(6*pi/16)
const dctSin6 = 3784; // sin(6*pi/16)
const dctSqrt2 = 5793; // sqrt(2)
const dctSqrt1d2 = 2896; // sqrt(2) / 2
/**
 * Build a Huffman decoding tree from a DHT segment.
 * @param codeLengths 16-entry array: number of codes of each bit length 1..16
 * @param values symbol values in order of increasing code length
 * @returns nested `children` arrays; indexing by successive bits reaches a
 *   numeric symbol value at the leaves (consumed by decodeHuffman below)
 */
function buildHuffmanTable(codeLengths, values) {
let k = 0;
const code = [];
let length = 16;
// Drop trailing zero counts so the loop only visits used bit lengths.
while (length > 0 && !codeLengths[length - 1]) {
--length;
}
// `code` acts as a stack of partially-filled nodes, one per tree depth.
code.push({ children: [], index: 0 });
let p = code[0];
let q;
for (let i = 0; i < length; i++) {
for (let j = 0; j < codeLengths[i]; j++) {
p = code.pop();
p.children[p.index] = values[k];
// Backtrack to the nearest ancestor with a free (right) slot.
while (p.index > 0) {
p = code.pop();
}
p.index++;
code.push(p);
// Descend back down to depth i, creating interior nodes as needed.
while (code.length <= i) {
code.push(q = { children: [], index: 0 });
p.children[p.index] = q.children;
p = q;
}
k++;
}
if (i + 1 < length) {
// p here points to last code
code.push(q = { children: [], index: 0 });
p.children[p.index] = q.children;
p = q;
}
}
return code[0].children;
}
/**
 * Decode one entropy-coded scan (the data following an SOS marker) into the
 * participating components' DCT coefficient blocks, honoring restart markers.
 * @param data Uint8Array containing the JPEG stream
 * @param initialOffset byte offset where the scan's entropy data begins
 * @param frame current frame object (supplies mcusPerLine / progressive)
 * @param components components selected by this scan
 * @param resetInterval restart interval in MCUs; falsy means the whole scan
 * @param spectralStart start of spectral selection (progressive mode)
 * @param spectralEnd end of spectral selection (progressive mode)
 * @param successivePrev previous successive-approximation high bit position
 * @param successive current successive-approximation low bit position
 * @returns number of bytes consumed from `data`
 */
function decodeScan(data, initialOffset, frame, components, resetInterval, spectralStart, spectralEnd, successivePrev, successive) {
const { mcusPerLine, progressive } = frame;
const startOffset = initialOffset;
let offset = initialOffset;
// MSB-first bit reader state shared by the nested decode helpers.
let bitsData = 0;
let bitsCount = 0;
function readBit() {
if (bitsCount > 0) {
bitsCount--;
return (bitsData >> bitsCount) & 1;
}
bitsData = data[offset++];
// 0xFF in entropy data must be followed by a stuffed 0x00 byte.
if (bitsData === 0xFF) {
const nextByte = data[offset++];
if (nextByte) {
throw new Error(`unexpected marker: ${((bitsData << 8) | nextByte).toString(16)}`);
}
// unstuff 0
}
bitsCount = 7;
return bitsData >>> 7;
}
// Walk the Huffman tree one bit at a time until a numeric leaf is reached.
// NOTE(review): readBit never returns null, so this loop exits only via
// the numeric return or the throw below.
function decodeHuffman(tree) {
let node = tree;
let bit;
while ((bit = readBit()) !== null) { // eslint-disable-line no-cond-assign
node = node[bit];
if (typeof node === 'number') {
return node;
}
if (typeof node !== 'object') {
throw new Error('invalid huffman sequence');
}
}
return null;
}
// Read `initialLength` raw bits as an unsigned integer.
function receive(initialLength) {
let length = initialLength;
let n = 0;
while (length > 0) {
const bit = readBit();
if (bit === null) {
return undefined;
}
n = (n << 1) | bit;
--length;
}
return n;
}
// Read `length` bits and sign-extend per T.81 Table F.2 (EXTEND procedure).
function receiveAndExtend(length) {
const n = receive(length);
if (n >= 1 << (length - 1)) {
return n;
}
return n + (-1 << length) + 1;
}
// Baseline: DC difference followed by run-length coded AC coefficients.
function decodeBaseline(component, zz) {
const t = decodeHuffman(component.huffmanTableDC);
const diff = t === 0 ? 0 : receiveAndExtend(t);
component.pred += diff;
zz[0] = component.pred;
let k = 1;
while (k < 64) {
const rs = decodeHuffman(component.huffmanTableAC);
const s = rs & 15;
const r = rs >> 4;
if (s === 0) {
if (r < 15) {
break;
}
k += 16;
}
else {
k += r;
const z = dctZigZag[k];
zz[z] = receiveAndExtend(s);
k++;
}
}
}
// Progressive: first DC scan (high-order bits).
function decodeDCFirst(component, zz) {
const t = decodeHuffman(component.huffmanTableDC);
const diff = t === 0 ? 0 : (receiveAndExtend(t) << successive);
component.pred += diff;
zz[0] = component.pred;
}
// Progressive: DC refinement scan (one extra bit of precision).
function decodeDCSuccessive(component, zz) {
zz[0] |= readBit() << successive;
}
let eobrun = 0;
// Progressive: first AC scan within the spectral band.
function decodeACFirst(component, zz) {
if (eobrun > 0) {
eobrun--;
return;
}
let k = spectralStart;
const e = spectralEnd;
while (k <= e) {
const rs = decodeHuffman(component.huffmanTableAC);
const s = rs & 15;
const r = rs >> 4;
if (s === 0) {
if (r < 15) {
eobrun = receive(r) + (1 << r) - 1;
break;
}
k += 16;
}
else {
k += r;
const z = dctZigZag[k];
zz[z] = receiveAndExtend(s) * (1 << successive);
k++;
}
}
}
let successiveACState = 0;
let successiveACNextValue;
// Progressive: AC refinement scan, implemented as a small state machine
// (states: 0 initial, 1/2 skipping zero runs, 3 placing a new value, 4 EOB).
function decodeACSuccessive(component, zz) {
let k = spectralStart;
const e = spectralEnd;
let r = 0;
while (k <= e) {
const z = dctZigZag[k];
const direction = zz[z] < 0 ? -1 : 1;
switch (successiveACState) {
case 0: { // initial state
const rs = decodeHuffman(component.huffmanTableAC);
const s = rs & 15;
r = rs >> 4;
if (s === 0) {
if (r < 15) {
eobrun = receive(r) + (1 << r);
successiveACState = 4;
}
else {
r = 16;
successiveACState = 1;
}
}
else {
if (s !== 1) {
throw new Error('invalid ACn encoding');
}
successiveACNextValue = receiveAndExtend(s);
successiveACState = r ? 2 : 3;
}
continue; // eslint-disable-line no-continue
}
case 1: // skipping r zero items
case 2:
if (zz[z]) {
zz[z] += (readBit() << successive) * direction;
}
else {
r--;
if (r === 0) {
successiveACState = successiveACState === 2 ? 3 : 0;
}
}
break;
case 3: // set value for a zero item
if (zz[z]) {
zz[z] += (readBit() << successive) * direction;
}
else {
zz[z] = successiveACNextValue << successive;
successiveACState = 0;
}
break;
case 4: // eob
if (zz[z]) {
zz[z] += (readBit() << successive) * direction;
}
break;
default:
break;
}
k++;
}
if (successiveACState === 4) {
eobrun--;
if (eobrun === 0) {
successiveACState = 0;
}
}
}
// Locate and decode one block of an interleaved (multi-component) MCU.
function decodeMcu(component, decodeFunction, mcu, row, col) {
const mcuRow = (mcu / mcusPerLine) | 0;
const mcuCol = mcu % mcusPerLine;
const blockRow = (mcuRow * component.v) + row;
const blockCol = (mcuCol * component.h) + col;
decodeFunction(component, component.blocks[blockRow][blockCol]);
}
// Locate and decode one block of a non-interleaved (single component) scan.
function decodeBlock(component, decodeFunction, mcu) {
const blockRow = (mcu / component.blocksPerLine) | 0;
const blockCol = mcu % component.blocksPerLine;
decodeFunction(component, component.blocks[blockRow][blockCol]);
}
const componentsLength = components.length;
let component;
let i;
let j;
let k;
let n;
let decodeFn;
// Select the coefficient decoder for this scan type.
if (progressive) {
if (spectralStart === 0) {
decodeFn = successivePrev === 0 ? decodeDCFirst : decodeDCSuccessive;
}
else {
decodeFn = successivePrev === 0 ? decodeACFirst : decodeACSuccessive;
}
}
else {
decodeFn = decodeBaseline;
}
let mcu = 0;
let marker;
let mcuExpected;
if (componentsLength === 1) {
mcuExpected = components[0].blocksPerLine * components[0].blocksPerColumn;
}
else {
mcuExpected = mcusPerLine * frame.mcusPerColumn;
}
const usedResetInterval = resetInterval || mcuExpected;
while (mcu < mcuExpected) {
// reset interval stuff
// DC predictors and EOB run restart at each interval boundary.
for (i = 0; i < componentsLength; i++) {
components[i].pred = 0;
}
eobrun = 0;
if (componentsLength === 1) {
component = components[0];
for (n = 0; n < usedResetInterval; n++) {
decodeBlock(component, decodeFn, mcu);
mcu++;
}
}
else {
for (n = 0; n < usedResetInterval; n++) {
for (i = 0; i < componentsLength; i++) {
component = components[i];
const { h, v } = component;
for (j = 0; j < v; j++) {
for (k = 0; k < h; k++) {
decodeMcu(component, decodeFn, mcu, j, k);
}
}
}
mcu++;
// If we've reached our expected MCU's, stop decoding
if (mcu === mcuExpected) {
break;
}
}
}
// find marker
bitsCount = 0;
marker = (data[offset] << 8) | data[offset + 1];
if (marker < 0xFF00) {
throw new Error('marker was not found');
}
if (marker >= 0xFFD0 && marker <= 0xFFD7) { // RSTx
offset += 2;
}
else {
break;
}
}
return offset - startOffset;
}
/**
 * Dequantize and inverse-DCT every 8x8 block of one component, producing
 * an array of 8-bit sample lines (one Uint8Array per output scan line).
 * @param frame frame object (unused directly here; kept for call symmetry)
 * @param component component with `blocks`, dimensions and quantizationTable
 * @returns array of Uint8Array lines, each `blocksPerLine * 8` samples wide
 */
function buildComponentData(frame, component) {
const lines = [];
const { blocksPerLine, blocksPerColumn } = component;
const samplesPerLine = blocksPerLine << 3;
// Scratch buffers reused for every block: R holds intermediate Int32
// values, r receives the clamped 8-bit output.
const R = new Int32Array(64);
const r = new Uint8Array(64);
// A port of poppler's IDCT method which in turn is taken from:
// Christoph Loeffler, Adriaan Ligtenberg, George S. Moschytz,
// "Practical Fast 1-D DCT Algorithms with 11 Multiplications",
// IEEE Intl. Conf. on Acoustics, Speech & Signal Processing, 1989,
// 988-991.
function quantizeAndInverse(zz, dataOut, dataIn) {
const qt = component.quantizationTable;
let v0;
let v1;
let v2;
let v3;
let v4;
let v5;
let v6;
let v7;
let t;
const p = dataIn;
let i;
// dequant
for (i = 0; i < 64; i++) {
p[i] = zz[i] * qt[i];
}
// inverse DCT on rows
for (i = 0; i < 8; ++i) {
const row = 8 * i;
// check for all-zero AC coefficients
if (p[1 + row] === 0 && p[2 + row] === 0 && p[3 + row] === 0
&& p[4 + row] === 0 && p[5 + row] === 0 && p[6 + row] === 0
&& p[7 + row] === 0) {
t = ((dctSqrt2 * p[0 + row]) + 512) >> 10;
p[0 + row] = t;
p[1 + row] = t;
p[2 + row] = t;
p[3 + row] = t;
p[4 + row] = t;
p[5 + row] = t;
p[6 + row] = t;
p[7 + row] = t;
continue; // eslint-disable-line no-continue
}
// stage 4
v0 = ((dctSqrt2 * p[0 + row]) + 128) >> 8;
v1 = ((dctSqrt2 * p[4 + row]) + 128) >> 8;
v2 = p[2 + row];
v3 = p[6 + row];
v4 = ((dctSqrt1d2 * (p[1 + row] - p[7 + row])) + 128) >> 8;
v7 = ((dctSqrt1d2 * (p[1 + row] + p[7 + row])) + 128) >> 8;
v5 = p[3 + row] << 4;
v6 = p[5 + row] << 4;
// stage 3
t = (v0 - v1 + 1) >> 1;
v0 = (v0 + v1 + 1) >> 1;
v1 = t;
t = ((v2 * dctSin6) + (v3 * dctCos6) + 128) >> 8;
v2 = ((v2 * dctCos6) - (v3 * dctSin6) + 128) >> 8;
v3 = t;
t = (v4 - v6 + 1) >> 1;
v4 = (v4 + v6 + 1) >> 1;
v6 = t;
t = (v7 + v5 + 1) >> 1;
v5 = (v7 - v5 + 1) >> 1;
v7 = t;
// stage 2
t = (v0 - v3 + 1) >> 1;
v0 = (v0 + v3 + 1) >> 1;
v3 = t;
t = (v1 - v2 + 1) >> 1;
v1 = (v1 + v2 + 1) >> 1;
v2 = t;
t = ((v4 * dctSin3) + (v7 * dctCos3) + 2048) >> 12;
v4 = ((v4 * dctCos3) - (v7 * dctSin3) + 2048) >> 12;
v7 = t;
t = ((v5 * dctSin1) + (v6 * dctCos1) + 2048) >> 12;
v5 = ((v5 * dctCos1) - (v6 * dctSin1) + 2048) >> 12;
v6 = t;
// stage 1
p[0 + row] = v0 + v7;
p[7 + row] = v0 - v7;
p[1 + row] = v1 + v6;
p[6 + row] = v1 - v6;
p[2 + row] = v2 + v5;
p[5 + row] = v2 - v5;
p[3 + row] = v3 + v4;
p[4 + row] = v3 - v4;
}
// inverse DCT on columns
for (i = 0; i < 8; ++i) {
const col = i;
// check for all-zero AC coefficients
if (p[(1 * 8) + col] === 0 && p[(2 * 8) + col] === 0 && p[(3 * 8) + col] === 0
&& p[(4 * 8) + col] === 0 && p[(5 * 8) + col] === 0 && p[(6 * 8) + col] === 0
&& p[(7 * 8) + col] === 0) {
// dataIn[i + 0] aliases p[(0 * 8) + col] here since p === dataIn and col === i.
t = ((dctSqrt2 * dataIn[i + 0]) + 8192) >> 14;
p[(0 * 8) + col] = t;
p[(1 * 8) + col] = t;
p[(2 * 8) + col] = t;
p[(3 * 8) + col] = t;
p[(4 * 8) + col] = t;
p[(5 * 8) + col] = t;
p[(6 * 8) + col] = t;
p[(7 * 8) + col] = t;
continue; // eslint-disable-line no-continue
}
// stage 4
v0 = ((dctSqrt2 * p[(0 * 8) + col]) + 2048) >> 12;
v1 = ((dctSqrt2 * p[(4 * 8) + col]) + 2048) >> 12;
v2 = p[(2 * 8) + col];
v3 = p[(6 * 8) + col];
v4 = ((dctSqrt1d2 * (p[(1 * 8) + col] - p[(7 * 8) + col])) + 2048) >> 12;
v7 = ((dctSqrt1d2 * (p[(1 * 8) + col] + p[(7 * 8) + col])) + 2048) >> 12;
v5 = p[(3 * 8) + col];
v6 = p[(5 * 8) + col];
// stage 3
t = (v0 - v1 + 1) >> 1;
v0 = (v0 + v1 + 1) >> 1;
v1 = t;
t = ((v2 * dctSin6) + (v3 * dctCos6) + 2048) >> 12;
v2 = ((v2 * dctCos6) - (v3 * dctSin6) + 2048) >> 12;
v3 = t;
t = (v4 - v6 + 1) >> 1;
v4 = (v4 + v6 + 1) >> 1;
v6 = t;
t = (v7 + v5 + 1) >> 1;
v5 = (v7 - v5 + 1) >> 1;
v7 = t;
// stage 2
t = (v0 - v3 + 1) >> 1;
v0 = (v0 + v3 + 1) >> 1;
v3 = t;
t = (v1 - v2 + 1) >> 1;
v1 = (v1 + v2 + 1) >> 1;
v2 = t;
t = ((v4 * dctSin3) + (v7 * dctCos3) + 2048) >> 12;
v4 = ((v4 * dctCos3) - (v7 * dctSin3) + 2048) >> 12;
v7 = t;
t = ((v5 * dctSin1) + (v6 * dctCos1) + 2048) >> 12;
v5 = ((v5 * dctCos1) - (v6 * dctSin1) + 2048) >> 12;
v6 = t;
// stage 1
p[(0 * 8) + col] = v0 + v7;
p[(7 * 8) + col] = v0 - v7;
p[(1 * 8) + col] = v1 + v6;
p[(6 * 8) + col] = v1 - v6;
p[(2 * 8) + col] = v2 + v5;
p[(5 * 8) + col] = v2 - v5;
p[(3 * 8) + col] = v3 + v4;
p[(4 * 8) + col] = v3 - v4;
}
// convert to 8-bit integers
for (i = 0; i < 64; ++i) {
const sample = 128 + ((p[i] + 8) >> 4);
if (sample < 0) {
dataOut[i] = 0;
}
else if (sample > 0XFF) {
dataOut[i] = 0xFF;
}
else {
dataOut[i] = sample;
}
}
}
// Transform every block, scattering each 8x8 result into the output lines.
for (let blockRow = 0; blockRow < blocksPerColumn; blockRow++) {
const scanLine = blockRow << 3;
for (let i = 0; i < 8; i++) {
lines.push(new Uint8Array(samplesPerLine));
}
for (let blockCol = 0; blockCol < blocksPerLine; blockCol++) {
quantizeAndInverse(component.blocks[blockRow][blockCol], r, R);
let offset = 0;
const sample = blockCol << 3;
for (let j = 0; j < 8; j++) {
const line = lines[scanLine + j];
for (let i = 0; i < 8; i++) {
line[sample + i] = r[offset++];
}
}
}
}
return lines;
}
/**
 * Streaming JPEG marker-segment parser. `parse` walks the marker structure,
 * collecting JFIF/Adobe metadata, quantization and Huffman tables, frames
 * and scans; `getResult` then runs the IDCT and returns interleaved samples.
 */
class JpegStreamReader {
constructor() {
this.jfif = null;
this.adobe = null;
this.quantizationTables = [];
this.huffmanTablesAC = [];
this.huffmanTablesDC = [];
this.resetFrames();
}
// Clear decoded frames; tables and metadata persist so shared JPEGTables
// parsed earlier remain available for subsequent blocks.
resetFrames() {
this.frames = [];
}
/**
 * Parse one JPEG byte stream (a tables-only stream or a full image).
 * @param data Uint8Array starting at an SOI marker
 * @throws {Error} on missing SOI, invalid table specs, or unknown markers
 */
parse(data) {
let offset = 0;
// const { length } = data;
// Big-endian 16-bit read that advances `offset`.
function readUint16() {
const value = (data[offset] << 8) | data[offset + 1];
offset += 2;
return value;
}
// Read a length-prefixed segment body (length includes its own 2 bytes).
function readDataBlock() {
const length = readUint16();
const array = data.subarray(offset, offset + length - 2);
offset += array.length;
return array;
}
// Compute per-component block/MCU geometry for a parsed SOF frame.
function prepareComponents(frame) {
let maxH = 0;
let maxV = 0;
let component;
let componentId;
for (componentId in frame.components) {
if (frame.components.hasOwnProperty(componentId)) {
component = frame.components[componentId];
if (maxH < component.h) {
maxH = component.h;
}
if (maxV < component.v) {
maxV = component.v;
}
}
}
const mcusPerLine = Math.ceil(frame.samplesPerLine / 8 / maxH);
const mcusPerColumn = Math.ceil(frame.scanLines / 8 / maxV);
for (componentId in frame.components) {
if (frame.components.hasOwnProperty(componentId)) {
component = frame.components[componentId];
const blocksPerLine = Math.ceil(Math.ceil(frame.samplesPerLine / 8) * component.h / maxH);
const blocksPerColumn = Math.ceil(Math.ceil(frame.scanLines / 8) * component.v / maxV);
const blocksPerLineForMcu = mcusPerLine * component.h;
const blocksPerColumnForMcu = mcusPerColumn * component.v;
const blocks = [];
for (let i = 0; i < blocksPerColumnForMcu; i++) {
const row = [];
for (let j = 0; j < blocksPerLineForMcu; j++) {
row.push(new Int32Array(64));
}
blocks.push(row);
}
component.blocksPerLine = blocksPerLine;
component.blocksPerColumn = blocksPerColumn;
component.blocks = blocks;
}
}
frame.maxH = maxH;
frame.maxV = maxV;
frame.mcusPerLine = mcusPerLine;
frame.mcusPerColumn = mcusPerColumn;
}
let fileMarker = readUint16();
if (fileMarker !== 0xFFD8) { // SOI (Start of Image)
throw new Error('SOI not found');
}
fileMarker = readUint16();
while (fileMarker !== 0xFFD9) { // EOI (End of image)
switch (fileMarker) {
case 0xFF00: break;
case 0xFFE0: // APP0 (Application Specific)
case 0xFFE1: // APP1
case 0xFFE2: // APP2
case 0xFFE3: // APP3
case 0xFFE4: // APP4
case 0xFFE5: // APP5
case 0xFFE6: // APP6
case 0xFFE7: // APP7
case 0xFFE8: // APP8
case 0xFFE9: // APP9
case 0xFFEA: // APP10
case 0xFFEB: // APP11
case 0xFFEC: // APP12
case 0xFFED: // APP13
case 0xFFEE: // APP14
case 0xFFEF: // APP15
case 0xFFFE: { // COM (Comment)
const appData = readDataBlock();
if (fileMarker === 0xFFE0) {
if (appData[0] === 0x4A && appData[1] === 0x46 && appData[2] === 0x49
&& appData[3] === 0x46 && appData[4] === 0) { // 'JFIF\x00'
this.jfif = {
version: { major: appData[5], minor: appData[6] },
densityUnits: appData[7],
xDensity: (appData[8] << 8) | appData[9],
yDensity: (appData[10] << 8) | appData[11],
thumbWidth: appData[12],
thumbHeight: appData[13],
thumbData: appData.subarray(14, 14 + (3 * appData[12] * appData[13])),
};
}
}
// TODO APP1 - Exif
if (fileMarker === 0xFFEE) {
if (appData[0] === 0x41 && appData[1] === 0x64 && appData[2] === 0x6F
&& appData[3] === 0x62 && appData[4] === 0x65 && appData[5] === 0) { // 'Adobe\x00'
this.adobe = {
version: appData[6],
flags0: (appData[7] << 8) | appData[8],
flags1: (appData[9] << 8) | appData[10],
transformCode: appData[11],
};
}
}
break;
}
case 0xFFDB: { // DQT (Define Quantization Tables)
const quantizationTablesLength = readUint16();
const quantizationTablesEnd = quantizationTablesLength + offset - 2;
while (offset < quantizationTablesEnd) {
const quantizationTableSpec = data[offset++];
// Table values are stored in zig-zag order; store them de-zig-zagged.
const tableData = new Int32Array(64);
if ((quantizationTableSpec >> 4) === 0) { // 8 bit values
for (let j = 0; j < 64; j++) {
const z = dctZigZag[j];
tableData[z] = data[offset++];
}
}
else if ((quantizationTableSpec >> 4) === 1) { // 16 bit
for (let j = 0; j < 64; j++) {
const z = dctZigZag[j];
tableData[z] = readUint16();
}
}
else {
throw new Error('DQT: invalid table spec');
}
this.quantizationTables[quantizationTableSpec & 15] = tableData;
}
break;
}
case 0xFFC0: // SOF0 (Start of Frame, Baseline DCT)
case 0xFFC1: // SOF1 (Start of Frame, Extended DCT)
case 0xFFC2: { // SOF2 (Start of Frame, Progressive DCT)
readUint16(); // skip data length
const frame = {
extended: (fileMarker === 0xFFC1),
progressive: (fileMarker === 0xFFC2),
precision: data[offset++],
scanLines: readUint16(),
samplesPerLine: readUint16(),
components: {},
componentsOrder: [],
};
const componentsCount = data[offset++];
let componentId;
// let maxH = 0;
// let maxV = 0;
for (let i = 0; i < componentsCount; i++) {
componentId = data[offset];
// High nibble: horizontal sampling factor; low nibble: vertical.
const h = data[offset + 1] >> 4;
const v = data[offset + 1] & 15;
const qId = data[offset + 2];
frame.componentsOrder.push(componentId);
frame.components[componentId] = {
h,
v,
quantizationIdx: qId,
};
offset += 3;
}
prepareComponents(frame);
this.frames.push(frame);
break;
}
case 0xFFC4: { // DHT (Define Huffman Tables)
const huffmanLength = readUint16();
for (let i = 2; i < huffmanLength;) {
const huffmanTableSpec = data[offset++];
const codeLengths = new Uint8Array(16);
let codeLengthSum = 0;
for (let j = 0; j < 16; j++, offset++) {
codeLengths[j] = data[offset];
codeLengthSum += codeLengths[j];
}
const huffmanValues = new Uint8Array(codeLengthSum);
for (let j = 0; j < codeLengthSum; j++, offset++) {
huffmanValues[j] = data[offset];
}
i += 17 + codeLengthSum;
// High nibble 0 = DC table, otherwise AC; low nibble is the slot.
if ((huffmanTableSpec >> 4) === 0) {
this.huffmanTablesDC[huffmanTableSpec & 15] = buildHuffmanTable(codeLengths, huffmanValues);
}
else {
this.huffmanTablesAC[huffmanTableSpec & 15] = buildHuffmanTable(codeLengths, huffmanValues);
}
}
break;
}
case 0xFFDD: // DRI (Define Restart Interval)
readUint16(); // skip data length
this.resetInterval = readUint16();
break;
case 0xFFDA: { // SOS (Start of Scan)
readUint16(); // skip length
const selectorsCount = data[offset++];
const components = [];
const frame = this.frames[0];
for (let i = 0; i < selectorsCount; i++) {
const component = frame.components[data[offset++]];
const tableSpec = data[offset++];
component.huffmanTableDC = this.huffmanTablesDC[tableSpec >> 4];
component.huffmanTableAC = this.huffmanTablesAC[tableSpec & 15];
components.push(component);
}
const spectralStart = data[offset++];
const spectralEnd = data[offset++];
const successiveApproximation = data[offset++];
const processed = decodeScan(data, offset, frame, components, this.resetInterval, spectralStart, spectralEnd, successiveApproximation >> 4, successiveApproximation & 15);
offset += processed;
break;
}
case 0xFFFF: // Fill bytes
if (data[offset] !== 0xFF) { // Avoid skipping a valid marker.
offset--;
}
break;
default:
if (data[offset - 3] === 0xFF
&& data[offset - 2] >= 0xC0 && data[offset - 2] <= 0xFE) {
// could be incorrect encoding -- last 0xFF byte of the previous
// block was eaten by the encoder
offset -= 3;
break;
}
throw new Error(`unknown JPEG marker ${fileMarker.toString(16)}`);
}
fileMarker = readUint16();
}
}
/**
 * Run the IDCT over the first decoded frame and return interleaved 8-bit
 * samples (width * height * componentCount).
 * @returns Uint8Array of interleaved samples
 * @throws {Error} when no frame has been decoded
 */
getResult() {
const { frames } = this;
if (this.frames.length === 0) {
throw new Error('no frames were decoded');
}
else if (this.frames.length > 1) {
console.warn('more than one frame is not supported');
}
// set each frame's components quantization table
for (let i = 0; i < this.frames.length; i++) {
const cp = this.frames[i].components;
for (const j of Object.keys(cp)) {
cp[j].quantizationTable = this.quantizationTables[cp[j].quantizationIdx];
delete cp[j].quantizationIdx;
}
}
const frame = frames[0];
const { components, componentsOrder } = frame;
const outComponents = [];
const width = frame.samplesPerLine;
const height = frame.scanLines;
for (let i = 0; i < componentsOrder.length; i++) {
const component = components[componentsOrder[i]];
outComponents.push({
lines: buildComponentData(frame, component),
scaleX: component.h / frame.maxH,
scaleY: component.v / frame.maxV,
});
}
// Interleave components, resampling subsampled components by scaleX/scaleY.
const out = new Uint8Array(width * height * outComponents.length);
let oi = 0;
for (let y = 0; y < height; ++y) {
for (let x = 0; x < width; ++x) {
for (let i = 0; i < outComponents.length; ++i) {
const component = outComponents[i];
out[oi] = component.lines[0 | y * component.scaleY][0 | x * component.scaleX];
++oi;
}
}
}
return out;
}
}
/** Decoder for JPEG-compressed TIFF blocks (Compression 7). */
class JpegDecoder extends basedecoder_js_1.default {
    /**
     * @param {object} fileDirectory IFD; a shared JPEGTables stream, when
     *   present, is parsed once so every block reuses those tables.
     */
    constructor(fileDirectory) {
        super();
        this.reader = new JpegStreamReader();
        const tables = fileDirectory.JPEGTables;
        if (tables) {
            this.reader.parse(tables);
        }
    }
    /**
     * Decode one JPEG block to raw interleaved samples.
     * @param {ArrayBuffer} buffer JPEG-encoded bytes
     * @returns {ArrayBufferLike} decoded sample data
     */
    decodeBlock(buffer) {
        this.reader.resetFrames();
        this.reader.parse(new Uint8Array(buffer));
        const samples = this.reader.getResult();
        return samples.buffer;
    }
}
exports.default = JpegDecoder;
//# sourceMappingURL=jpeg.js.map

File diff suppressed because one or more lines are too long

9
node_modules/geotiff/dist-node/compression/lerc.d.ts generated vendored Normal file
View File

@@ -0,0 +1,9 @@
/**
 * Decoder for LERC-compressed blocks (TIFF Compression tag 34887),
 * with optional additional deflate compression of the LERC payload.
 */
export default class LercDecoder extends BaseDecoder {
constructor(fileDirectory: any);
planarConfiguration: any;
samplesPerPixel: any;
addCompression: any;
decodeBlock(buffer: any): any;
}
import BaseDecoder from "./basedecoder.js";
//# sourceMappingURL=lerc.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"lerc.d.ts","sourceRoot":"","sources":["../../dist-module/compression/lerc.js"],"names":[],"mappings":"AAKA;IACE,gCAOC;IAJC,yBAA2H;IAC3H,qBAA+G;IAE/G,oBAAiF;IAGnF,8BAcC;CACF"}

33
node_modules/geotiff/dist-node/compression/lerc.js generated vendored Normal file
View File

@@ -0,0 +1,33 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const pako_1 = require("pako");
const lerc_1 = __importDefault(require("lerc"));
const basedecoder_js_1 = __importDefault(require("./basedecoder.js"));
const globals_js_1 = require("../globals.js");
/**
 * Decoder for LERC-compressed blocks. An optional additional compression
 * layer (None or Deflate, from the LercParameters tag) is undone before the
 * LERC payload itself is decoded.
 */
class LercDecoder extends basedecoder_js_1.default {
    constructor(fileDirectory) {
        super();
        const { PlanarConfiguration, SamplesPerPixel } = fileDirectory;
        this.planarConfiguration = typeof PlanarConfiguration !== 'undefined' ? PlanarConfiguration : 1;
        this.samplesPerPixel = typeof SamplesPerPixel !== 'undefined' ? SamplesPerPixel : 1;
        this.addCompression = fileDirectory.LercParameters[globals_js_1.LercParameters.AddCompression];
    }
    decodeBlock(buffer) {
        // Undo the (optional) extra compression layer first.
        let payload = buffer;
        if (this.addCompression === globals_js_1.LercAddCompression.Deflate) {
            payload = (0, pako_1.inflate)(new Uint8Array(payload)).buffer; // eslint-disable-line no-param-reassign, prefer-destructuring
        }
        else if (this.addCompression !== globals_js_1.LercAddCompression.None) {
            throw new Error(`Unsupported LERC additional compression method identifier: ${this.addCompression}`);
        }
        const lercResult = lerc_1.default.decode(payload, { returnPixelInterleavedDims: this.planarConfiguration === 1 });
        return lercResult.pixels[0].buffer;
    }
}
exports.default = LercDecoder;
//# sourceMappingURL=lerc.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"lerc.js","sourceRoot":"","sources":["../../dist-module/compression/lerc.js"],"names":[],"mappings":";;;;;AAAA,+BAA+B;AAC/B,gDAAwB;AACxB,sEAA2C;AAC3C,8CAAmE;AAEnE,MAAqB,WAAY,SAAQ,wBAAW;IAClD,YAAY,aAAa;QACvB,KAAK,EAAE,CAAC;QAER,IAAI,CAAC,mBAAmB,GAAG,OAAO,aAAa,CAAC,mBAAmB,KAAK,WAAW,CAAC,CAAC,CAAC,aAAa,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5H,IAAI,CAAC,eAAe,GAAG,OAAO,aAAa,CAAC,eAAe,KAAK,WAAW,CAAC,CAAC,CAAC,aAAa,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;QAEhH,IAAI,CAAC,cAAc,GAAG,aAAa,CAAC,cAAc,CAAC,2BAAc,CAAC,cAAc,CAAC,CAAC;IACpF,CAAC;IAED,WAAW,CAAC,MAAM;QAChB,QAAQ,IAAI,CAAC,cAAc,EAAE;YAC3B,KAAK,+BAAkB,CAAC,IAAI;gBAC1B,MAAM;YACR,KAAK,+BAAkB,CAAC,OAAO;gBAC7B,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,8DAA8D;gBAC/G,MAAM;YACR;gBACE,MAAM,IAAI,KAAK,CAAC,8DAA8D,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC;SACxG;QAED,MAAM,UAAU,GAAG,cAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,0BAA0B,EAAE,IAAI,CAAC,mBAAmB,KAAK,CAAC,EAAE,CAAC,CAAC;QACvG,MAAM,QAAQ,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;QACtC,OAAO,QAAQ,CAAC,MAAM,CAAC;IACzB,CAAC;CACF;AAzBD,8BAyBC"}

5
node_modules/geotiff/dist-node/compression/lzw.d.ts generated vendored Normal file
View File

@@ -0,0 +1,5 @@
export default class LZWDecoder extends BaseDecoder {
decodeBlock(buffer: any): ArrayBufferLike;
}
import BaseDecoder from "./basedecoder.js";
//# sourceMappingURL=lzw.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"lzw.d.ts","sourceRoot":"","sources":["../../dist-module/compression/lzw.js"],"names":[],"mappings":"AA8HA;IACE,0CAEC;CACF"}

134
node_modules/geotiff/dist-node/compression/lzw.js generated vendored Normal file
View File

@@ -0,0 +1,134 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const basedecoder_js_1 = __importDefault(require("./basedecoder.js"));
const MIN_BITS = 9;
const CLEAR_CODE = 256; // clear code
const EOI_CODE = 257; // end of information
const MAX_BYTELENGTH = 12;
// Extracts `length` bits (read as a big-endian bit string) from `array`,
// starting at absolute bit offset `position`. For the 9-12 bit LZW code
// sizes used here a code can straddle up to three consecutive bytes.
function getByte(array, position, length) {
    const d = position % 8; // bit offset within the first byte
    const a = Math.floor(position / 8); // index of the first byte touched
    const de = 8 - d; // usable bits remaining in the first byte
    const ef = (position + length) - ((a + 1) * 8); // bits still needed past the first byte
    let fg = (8 * (a + 2)) - (position + length); // unused low bits of the second byte
    const dg = ((a + 2) * 8) - position; // bits available within the first two bytes
    fg = Math.max(0, fg);
    // Ran off the end of the data: report end-of-information so the caller's
    // decode loop terminates instead of reading undefined entries.
    if (a >= array.length) {
        console.warn('ran off the end of the buffer before finding EOI_CODE (end on input code)');
        return EOI_CODE;
    }
    // Low (8 - d) bits of the first byte, shifted up to their final place.
    let chunk1 = array[a] & ((2 ** (8 - d)) - 1);
    chunk1 <<= (length - de);
    let chunks = chunk1;
    if (a + 1 < array.length) {
        // Contribution of the second byte (high `fg` bits dropped).
        let chunk2 = array[a + 1] >>> fg;
        chunk2 <<= Math.max(0, (length - dg));
        chunks += chunk2;
    }
    if (ef > 8 && a + 2 < array.length) {
        // Code spills into a third byte: take its high bits only.
        const hi = ((a + 3) * 8) - (position + length);
        const chunk3 = array[a + 2] >>> hi;
        chunks += chunk3;
    }
    return chunks;
}
function appendReversed(dest, source) {
for (let i = source.length - 1; i >= 0; i--) {
dest.push(source[i]);
}
return dest;
}
// Decodes a TIFF-flavoured LZW stream (variable 9-12 bit codes,
// CLEAR_CODE = 256, EOI_CODE = 257) into a Uint8Array of output bytes.
function decompress(input) {
    // Dictionary stored as two parallel arrays: dictionaryIndex[i] links
    // entry i to its prefix entry (4096 marks "no prefix"), and
    // dictionaryChar[i] is the final byte of entry i.
    const dictionaryIndex = new Uint16Array(4093);
    const dictionaryChar = new Uint8Array(4093);
    // Entries 0-255 are the literal bytes; 256/257 are the control codes.
    for (let i = 0; i <= 257; i++) {
        dictionaryIndex[i] = 4096;
        dictionaryChar[i] = i;
    }
    let dictionaryLength = 258; // next free dictionary slot
    let byteLength = MIN_BITS; // current code width in bits
    let position = 0; // absolute bit position in the input stream
    // Resets dictionary bookkeeping after a CLEAR_CODE.
    function initDictionary() {
        dictionaryLength = 258;
        byteLength = MIN_BITS;
    }
    // Reads the next code (byteLength bits) and advances the bit cursor.
    function getNext(array) {
        const byte = getByte(array, position, byteLength);
        position += byteLength;
        return byte;
    }
    // Appends a new entry (prefix entry i + trailing byte c); returns its code.
    function addToDictionary(i, c) {
        dictionaryChar[dictionaryLength] = c;
        dictionaryIndex[dictionaryLength] = i;
        dictionaryLength++;
        return dictionaryLength - 1;
    }
    // Walks the prefix chain of entry n, collecting its bytes in reverse
    // order (last byte first).
    function getDictionaryReversed(n) {
        const rev = [];
        for (let i = n; i !== 4096; i = dictionaryIndex[i]) {
            rev.push(dictionaryChar[i]);
        }
        return rev;
    }
    const result = [];
    initDictionary();
    const array = new Uint8Array(input);
    let code = getNext(array);
    let oldCode;
    while (code !== EOI_CODE) {
        if (code === CLEAR_CODE) {
            initDictionary();
            // Skip any run of consecutive clear codes.
            code = getNext(array);
            while (code === CLEAR_CODE) {
                code = getNext(array);
            }
            if (code === EOI_CODE) {
                break;
            }
            else if (code > CLEAR_CODE) {
                // Right after a clear, only literal codes are valid.
                throw new Error(`corrupted code at scanline ${code}`);
            }
            else {
                const val = getDictionaryReversed(code);
                appendReversed(result, val);
                oldCode = code;
            }
        }
        else if (code < dictionaryLength) {
            // Known code: emit its string and register oldCode's string plus
            // this entry's first byte (val is reversed, so that is its last
            // element) as a new dictionary entry.
            const val = getDictionaryReversed(code);
            appendReversed(result, val);
            addToDictionary(oldCode, val[val.length - 1]);
            oldCode = code;
        }
        else {
            // KwKwK case: code not yet in the dictionary; it must decode to
            // oldCode's string followed by that string's own first byte.
            const oldVal = getDictionaryReversed(oldCode);
            // Defensive check; getDictionaryReversed always returns an array,
            // so this branch appears unreachable in practice.
            if (!oldVal) {
                throw new Error(`Bogus entry. Not in dictionary, ${oldCode} / ${dictionaryLength}, position: ${position}`);
            }
            appendReversed(result, oldVal);
            result.push(oldVal[oldVal.length - 1]);
            addToDictionary(oldCode, oldVal[oldVal.length - 1]);
            oldCode = code;
        }
        // Widen the code size once the dictionary would overflow the current
        // width, up to the 12-bit maximum.
        if (dictionaryLength + 1 >= (2 ** byteLength)) {
            if (byteLength === MAX_BYTELENGTH) {
                oldCode = undefined;
            }
            else {
                byteLength++;
            }
        }
        code = getNext(array);
    }
    return new Uint8Array(result);
}
/**
 * Decoder for LZW-compressed TIFF blocks (Compression tag value 5).
 */
class LZWDecoder extends basedecoder_js_1.default {
    /**
     * Decompresses a single LZW-compressed block.
     * @param buffer the compressed data (ArrayBuffer or typed-array source).
     * @returns the decompressed bytes as an ArrayBuffer.
     */
    decodeBlock(buffer) {
        // `decompress` takes a single argument; the stray second argument
        // (`false`) previously passed here was silently ignored.
        return decompress(buffer).buffer;
    }
}
exports.default = LZWDecoder;
//# sourceMappingURL=lzw.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"lzw.js","sourceRoot":"","sources":["../../dist-module/compression/lzw.js"],"names":[],"mappings":";;;;;AAAA,sEAA2C;AAE3C,MAAM,QAAQ,GAAG,CAAC,CAAC;AACnB,MAAM,UAAU,GAAG,GAAG,CAAC,CAAC,aAAa;AACrC,MAAM,QAAQ,GAAG,GAAG,CAAC,CAAC,qBAAqB;AAC3C,MAAM,cAAc,GAAG,EAAE,CAAC;AAE1B,SAAS,OAAO,CAAC,KAAK,EAAE,QAAQ,EAAE,MAAM;IACtC,MAAM,CAAC,GAAG,QAAQ,GAAG,CAAC,CAAC;IACvB,MAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,GAAG,CAAC,CAAC,CAAC;IACnC,MAAM,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC;IACjB,MAAM,EAAE,GAAG,CAAC,QAAQ,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IAC/C,IAAI,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC;IAC7C,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,QAAQ,CAAC;IACpC,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;IACrB,IAAI,CAAC,IAAI,KAAK,CAAC,MAAM,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,2EAA2E,CAAC,CAAC;QAC1F,OAAO,QAAQ,CAAC;KACjB;IACD,IAAI,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IAC7C,MAAM,KAAK,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC;IACzB,IAAI,MAAM,GAAG,MAAM,CAAC;IACpB,IAAI,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE;QACxB,IAAI,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC;QACjC,MAAM,KAAK,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC;QACtC,MAAM,IAAI,MAAM,CAAC;KAClB;IACD,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE;QAClC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC;QAC/C,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC;QACnC,MAAM,IAAI,MAAM,CAAC;KAClB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,cAAc,CAAC,IAAI,EAAE,MAAM;IAClC,KAAK,IAAI,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;QAC3C,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KACtB;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAS,UAAU,CAAC,KAAK;IACvB,MAAM,eAAe,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;IAC9C,MAAM,cAAc,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;IA
C5C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,GAAG,EAAE,CAAC,EAAE,EAAE;QAC7B,eAAe,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;QAC1B,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;KACvB;IACD,IAAI,gBAAgB,GAAG,GAAG,CAAC;IAC3B,IAAI,UAAU,GAAG,QAAQ,CAAC;IAC1B,IAAI,QAAQ,GAAG,CAAC,CAAC;IAEjB,SAAS,cAAc;QACrB,gBAAgB,GAAG,GAAG,CAAC;QACvB,UAAU,GAAG,QAAQ,CAAC;IACxB,CAAC;IACD,SAAS,OAAO,CAAC,KAAK;QACpB,MAAM,IAAI,GAAG,OAAO,CAAC,KAAK,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC;QAClD,QAAQ,IAAI,UAAU,CAAC;QACvB,OAAO,IAAI,CAAC;IACd,CAAC;IACD,SAAS,eAAe,CAAC,CAAC,EAAE,CAAC;QAC3B,cAAc,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC;QACrC,eAAe,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC;QACtC,gBAAgB,EAAE,CAAC;QACnB,OAAO,gBAAgB,GAAG,CAAC,CAAC;IAC9B,CAAC;IACD,SAAS,qBAAqB,CAAC,CAAC;QAC9B,MAAM,GAAG,GAAG,EAAE,CAAC;QACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,IAAI,EAAE,CAAC,GAAG,eAAe,CAAC,CAAC,CAAC,EAAE;YAClD,GAAG,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7B;QACD,OAAO,GAAG,CAAC;IACb,CAAC;IAED,MAAM,MAAM,GAAG,EAAE,CAAC;IAClB,cAAc,EAAE,CAAC;IACjB,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,KAAK,CAAC,CAAC;IACpC,IAAI,IAAI,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;IAC1B,IAAI,OAAO,CAAC;IACZ,OAAO,IAAI,KAAK,QAAQ,EAAE;QACxB,IAAI,IAAI,KAAK,UAAU,EAAE;YACvB,cAAc,EAAE,CAAC;YACjB,IAAI,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;YACtB,OAAO,IAAI,KAAK,UAAU,EAAE;gBAC1B,IAAI,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;aACvB;YAED,IAAI,IAAI,KAAK,QAAQ,EAAE;gBACrB,MAAM;aACP;iBAAM,IAAI,IAAI,GAAG,UAAU,EAAE;gBAC5B,MAAM,IAAI,KAAK,CAAC,8BAA8B,IAAI,EAAE,CAAC,CAAC;aACvD;iBAAM;gBACL,MAAM,GAAG,GAAG,qBAAqB,CAAC,IAAI,CAAC,CAAC;gBACxC,cAAc,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;gBAC5B,OAAO,GAAG,IAAI,CAAC;aAChB;SACF;aAAM,IAAI,IAAI,GAAG,gBAAgB,EAAE;YAClC,MAAM,GAAG,GAAG,qBAAqB,CAAC,IAAI,CAAC,CAAC;YACxC,cAAc,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;YAC5B,eAAe,CAAC,OAAO,EAAE,GAAG,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;YAC9C,OAAO,GAAG,IAAI,CAAC;SAChB;aAAM;YACL,MAAM,MAAM,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC;YAC9C,IAAI,CAAC,MAAM,EAAE;gBACX,MAAM,IAAI,KAAK,CAAC,mCAAmC,OAAO,MAAM,gBAAgB,eAAe,QAAQ,EAAE,CAAC,CAAC;aAC5G;YACD,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAC/B,MAAM,CAAC,IAAI,
CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;YACvC,eAAe,CAAC,OAAO,EAAE,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;YACpD,OAAO,GAAG,IAAI,CAAC;SAChB;QAED,IAAI,gBAAgB,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,UAAU,CAAC,EAAE;YAC7C,IAAI,UAAU,KAAK,cAAc,EAAE;gBACjC,OAAO,GAAG,SAAS,CAAC;aACrB;iBAAM;gBACL,UAAU,EAAE,CAAC;aACd;SACF;QACD,IAAI,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;KACvB;IACD,OAAO,IAAI,UAAU,CAAC,MAAM,CAAC,CAAC;AAChC,CAAC;AAED,MAAqB,UAAW,SAAQ,wBAAW;IACjD,WAAW,CAAC,MAAM;QAChB,OAAO,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,MAAM,CAAC;IAC1C,CAAC;CACF;AAJD,6BAIC"}

View File

@@ -0,0 +1,5 @@
export default class PackbitsDecoder extends BaseDecoder {
decodeBlock(buffer: any): ArrayBufferLike;
}
import BaseDecoder from "./basedecoder.js";
//# sourceMappingURL=packbits.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"packbits.d.ts","sourceRoot":"","sources":["../../dist-module/compression/packbits.js"],"names":[],"mappings":"AAEA;IACE,0CAqBC;CACF"}

32
node_modules/geotiff/dist-node/compression/packbits.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const basedecoder_js_1 = __importDefault(require("./basedecoder.js"));
// Decoder for PackBits (run-length) compressed blocks (Compression tag 32773).
class PackbitsDecoder extends basedecoder_js_1.default {
    // Decodes one PackBits-compressed block into an ArrayBuffer.
    //
    // Each run starts with a signed header byte n:
    //   n >= 0: the next n + 1 bytes are literals, copied verbatim;
    //   n <  0: the following byte is repeated (-n) + 1 times.
    //
    // NOTE(review): a header of -128 is treated as a 129-long repeat run
    // here; the classic PackBits spec defines -128 as a no-op — confirm
    // against upstream geotiff.js before changing.
    decodeBlock(buffer) {
        const dataView = new DataView(buffer);
        const out = [];
        for (let i = 0; i < buffer.byteLength; ++i) {
            let header = dataView.getInt8(i);
            if (header < 0) {
                // Repeat run: emit the next byte (-header) + 1 times.
                const next = dataView.getUint8(i + 1);
                header = -header;
                for (let j = 0; j <= header; ++j) {
                    out.push(next);
                }
                i += 1;
            }
            else {
                // Literal run: copy the next (header + 1) bytes unchanged.
                for (let j = 0; j <= header; ++j) {
                    out.push(dataView.getUint8(i + j + 1));
                }
                i += header + 1;
            }
        }
        return new Uint8Array(out).buffer;
    }
}
exports.default = PackbitsDecoder;
//# sourceMappingURL=packbits.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"packbits.js","sourceRoot":"","sources":["../../dist-module/compression/packbits.js"],"names":[],"mappings":";;;;;AAAA,sEAA2C;AAE3C,MAAqB,eAAgB,SAAQ,wBAAW;IACtD,WAAW,CAAC,MAAM;QAChB,MAAM,QAAQ,GAAG,IAAI,QAAQ,CAAC,MAAM,CAAC,CAAC;QACtC,MAAM,GAAG,GAAG,EAAE,CAAC;QAEf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,UAAU,EAAE,EAAE,CAAC,EAAE;YAC1C,IAAI,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YACjC,IAAI,MAAM,GAAG,CAAC,EAAE;gBACd,MAAM,IAAI,GAAG,QAAQ,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACtC,MAAM,GAAG,CAAC,MAAM,CAAC;gBACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,MAAM,EAAE,EAAE,CAAC,EAAE;oBAChC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBAChB;gBACD,CAAC,IAAI,CAAC,CAAC;aACR;iBAAM;gBACL,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,MAAM,EAAE,EAAE,CAAC,EAAE;oBAChC,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;iBACxC;gBACD,CAAC,IAAI,MAAM,GAAG,CAAC,CAAC;aACjB;SACF;QACD,OAAO,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC;IACpC,CAAC;CACF;AAvBD,kCAuBC"}

5
node_modules/geotiff/dist-node/compression/raw.d.ts generated vendored Normal file
View File

@@ -0,0 +1,5 @@
export default class RawDecoder extends BaseDecoder {
decodeBlock(buffer: any): any;
}
import BaseDecoder from "./basedecoder.js";
//# sourceMappingURL=raw.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"raw.d.ts","sourceRoot":"","sources":["../../dist-module/compression/raw.js"],"names":[],"mappings":"AAEA;IACE,8BAEC;CACF"}

13
node_modules/geotiff/dist-node/compression/raw.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const basedecoder_js_1 = __importDefault(require("./basedecoder.js"));
class RawDecoder extends basedecoder_js_1.default {
decodeBlock(buffer) {
return buffer;
}
}
exports.default = RawDecoder;
//# sourceMappingURL=raw.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"raw.js","sourceRoot":"","sources":["../../dist-module/compression/raw.js"],"names":[],"mappings":";;;;;AAAA,sEAA2C;AAE3C,MAAqB,UAAW,SAAQ,wBAAW;IACjD,WAAW,CAAC,MAAM;QAChB,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAJD,6BAIC"}

View File

@@ -0,0 +1,10 @@
/**
* class WebImageDecoder
*
* This decoder uses the browsers image decoding facilities to read image
* formats like WebP when supported.
*/
export default class WebImageDecoder extends BaseDecoder {
}
import BaseDecoder from "./basedecoder.js";
//# sourceMappingURL=webimage.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"webimage.d.ts","sourceRoot":"","sources":["../../dist-module/compression/webimage.js"],"names":[],"mappings":"AAEA;;;;;GAKG;AACH;CA+BC"}

43
node_modules/geotiff/dist-node/compression/webimage.js generated vendored Normal file
View File

@@ -0,0 +1,43 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const basedecoder_js_1 = __importDefault(require("./basedecoder.js"));
/**
* class WebImageDecoder
*
* This decoder uses the browsers image decoding facilities to read image
* formats like WebP when supported.
*/
/**
 * class WebImageDecoder
 *
 * This decoder uses the browsers image decoding facilities to read image
 * formats like WebP when supported.
 */
class WebImageDecoder extends basedecoder_js_1.default {
    constructor() {
        super();
        // Both an image decoder and some canvas implementation are required
        // to rasterize the decoded bitmap back into raw pixel data.
        if (typeof createImageBitmap === 'undefined') {
            throw new Error('Cannot decode WebImage as `createImageBitmap` is not available');
        }
        else if (typeof document === 'undefined' && typeof OffscreenCanvas === 'undefined') {
            // Fixed the double negative in the original message
            // ("neither ... is not available").
            throw new Error('Cannot decode WebImage as neither `document` nor `OffscreenCanvas` is available');
        }
    }
    /**
     * Decodes an image-format block (e.g. WebP) by letting the browser parse
     * it, drawing the resulting bitmap to a canvas and reading back the raw
     * RGBA pixel bytes.
     * @param fileDirectory the IFD of the image (unused here).
     * @param buffer the encoded image bytes.
     * @returns a Promise resolving to the RGBA pixel data as an ArrayBuffer.
     */
    async decode(fileDirectory, buffer) {
        const blob = new Blob([buffer]);
        const imageBitmap = await createImageBitmap(blob);
        let canvas;
        if (typeof document !== 'undefined') {
            canvas = document.createElement('canvas');
            canvas.width = imageBitmap.width;
            canvas.height = imageBitmap.height;
        }
        else {
            canvas = new OffscreenCanvas(imageBitmap.width, imageBitmap.height);
        }
        const ctx = canvas.getContext('2d');
        ctx.drawImage(imageBitmap, 0, 0);
        // TODO: check how many samples per pixel we have, and return RGB/RGBA accordingly
        // it seems like GDAL always encodes via RGBA which does not require a translation
        return ctx.getImageData(0, 0, imageBitmap.width, imageBitmap.height).data.buffer;
    }
}
exports.default = WebImageDecoder;
//# sourceMappingURL=webimage.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"webimage.js","sourceRoot":"","sources":["../../dist-module/compression/webimage.js"],"names":[],"mappings":";;;;;AAAA,sEAA2C;AAE3C;;;;;GAKG;AACH,MAAqB,eAAgB,SAAQ,wBAAW;IACtD;QACE,KAAK,EAAE,CAAC;QACR,IAAI,OAAO,iBAAiB,KAAK,WAAW,EAAE;YAC5C,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;SACnF;aAAM,IAAI,OAAO,QAAQ,KAAK,WAAW,IAAI,OAAO,eAAe,KAAK,WAAW,EAAE;YACpF,MAAM,IAAI,KAAK,CAAC,qFAAqF,CAAC,CAAC;SACxG;IACH,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM;QAChC,MAAM,IAAI,GAAG,IAAI,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;QAChC,MAAM,WAAW,GAAG,MAAM,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAElD,IAAI,MAAM,CAAC;QACX,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACnC,MAAM,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;YAC1C,MAAM,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;YACjC,MAAM,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC;SACpC;aAAM;YACL,MAAM,GAAG,IAAI,eAAe,CAAC,WAAW,CAAC,KAAK,EAAE,WAAW,CAAC,MAAM,CAAC,CAAC;SACrE;QAED,MAAM,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACpC,GAAG,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;QAEjC,kFAAkF;QAClF,kFAAkF;QAElF,OAAO,GAAG,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,CAAC,KAAK,EAAE,WAAW,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;IACnF,CAAC;CACF;AA/BD,kCA+BC"}

25
node_modules/geotiff/dist-node/dataslice.d.ts generated vendored Normal file
View File

@@ -0,0 +1,25 @@
export default class DataSlice {
constructor(arrayBuffer: any, sliceOffset: any, littleEndian: any, bigTiff: any);
_dataView: DataView;
_sliceOffset: any;
_littleEndian: any;
_bigTiff: any;
get sliceOffset(): any;
get sliceTop(): any;
get littleEndian(): any;
get bigTiff(): any;
get buffer(): ArrayBuffer;
covers(offset: any, length: any): boolean;
readUint8(offset: any): number;
readInt8(offset: any): number;
readUint16(offset: any): number;
readInt16(offset: any): number;
readUint32(offset: any): number;
readInt32(offset: any): number;
readFloat32(offset: any): number;
readFloat64(offset: any): number;
readUint64(offset: any): number;
readInt64(offset: any): number;
readOffset(offset: any): number;
}
//# sourceMappingURL=dataslice.d.ts.map

1
node_modules/geotiff/dist-node/dataslice.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"dataslice.d.ts","sourceRoot":"","sources":["../dist-module/dataslice.js"],"names":[],"mappings":"AAAA;IACE,iFAKC;IAJC,oBAA0C;IAC1C,kBAA+B;IAC/B,mBAAiC;IACjC,cAAuB;IAGzB,uBAEC;IAED,oBAEC;IAED,wBAEC;IAED,mBAEC;IAED,0BAEC;IAED,0CAEC;IAED,+BAIC;IAED,8BAIC;IAED,gCAIC;IAED,+BAIC;IAED,gCAIC;IAED,+BAIC;IAED,iCAIC;IAED,iCAIC;IAED,gCAuBC;IAGD,+BAyBC;IAED,gCAKC;CACF"}

105
node_modules/geotiff/dist-node/dataslice.js generated vendored Normal file
View File

@@ -0,0 +1,105 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * A readable window into a larger file: wraps an ArrayBuffer that was read
 * starting at `sliceOffset` within the file and exposes typed reads that
 * take *absolute* file offsets (translated internally to buffer offsets).
 */
class DataSlice {
    /**
     * @param arrayBuffer the raw bytes of this slice.
     * @param sliceOffset absolute file offset of the slice's first byte.
     * @param littleEndian whether multi-byte reads are little-endian.
     * @param bigTiff whether offsets are 64-bit (BigTIFF) rather than 32-bit.
     */
    constructor(arrayBuffer, sliceOffset, littleEndian, bigTiff) {
        this._dataView = new DataView(arrayBuffer);
        this._sliceOffset = sliceOffset;
        this._littleEndian = littleEndian;
        this._bigTiff = bigTiff;
    }
    /** Absolute file offset of the first byte of this slice. */
    get sliceOffset() {
        return this._sliceOffset;
    }
    /** Absolute file offset one past the last byte of this slice. */
    get sliceTop() {
        return this._sliceOffset + this.buffer.byteLength;
    }
    get littleEndian() {
        return this._littleEndian;
    }
    /** Whether file offsets are read as 64-bit values (BigTIFF). */
    get bigTiff() {
        return this._bigTiff;
    }
    /** The underlying ArrayBuffer of this slice. */
    get buffer() {
        return this._dataView.buffer;
    }
    /** Returns true when [offset, offset + length) lies fully inside the slice. */
    covers(offset, length) {
        return this.sliceOffset <= offset && this.sliceTop >= offset + length;
    }
    readUint8(offset) {
        // Single-byte reads have no endianness; the original passed a
        // meaningless littleEndian argument that DataView.getUint8 ignores.
        return this._dataView.getUint8(offset - this._sliceOffset);
    }
    readInt8(offset) {
        return this._dataView.getInt8(offset - this._sliceOffset);
    }
    readUint16(offset) {
        return this._dataView.getUint16(offset - this._sliceOffset, this._littleEndian);
    }
    readInt16(offset) {
        return this._dataView.getInt16(offset - this._sliceOffset, this._littleEndian);
    }
    readUint32(offset) {
        return this._dataView.getUint32(offset - this._sliceOffset, this._littleEndian);
    }
    readInt32(offset) {
        return this._dataView.getInt32(offset - this._sliceOffset, this._littleEndian);
    }
    readFloat32(offset) {
        return this._dataView.getFloat32(offset - this._sliceOffset, this._littleEndian);
    }
    readFloat64(offset) {
        return this._dataView.getFloat64(offset - this._sliceOffset, this._littleEndian);
    }
    /**
     * Reads an unsigned 64-bit integer as a JS number, combining two 32-bit
     * halves.
     * @throws when the value exceeds Number.MAX_SAFE_INTEGER, since it could
     *     not be represented exactly.
     */
    readUint64(offset) {
        const left = this.readUint32(offset);
        const right = this.readUint32(offset + 4);
        let combined;
        if (this._littleEndian) {
            combined = left + ((2 ** 32) * right);
            if (!Number.isSafeInteger(combined)) {
                throw new Error(`${combined} exceeds MAX_SAFE_INTEGER. `
                    + 'Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues');
            }
            return combined;
        }
        combined = ((2 ** 32) * left) + right;
        if (!Number.isSafeInteger(combined)) {
            throw new Error(`${combined} exceeds MAX_SAFE_INTEGER. `
                + 'Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues');
        }
        return combined;
    }
    // Reads a signed 64-bit integer byte-by-byte, undoing two's complement
    // manually so the magnitude stays within double precision.
    // adapted from https://stackoverflow.com/a/55338384/8060591
    readInt64(offset) {
        let value = 0;
        const isNegative = (this._dataView.getUint8(offset + (this._littleEndian ? 7 : 0)) & 0x80)
            > 0;
        let carrying = true; // borrow flag while computing |value| = ~(x - 1)
        for (let i = 0; i < 8; i++) {
            let byte = this._dataView.getUint8(offset + (this._littleEndian ? i : 7 - i));
            if (isNegative) {
                if (carrying) {
                    if (byte !== 0x00) {
                        byte = ~(byte - 1) & 0xff;
                        carrying = false;
                    }
                }
                else {
                    byte = ~byte & 0xff;
                }
            }
            value += byte * (256 ** i);
        }
        if (isNegative) {
            value = -value;
        }
        return value;
    }
    /** Reads a file offset: 64-bit when BigTIFF, 32-bit otherwise. */
    readOffset(offset) {
        if (this._bigTiff) {
            return this.readUint64(offset);
        }
        return this.readUint32(offset);
    }
}
exports.default = DataSlice;
//# sourceMappingURL=dataslice.js.map

1
node_modules/geotiff/dist-node/dataslice.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"dataslice.js","sourceRoot":"","sources":["../dist-module/dataslice.js"],"names":[],"mappings":";;AAAA,MAAqB,SAAS;IAC5B,YAAY,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,OAAO;QACzD,IAAI,CAAC,SAAS,GAAG,IAAI,QAAQ,CAAC,WAAW,CAAC,CAAC;QAC3C,IAAI,CAAC,YAAY,GAAG,WAAW,CAAC;QAChC,IAAI,CAAC,aAAa,GAAG,YAAY,CAAC;QAClC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;IAC1B,CAAC;IAED,IAAI,WAAW;QACb,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAED,IAAI,QAAQ;QACV,OAAO,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC;IACpD,CAAC;IAED,IAAI,YAAY;QACd,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED,IAAI,OAAO;QACT,OAAO,IAAI,CAAC,QAAQ,CAAC;IACvB,CAAC;IAED,IAAI,MAAM;QACR,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;IAC/B,CAAC;IAED,MAAM,CAAC,MAAM,EAAE,MAAM;QACnB,OAAO,IAAI,CAAC,WAAW,IAAI,MAAM,IAAI,IAAI,CAAC,QAAQ,IAAI,MAAM,GAAG,MAAM,CAAC;IACxE,CAAC;IAED,SAAS,CAAC,MAAM;QACd,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,CAC5B,MAAM,GAAG,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,CAC/C,CAAC;IACJ,CAAC;IAED,QAAQ,CAAC,MAAM;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,OAAO,CAC3B,MAAM,GAAG,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,CAC/C,CAAC;IACJ,CAAC;IAED,UAAU,CAAC,MAAM;QACf,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAC7B,MAAM,GAAG,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,CAC/C,CAAC;IACJ,CAAC;IAED,SAAS,CAAC,MAAM;QACd,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,CAC5B,MAAM,GAAG,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,CAC/C,CAAC;IACJ,CAAC;IAED,UAAU,CAAC,MAAM;QACf,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAC7B,MAAM,GAAG,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,CAC/C,CAAC;IACJ,CAAC;IAED,SAAS,CAAC,MAAM;QACd,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,CAC5B,MAAM,GAAG,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,CAC/C,CAAC;IACJ,CAAC;IAED,WAAW,CAAC,MAAM;QAChB,OAAO,IAAI,CAAC,SAAS,CAAC,UAAU,CAC9B,MAAM,GAAG,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,CAC/C,CAAC;IACJ,CAAC;IAED,WAAW,CAAC,MAAM;QAChB,OAAO,IAAI,CAAC,SAAS,CAAC,UAAU,CAC9B,MAAM,GAAG,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,CAC/C,CAAC;IACJ,CAAC;IAED,UAAU,CAAC,MAAM;QACf,MAAM,IAAI,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QACrC,MAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAC1C,IAAI,QAAQ,CAAC;QACb,IAAI,IAAI,CAAC,aAA
a,EAAE;YACtB,QAAQ,GAAG,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,KAAK,CAAC,CAAC;YACtC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE;gBACnC,MAAM,IAAI,KAAK,CACb,GAAG,QAAQ,6BAA6B;sBACtC,gHAAgH,CACnH,CAAC;aACH;YACD,OAAO,QAAQ,CAAC;SACjB;QACD,QAAQ,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,KAAK,CAAC;QACtC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE;YACnC,MAAM,IAAI,KAAK,CACb,GAAG,QAAQ,6BAA6B;kBACtC,gHAAgH,CACnH,CAAC;SACH;QAED,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,4DAA4D;IAC5D,SAAS,CAAC,MAAM;QACd,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,MAAM,UAAU,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;cACtF,CAAC,CAAC;QACN,IAAI,QAAQ,GAAG,IAAI,CAAC;QACpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B,IAAI,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAChC,MAAM,GAAG,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAC1C,CAAC;YACF,IAAI,UAAU,EAAE;gBACd,IAAI,QAAQ,EAAE;oBACZ,IAAI,IAAI,KAAK,IAAI,EAAE;wBACjB,IAAI,GAAG,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC;wBAC1B,QAAQ,GAAG,KAAK,CAAC;qBAClB;iBACF;qBAAM;oBACL,IAAI,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC;iBACrB;aACF;YACD,KAAK,IAAI,IAAI,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;SAC5B;QACD,IAAI,UAAU,EAAE;YACd,KAAK,GAAG,CAAC,KAAK,CAAC;SAChB;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,UAAU,CAAC,MAAM;QACf,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,OAAO,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;SAChC;QACD,OAAO,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IACjC,CAAC;CACF;AA3ID,4BA2IC"}

17
node_modules/geotiff/dist-node/dataview64.d.ts generated vendored Normal file
View File

@@ -0,0 +1,17 @@
export default class DataView64 {
constructor(arrayBuffer: any);
_dataView: DataView;
get buffer(): ArrayBuffer;
getUint64(offset: any, littleEndian: any): number;
getInt64(offset: any, littleEndian: any): number;
getUint8(offset: any, littleEndian: any): number;
getInt8(offset: any, littleEndian: any): number;
getUint16(offset: any, littleEndian: any): number;
getInt16(offset: any, littleEndian: any): number;
getUint32(offset: any, littleEndian: any): number;
getInt32(offset: any, littleEndian: any): number;
getFloat16(offset: any, littleEndian: any): number;
getFloat32(offset: any, littleEndian: any): number;
getFloat64(offset: any, littleEndian: any): number;
}
//# sourceMappingURL=dataview64.d.ts.map

1
node_modules/geotiff/dist-node/dataview64.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"dataview64.d.ts","sourceRoot":"","sources":["../dist-module/dataview64.js"],"names":[],"mappings":"AAEA;IACE,8BAEC;IADC,oBAA0C;IAG5C,0BAEC;IAED,kDAuBC;IAGD,iDAsBC;IAED,iDAEC;IAED,gDAEC;IAED,kDAEC;IAED,iDAEC;IAED,kDAEC;IAED,iDAEC;IAED,mDAEC;IAED,mDAEC;IAED,mDAEC;CACF"}

84
node_modules/geotiff/dist-node/dataview64.js generated vendored Normal file
View File

@@ -0,0 +1,84 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const float16_1 = require("@petamoriken/float16");
/**
 * A DataView wrapper that adds 64-bit integer reads (as JS numbers) and
 * half-precision float reads on top of the standard typed getters.
 */
class DataView64 {
    constructor(arrayBuffer) {
        this._dataView = new DataView(arrayBuffer);
    }
    /** The underlying ArrayBuffer. */
    get buffer() {
        return this._dataView.buffer;
    }
    /**
     * Reads an unsigned 64-bit integer as a JS number from two 32-bit halves.
     * Throws when the result is not exactly representable as a double.
     */
    getUint64(offset, littleEndian) {
        const lo = this.getUint32(offset, littleEndian);
        const hi = this.getUint32(offset + 4, littleEndian);
        const combined = littleEndian
            ? lo + ((2 ** 32) * hi)
            : ((2 ** 32) * lo) + hi;
        if (!Number.isSafeInteger(combined)) {
            throw new Error(`${combined} exceeds MAX_SAFE_INTEGER. `
                + 'Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues');
        }
        return combined;
    }
    // Reads a signed 64-bit integer byte-by-byte, undoing two's complement
    // by hand so the magnitude stays within double precision.
    // adapted from https://stackoverflow.com/a/55338384/8060591
    getInt64(offset, littleEndian) {
        const signByte = this._dataView.getUint8(offset + (littleEndian ? 7 : 0));
        const negative = (signByte & 0x80) > 0;
        let result = 0;
        let borrowing = true; // borrow flag while computing |value| = ~(x - 1)
        for (let i = 0; i < 8; i++) {
            let byte = this._dataView.getUint8(offset + (littleEndian ? i : 7 - i));
            if (negative) {
                if (borrowing) {
                    if (byte !== 0x00) {
                        byte = ~(byte - 1) & 0xff;
                        borrowing = false;
                    }
                }
                else {
                    byte = ~byte & 0xff;
                }
            }
            result += byte * (256 ** i);
        }
        return negative ? -result : result;
    }
    getUint8(offset, littleEndian) {
        return this._dataView.getUint8(offset, littleEndian);
    }
    getInt8(offset, littleEndian) {
        return this._dataView.getInt8(offset, littleEndian);
    }
    getUint16(offset, littleEndian) {
        return this._dataView.getUint16(offset, littleEndian);
    }
    getInt16(offset, littleEndian) {
        return this._dataView.getInt16(offset, littleEndian);
    }
    getUint32(offset, littleEndian) {
        return this._dataView.getUint32(offset, littleEndian);
    }
    getInt32(offset, littleEndian) {
        return this._dataView.getInt32(offset, littleEndian);
    }
    // Half-precision read delegated to the @petamoriken/float16 helper.
    getFloat16(offset, littleEndian) {
        return (0, float16_1.getFloat16)(this._dataView, offset, littleEndian);
    }
    getFloat32(offset, littleEndian) {
        return this._dataView.getFloat32(offset, littleEndian);
    }
    getFloat64(offset, littleEndian) {
        return this._dataView.getFloat64(offset, littleEndian);
    }
}
exports.default = DataView64;
//# sourceMappingURL=dataview64.js.map

1
node_modules/geotiff/dist-node/dataview64.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"dataview64.js","sourceRoot":"","sources":["../dist-module/dataview64.js"],"names":[],"mappings":";;AAAA,kDAAkD;AAElD,MAAqB,UAAU;IAC7B,YAAY,WAAW;QACrB,IAAI,CAAC,SAAS,GAAG,IAAI,QAAQ,CAAC,WAAW,CAAC,CAAC;IAC7C,CAAC;IAED,IAAI,MAAM;QACR,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;IAC/B,CAAC;IAED,SAAS,CAAC,MAAM,EAAE,YAAY;QAC5B,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;QAClD,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,YAAY,CAAC,CAAC;QACvD,IAAI,QAAQ,CAAC;QACb,IAAI,YAAY,EAAE;YAChB,QAAQ,GAAG,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,KAAK,CAAC,CAAC;YACtC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE;gBACnC,MAAM,IAAI,KAAK,CACb,GAAG,QAAQ,6BAA6B;sBACtC,gHAAgH,CACnH,CAAC;aACH;YACD,OAAO,QAAQ,CAAC;SACjB;QACD,QAAQ,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,KAAK,CAAC;QACtC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE;YACnC,MAAM,IAAI,KAAK,CACb,GAAG,QAAQ,6BAA6B;kBACtC,gHAAgH,CACnH,CAAC;SACH;QAED,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,4DAA4D;IAC5D,QAAQ,CAAC,MAAM,EAAE,YAAY;QAC3B,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,MAAM,UAAU,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;QACzF,IAAI,QAAQ,GAAG,IAAI,CAAC;QACpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B,IAAI,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACxE,IAAI,UAAU,EAAE;gBACd,IAAI,QAAQ,EAAE;oBACZ,IAAI,IAAI,KAAK,IAAI,EAAE;wBACjB,IAAI,GAAG,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC;wBAC1B,QAAQ,GAAG,KAAK,CAAC;qBAClB;iBACF;qBAAM;oBACL,IAAI,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC;iBACrB;aACF;YACD,KAAK,IAAI,IAAI,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;SAC5B;QACD,IAAI,UAAU,EAAE;YACd,KAAK,GAAG,CAAC,KAAK,CAAC;SAChB;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,QAAQ,CAAC,MAAM,EAAE,YAAY;QAC3B,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IACvD,CAAC;IAED,OAAO,CAAC,MAAM,EAAE,YAAY;QAC1B,OAAO,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,MAAM,E
AAE,YAAY,CAAC,CAAC;IACtD,CAAC;IAED,SAAS,CAAC,MAAM,EAAE,YAAY;QAC5B,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IACxD,CAAC;IAED,QAAQ,CAAC,MAAM,EAAE,YAAY;QAC3B,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IACvD,CAAC;IAED,SAAS,CAAC,MAAM,EAAE,YAAY;QAC5B,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IACxD,CAAC;IAED,QAAQ,CAAC,MAAM,EAAE,YAAY;QAC3B,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IACvD,CAAC;IAED,UAAU,CAAC,MAAM,EAAE,YAAY;QAC7B,OAAO,IAAA,oBAAU,EAAC,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;IAC1D,CAAC;IAED,UAAU,CAAC,MAAM,EAAE,YAAY;QAC7B,OAAO,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IACzD,CAAC;IAED,UAAU,CAAC,MAAM,EAAE,YAAY;QAC7B,OAAO,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IACzD,CAAC;CACF;AA9FD,6BA8FC"}

209
node_modules/geotiff/dist-node/geotiff.d.ts generated vendored Normal file
View File

@@ -0,0 +1,209 @@
/**
* Creates a new GeoTIFF from a remote URL.
* @param {string} url The URL to access the image from
* @param {object} [options] Additional options to pass to the source.
* See {@link makeRemoteSource} for details.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
export function fromUrl(url: string, options?: object, signal?: AbortSignal | undefined): Promise<GeoTIFF>;
/**
* Construct a new GeoTIFF from an
* [ArrayBuffer]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer}.
* @param {ArrayBuffer} arrayBuffer The data to read the file from.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
export function fromArrayBuffer(arrayBuffer: ArrayBuffer, signal?: AbortSignal | undefined): Promise<GeoTIFF>;
/**
* Construct a GeoTIFF from a local file path. This uses the node
* [filesystem API]{@link https://nodejs.org/api/fs.html} and is
* not available on browsers.
*
* N.B. After the GeoTIFF has been completely processed it needs
* to be closed but only if it has been constructed from a file.
* @param {string} path The file path to read from.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
export function fromFile(path: string, signal?: AbortSignal | undefined): Promise<GeoTIFF>;
/**
* Construct a GeoTIFF from an HTML
* [Blob]{@link https://developer.mozilla.org/en-US/docs/Web/API/Blob} or
* [File]{@link https://developer.mozilla.org/en-US/docs/Web/API/File}
* object.
* @param {Blob|File} blob The Blob or File object to read from.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
export function fromBlob(blob: Blob | File, signal?: AbortSignal | undefined): Promise<GeoTIFF>;
/**
* Construct a MultiGeoTIFF from the given URLs.
* @param {string} mainUrl The URL for the main file.
* @param {string[]} overviewUrls An array of URLs for the overview images.
* @param {Object} [options] Additional options to pass to the source.
* See [makeRemoteSource]{@link module:source.makeRemoteSource}
* for details.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<MultiGeoTIFF>} The resulting MultiGeoTIFF file.
*/
export function fromUrls(mainUrl: string, overviewUrls?: string[], options?: any, signal?: AbortSignal | undefined): Promise<MultiGeoTIFF>;
/**
 * Main creation function for GeoTIFF files: encodes pixel values plus TIFF
 * metadata into a file buffer.
 * @param {Array} values the pixel values to encode
 * @param {object} metadata the TIFF tag metadata to write alongside the values
 * @returns {*} the encoded file (the declaration is untyped; presumably an
 *              ArrayBuffer — confirm against geotiffwriter.js)
 */
export function writeArrayBuffer(values: any, metadata: any): any;
export default GeoTIFF;
export type TypedArray = Uint8Array | Int8Array | Uint16Array | Int16Array | Uint32Array | Int32Array | Float32Array | Float64Array;
export type GeoTIFFOptions = {
/**
* whether or not decoded tiles shall be cached.
*/
cache?: boolean | undefined;
};
/**
* @typedef {Object} GeoTIFFOptions
* @property {boolean} [cache=false] whether or not decoded tiles shall be cached.
*/
/**
* The abstraction for a whole GeoTIFF file.
* @augments GeoTIFFBase
*/
export class GeoTIFF extends GeoTIFFBase {
/**
* Parse a (Geo)TIFF file from the given source.
*
* @param {*} source The source of data to parse from.
* @param {GeoTIFFOptions} [options] Additional options.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
*/
static fromSource(source: any, options?: GeoTIFFOptions | undefined, signal?: AbortSignal | undefined): Promise<GeoTIFF>;
/**
* @constructor
* @param {*} source The datasource to read from.
* @param {boolean} littleEndian Whether the image uses little endian.
* @param {boolean} bigTiff Whether the image uses bigTIFF conventions.
* @param {number} firstIFDOffset The numeric byte-offset from the start of the image
* to the first IFD.
* @param {GeoTIFFOptions} [options] further options.
*/
constructor(source: any, littleEndian: boolean, bigTiff: boolean, firstIFDOffset: number, options?: GeoTIFFOptions | undefined);
source: any;
littleEndian: boolean;
bigTiff: boolean;
firstIFDOffset: number;
cache: boolean;
ifdRequests: any[];
ghostValues: {} | null;
getSlice(offset: any, size: any): Promise<DataSlice>;
/**
* Instructs to parse an image file directory at the given file offset.
* As there is no way to ensure that a location is indeed the start of an IFD,
* this function must be called with caution (e.g only using the IFD offsets from
* the headers or other IFDs).
* @param {number} offset the offset to parse the IFD at
* @returns {Promise<ImageFileDirectory>} the parsed IFD
*/
parseFileDirectoryAt(offset: number): Promise<ImageFileDirectory>;
requestIFD(index: any): Promise<any>;
/**
* Get the n-th internal subfile of an image. By default, the first is returned.
*
* @param {number} [index=0] the index of the image to return.
* @returns {Promise<GeoTIFFImage>} the image at the given index
*/
getImage(index?: number | undefined): Promise<GeoTIFFImage>;
/**
* Returns the count of the internal subfiles.
*
* @returns {Promise<number>} the number of internal subfile images
*/
getImageCount(): Promise<number>;
/**
* Get the values of the COG ghost area as a parsed map.
* See https://gdal.org/drivers/raster/cog.html#header-ghost-area for reference
* @returns {Promise<Object>} the parsed ghost area or null, if no such area was found
*/
getGhostValues(): Promise<any>;
/**
* Closes the underlying file buffer
* N.B. After the GeoTIFF has been completely processed it needs
* to be closed but only if it has been constructed from a file.
*/
close(): any;
}
/**
* Wrapper for GeoTIFF files that have external overviews.
* @augments GeoTIFFBase
*/
export class MultiGeoTIFF extends GeoTIFFBase {
/**
* Construct a new MultiGeoTIFF from a main and several overview files.
* @param {GeoTIFF} mainFile The main GeoTIFF file.
* @param {GeoTIFF[]} overviewFiles An array of overview files.
*/
constructor(mainFile: GeoTIFF, overviewFiles: GeoTIFF[]);
mainFile: GeoTIFF;
overviewFiles: GeoTIFF[];
imageFiles: GeoTIFF[];
fileDirectoriesPerFile: ImageFileDirectory[] | null;
fileDirectoriesPerFileParsing: any;
imageCount: number | null;
parseFileDirectoriesPerFile(): Promise<ImageFileDirectory[]>;
/**
* Get the n-th internal subfile of an image. By default, the first is returned.
*
* @param {number} [index=0] the index of the image to return.
* @returns {Promise<GeoTIFFImage>} the image at the given index
*/
getImage(index?: number | undefined): Promise<GeoTIFFImage>;
/**
* Returns the count of the internal subfiles.
*
* @returns {Promise<number>} the number of internal subfile images
*/
getImageCount(): Promise<number>;
imageCounts: number[] | undefined;
}
import * as globals from "./globals.js";
import * as rgb from "./rgb.js";
import { getDecoder } from "./compression/index.js";
import { addDecoder } from "./compression/index.js";
import { setLogger } from "./logging.js";
import Pool from "./pool.js";
import GeoTIFFImage from "./geotiffimage.js";
declare class GeoTIFFBase {
/**
* (experimental) Reads raster data from the best fitting image. This function uses
* the image with the lowest resolution that is still a higher resolution than the
* requested resolution.
* When specified, the `bbox` option is translated to the `window` option and the
* `resX` and `resY` to `width` and `height` respectively.
* Then, the [readRasters]{@link GeoTIFFImage#readRasters} method of the selected
* image is called and the result returned.
* @see GeoTIFFImage.readRasters
* @param {import('./geotiffimage').ReadRasterOptions} [options={}] optional parameters
* @returns {Promise<(TypedArray|TypedArray[])>} the decoded arrays as a promise
*/
readRasters(options?: import("./geotiffimage.js").ReadRasterOptions | undefined): Promise<(TypedArray | TypedArray[])>;
}
import DataSlice from "./dataslice.js";
/**
* Data class to store the parsed file directory, geo key directory and
* offset to the next IFD
*/
declare class ImageFileDirectory {
constructor(fileDirectory: any, geoKeyDirectory: any, nextIFDByteOffset: any);
fileDirectory: any;
geoKeyDirectory: any;
nextIFDByteOffset: any;
}
export { globals, rgb, getDecoder, addDecoder, setLogger, Pool, GeoTIFFImage };
//# sourceMappingURL=geotiff.d.ts.map

1
node_modules/geotiff/dist-node/geotiff.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"geotiff.d.ts","sourceRoot":"","sources":["../dist-module/geotiff.js"],"names":[],"mappings":"AAuoBA;;;;;;;;GAQG;AACH,6BAPW,MAAM,YACN,MAAM,qCAIJ,QAAQ,OAAO,CAAC,CAI5B;AAED;;;;;;;GAOG;AACH,6CALW,WAAW,qCAGT,QAAQ,OAAO,CAAC,CAI5B;AAED;;;;;;;;;;;GAWG;AACH,+BALW,MAAM,qCAGJ,QAAQ,OAAO,CAAC,CAI5B;AAED;;;;;;;;;GASG;AACH,+BALW,IAAI,OAAK,qCAGP,QAAQ,OAAO,CAAC,CAI5B;AAED;;;;;;;;;;GAUG;AACH,kCATW,MAAM,iBACN,MAAM,EAAE,oDAMN,QAAQ,YAAY,CAAC,CASjC;AAED;;;;GAIG;AACH,kEAEC;;yBAjsBY,UAAU,GAAG,SAAS,GAAG,WAAW,GAAG,UAAU,GAAG,WAAW,GAAG,UAAU,GAAG,YAAY,GAAG,YAAY;;;;;;;AA+PvH;;;GAGG;AAEH;;;GAGG;AACH;IA+NE;;;;;;;OAOG;IACH,yHAgCC;IAtQD;;;;;;;;OAQG;IACH,uCANW,OAAO,WACP,OAAO,kBACP,MAAM,wCAahB;IAPC,YAAoB;IACpB,sBAAgC;IAChC,iBAAsB;IACtB,uBAAoC;IACpC,eAAmC;IACnC,mBAAqB;IACrB,uBAAuB;IAGzB,qDAWC;IAED;;;;;;;OAOG;IACH,6BAHW,MAAM,GACJ,QAAQ,kBAAkB,CAAC,CA0EvC;IAED,qCAkCC;IAED;;;;;OAKG;IACH,sCAFa,QAAQ,YAAY,CAAC,CAQjC;IAED;;;;OAIG;IACH,iBAFa,QAAQ,MAAM,CAAC,CAmB3B;IAED;;;;OAIG;IACH,kBAFa,YAAe,CA4B3B;IA4CD;;;;OAIG;IACH,aAKC;CACF;AAKD;;;GAGG;AACH;IACE;;;;OAIG;IACH,sBAHW,OAAO,iBACP,OAAO,EAAE,EAWnB;IAPC,kBAAwB;IACxB,yBAAkC;IAClC,sBAAkD;IAElD,oDAAkC;IAClC,mCAAyC;IACzC,0BAAsB;IAGxB,6DAMC;IAED;;;;;OAKG;IACH,sCAFa,QAAQ,YAAY,CAAC,CAwBjC;IAED;;;;OAIG;IACH,iBAFa,QAAQ,MAAM,CAAC,CAW3B;IAHC,kCAA8C;CAIjD;;;;;;;;AAvdD;IACE;;;;;;;;;;;OAWG;IACH,kFAFa,QAAQ,CAAC,UAAU,GAAC,UAAU,EAAE,CAAC,CAAC,CA6F9C;CACF;;AAhID;;;GAGG;AACH;IACE,8EAIC;IAHC,mBAAkC;IAClC,qBAAsC;IACtC,uBAA0C;CAE7C"}

716
node_modules/geotiff/dist-node/geotiff.js generated vendored Normal file
View File

@@ -0,0 +1,716 @@
"use strict";
// --- CommonJS interop helpers emitted by the TypeScript compiler. ---
// Boilerplate generated by `tsc` to emulate ES-module import semantics on
// top of CommonJS; not hand-written application code.
// Re-exports property `k` of module `m` on `o` (live binding when possible).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Marks `v` as the default export on a namespace object `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Emulates `import * as ns from 'mod'` for non-ESM modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// Emulates a default import for non-ESM modules.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.GeoTIFFImage = exports.Pool = exports.writeArrayBuffer = exports.fromUrls = exports.fromBlob = exports.fromFile = exports.fromArrayBuffer = exports.fromUrl = exports.MultiGeoTIFF = exports.GeoTIFF = exports.setLogger = exports.addDecoder = exports.getDecoder = exports.rgb = exports.globals = void 0;
/** @module geotiff */
const geotiffimage_js_1 = __importDefault(require("./geotiffimage.js"));
exports.GeoTIFFImage = geotiffimage_js_1.default;
const dataview64_js_1 = __importDefault(require("./dataview64.js"));
const dataslice_js_1 = __importDefault(require("./dataslice.js"));
const pool_js_1 = __importDefault(require("./pool.js"));
exports.Pool = pool_js_1.default;
const remote_js_1 = require("./source/remote.js");
const arraybuffer_js_1 = require("./source/arraybuffer.js");
const filereader_js_1 = require("./source/filereader.js");
const file_js_1 = require("./source/file.js");
const globals_js_1 = require("./globals.js");
const geotiffwriter_js_1 = require("./geotiffwriter.js");
const globals = __importStar(require("./globals.js"));
exports.globals = globals;
const rgb = __importStar(require("./rgb.js"));
exports.rgb = rgb;
const index_js_1 = require("./compression/index.js");
Object.defineProperty(exports, "getDecoder", { enumerable: true, get: function () { return index_js_1.getDecoder; } });
Object.defineProperty(exports, "addDecoder", { enumerable: true, get: function () { return index_js_1.addDecoder; } });
const logging_js_1 = require("./logging.js");
Object.defineProperty(exports, "setLogger", { enumerable: true, get: function () { return logging_js_1.setLogger; } });
/**
* @typedef {Uint8Array | Int8Array | Uint16Array | Int16Array | Uint32Array | Int32Array | Float32Array | Float64Array}
* TypedArray
*/
/**
 * Returns the size in bytes of a single value of the given TIFF field type.
 * @param {number} fieldType one of the `fieldTypes` constants
 * @returns {number} 1, 2, 4 or 8
 * @throws {RangeError} for an unknown field type
 */
function getFieldTypeLength(fieldType) {
    const t = globals_js_1.fieldTypes;
    // One-byte types.
    if (fieldType === t.BYTE || fieldType === t.ASCII
        || fieldType === t.SBYTE || fieldType === t.UNDEFINED) {
        return 1;
    }
    // Two-byte types.
    if (fieldType === t.SHORT || fieldType === t.SSHORT) {
        return 2;
    }
    // Four-byte types.
    if (fieldType === t.LONG || fieldType === t.SLONG
        || fieldType === t.FLOAT || fieldType === t.IFD) {
        return 4;
    }
    // Eight-byte types (rationals count as one 8-byte num/denom pair).
    if (fieldType === t.RATIONAL || fieldType === t.SRATIONAL
        || fieldType === t.DOUBLE || fieldType === t.LONG8
        || fieldType === t.SLONG8 || fieldType === t.IFD8) {
        return 8;
    }
    throw new RangeError(`Invalid field type: ${fieldType}`);
}
/**
 * Parses the GeoKeyDirectory tag of a file directory into a plain object
 * mapping geo key names to values. The raw directory is a flat SHORT array
 * of 4-value entries [keyId, tagLocation, count, valueOffset]; entry 0 is
 * the header whose fourth short holds the number of keys.
 * @param {Object} fileDirectory the already-parsed tag map
 * @returns {Object|null} the parsed geo keys, or null if the tag is absent
 */
function parseGeoKeyDirectory(fileDirectory) {
    const rawGeoKeyDirectory = fileDirectory.GeoKeyDirectory;
    if (!rawGeoKeyDirectory) {
        return null;
    }
    const geoKeyDirectory = {};
    // Start at index 4 (first entry after the header); rawGeoKeyDirectory[3]
    // is the key count.
    for (let i = 4; i <= rawGeoKeyDirectory[3] * 4; i += 4) {
        const key = globals_js_1.geoKeyNames[rawGeoKeyDirectory[i]];
        // A non-zero tagLocation names the tag that stores the value; zero
        // means the value is inlined in the `offset` slot itself.
        const location = (rawGeoKeyDirectory[i + 1])
            ? (globals_js_1.fieldTagNames[rawGeoKeyDirectory[i + 1]]) : null;
        const count = rawGeoKeyDirectory[i + 2];
        const offset = rawGeoKeyDirectory[i + 3];
        let value = null;
        if (!location) {
            value = offset;
        }
        else {
            value = fileDirectory[location];
            if (typeof value === 'undefined' || value === null) {
                throw new Error(`Could not get value of geoKey '${key}'.`);
            }
            else if (typeof value === 'string') {
                // String values: slice out count - 1 characters starting at
                // offset (the final character is excluded).
                value = value.substring(offset, offset + count - 1);
            }
            else if (value.subarray) {
                value = value.subarray(offset, offset + count);
                // Unwrap single-element arrays to a scalar.
                if (count === 1) {
                    value = value[0];
                }
            }
        }
        geoKeyDirectory[key] = value;
    }
    return geoKeyDirectory;
}
/**
 * Reads `count` values of the given TIFF field type from `dataSlice`,
 * starting at `offset`. Returns a typed array matching the field type;
 * ASCII data is decoded to a UTF-8 string instead.
 * @throws {RangeError} for an unknown field type
 */
function getValues(dataSlice, fieldType, count, offset) {
    let values = null;
    let readMethod = null;
    const fieldTypeLength = getFieldTypeLength(fieldType);
    // Pick the output array type and the matching reader for the field type.
    switch (fieldType) {
        case globals_js_1.fieldTypes.BYTE:
        case globals_js_1.fieldTypes.ASCII:
        case globals_js_1.fieldTypes.UNDEFINED:
            values = new Uint8Array(count);
            readMethod = dataSlice.readUint8;
            break;
        case globals_js_1.fieldTypes.SBYTE:
            values = new Int8Array(count);
            readMethod = dataSlice.readInt8;
            break;
        case globals_js_1.fieldTypes.SHORT:
            values = new Uint16Array(count);
            readMethod = dataSlice.readUint16;
            break;
        case globals_js_1.fieldTypes.SSHORT:
            values = new Int16Array(count);
            readMethod = dataSlice.readInt16;
            break;
        case globals_js_1.fieldTypes.LONG:
        case globals_js_1.fieldTypes.IFD:
            values = new Uint32Array(count);
            readMethod = dataSlice.readUint32;
            break;
        case globals_js_1.fieldTypes.SLONG:
            values = new Int32Array(count);
            readMethod = dataSlice.readInt32;
            break;
        case globals_js_1.fieldTypes.LONG8:
        case globals_js_1.fieldTypes.IFD8:
            // 64-bit values go into a plain Array of JS numbers.
            values = new Array(count);
            readMethod = dataSlice.readUint64;
            break;
        case globals_js_1.fieldTypes.SLONG8:
            values = new Array(count);
            readMethod = dataSlice.readInt64;
            break;
        case globals_js_1.fieldTypes.RATIONAL:
            // Rationals are stored as numerator/denominator pairs of 32-bit
            // values, so the array holds two entries per logical value.
            values = new Uint32Array(count * 2);
            readMethod = dataSlice.readUint32;
            break;
        case globals_js_1.fieldTypes.SRATIONAL:
            values = new Int32Array(count * 2);
            readMethod = dataSlice.readInt32;
            break;
        case globals_js_1.fieldTypes.FLOAT:
            values = new Float32Array(count);
            readMethod = dataSlice.readFloat32;
            break;
        case globals_js_1.fieldTypes.DOUBLE:
            values = new Float64Array(count);
            readMethod = dataSlice.readFloat64;
            break;
        default:
            throw new RangeError(`Invalid field type: ${fieldType}`);
    }
    // normal fields
    if (!(fieldType === globals_js_1.fieldTypes.RATIONAL || fieldType === globals_js_1.fieldTypes.SRATIONAL)) {
        for (let i = 0; i < count; ++i) {
            values[i] = readMethod.call(dataSlice, offset + (i * fieldTypeLength));
        }
    }
    else { // RATIONAL or SRATIONAL
        // NOTE(review): for count > 1 this loop fills only `count` of the
        // `count * 2` allocated slots (i steps by 2 but stops at count) and
        // advances 8 bytes per two array entries — looks suspicious for
        // multi-value rational tags; confirm against upstream geotiff.js
        // before relying on it. Single-value rationals (count === 1, the
        // common case) are read correctly.
        for (let i = 0; i < count; i += 2) {
            values[i] = readMethod.call(dataSlice, offset + (i * fieldTypeLength));
            values[i + 1] = readMethod.call(dataSlice, offset + ((i * fieldTypeLength) + 4));
        }
    }
    if (fieldType === globals_js_1.fieldTypes.ASCII) {
        return new TextDecoder('utf-8').decode(values);
    }
    return values;
}
/**
 * Record bundling everything produced by parsing one IFD: the decoded tag
 * map (file directory), the parsed geo key directory, and the byte offset
 * at which the next IFD (if any) starts.
 */
class ImageFileDirectory {
    constructor(fileDirectory, geoKeyDirectory, nextIFDByteOffset) {
        // Stash all three parse results verbatim on the instance.
        Object.assign(this, { fileDirectory, geoKeyDirectory, nextIFDByteOffset });
    }
}
/**
 * Thrown when a caller requests an image index that the file does not
 * contain, i.e. the IFD chain ended before reaching that index.
 */
class GeoTIFFImageIndexError extends Error {
    constructor(index) {
        super(`No image at index ${index}`);
        // Expose the offending index so callers can distinguish end-of-chain
        // from other failures.
        this.index = index;
    }
}
class GeoTIFFBase {
    /**
     * (experimental) Reads raster data from the best fitting image. This function uses
     * the image with the lowest resolution that is still a higher resolution than the
     * requested resolution.
     * When specified, the `bbox` option is translated to the `window` option and the
     * `resX` and `resY` to `width` and `height` respectively.
     * Then, the [readRasters]{@link GeoTIFFImage#readRasters} method of the selected
     * image is called and the result returned.
     * @see GeoTIFFImage.readRasters
     * @param {import('./geotiffimage').ReadRasterOptions} [options={}] optional parameters
     * @returns {Promise<(TypedArray|TypedArray[])>} the decoded arrays as a promise
     */
    async readRasters(options = {}) {
        const { window: imageWindow, width, height } = options;
        let { resX, resY, bbox } = options;
        const firstImage = await this.getImage();
        let usedImage = firstImage;
        const imageCount = await this.getImageCount();
        const imgBBox = firstImage.getBoundingBox();
        // "bbox" (geo coordinates) and "window" (pixel coordinates) are two
        // mutually exclusive ways to select a region.
        if (imageWindow && bbox) {
            throw new Error('Both "bbox" and "window" passed.');
        }
        // if width/height is passed, transform it to resolution
        if (width || height) {
            // if we have an image window (pixel coordinates), transform it to a BBox
            // using the origin/resolution of the first image.
            if (imageWindow) {
                const [oX, oY] = firstImage.getOrigin();
                const [rX, rY] = firstImage.getResolution();
                bbox = [
                    oX + (imageWindow[0] * rX),
                    oY + (imageWindow[1] * rY),
                    oX + (imageWindow[2] * rX),
                    oY + (imageWindow[3] * rY),
                ];
            }
            // if we have a bbox (or calculated one)
            const usedBBox = bbox || imgBBox;
            if (width) {
                if (resX) {
                    throw new Error('Both width and resX passed');
                }
                resX = (usedBBox[2] - usedBBox[0]) / width;
            }
            if (height) {
                if (resY) {
                    throw new Error('Both width and resY passed');
                }
                resY = (usedBBox[3] - usedBBox[1]) / height;
            }
        }
        // if resolution is set or calculated, try to get the image with the worst acceptable resolution
        if (resX || resY) {
            const allImages = [];
            for (let i = 0; i < imageCount; ++i) {
                const image = await this.getImage(i);
                const { SubfileType: subfileType, NewSubfileType: newSubfileType } = image.fileDirectory;
                // Only consider the first image and subfiles flagged as reduced
                // resolution (SubfileType === 2 or NewSubfileType bit 0 set).
                if (i === 0 || subfileType === 2 || newSubfileType & 1) {
                    allImages.push(image);
                }
            }
            allImages.sort((a, b) => a.getWidth() - b.getWidth());
            // Walk candidates from smallest to largest and stop at the first
            // whose resolution is finer than the requested one.
            for (let i = 0; i < allImages.length; ++i) {
                const image = allImages[i];
                const imgResX = (imgBBox[2] - imgBBox[0]) / image.getWidth();
                const imgResY = (imgBBox[3] - imgBBox[1]) / image.getHeight();
                usedImage = image;
                if ((resX && resX > imgResX) || (resY && resY > imgResY)) {
                    break;
                }
            }
        }
        let wnd = imageWindow;
        if (bbox) {
            // Translate the geographic bbox into a pixel window of the chosen
            // image, then normalise so min comes before max on each axis.
            const [oX, oY] = firstImage.getOrigin();
            const [imageResX, imageResY] = usedImage.getResolution(firstImage);
            wnd = [
                Math.round((bbox[0] - oX) / imageResX),
                Math.round((bbox[1] - oY) / imageResY),
                Math.round((bbox[2] - oX) / imageResX),
                Math.round((bbox[3] - oY) / imageResY),
            ];
            wnd = [
                Math.min(wnd[0], wnd[2]),
                Math.min(wnd[1], wnd[3]),
                Math.max(wnd[0], wnd[2]),
                Math.max(wnd[1], wnd[3]),
            ];
        }
        return usedImage.readRasters({ ...options, window: wnd });
    }
}
/**
* @typedef {Object} GeoTIFFOptions
* @property {boolean} [cache=false] whether or not decoded tiles shall be cached.
*/
/**
* The abstraction for a whole GeoTIFF file.
* @augments GeoTIFFBase
*/
class GeoTIFF extends GeoTIFFBase {
    /**
     * @constructor
     * @param {*} source The datasource to read from.
     * @param {boolean} littleEndian Whether the image uses little endian.
     * @param {boolean} bigTiff Whether the image uses bigTIFF conventions.
     * @param {number} firstIFDOffset The numeric byte-offset from the start of the image
     * to the first IFD.
     * @param {GeoTIFFOptions} [options] further options.
     */
    constructor(source, littleEndian, bigTiff, firstIFDOffset, options = {}) {
        super();
        this.source = source;
        this.littleEndian = littleEndian;
        this.bigTiff = bigTiff;
        this.firstIFDOffset = firstIFDOffset;
        // Whether decoded tiles may be cached (off by default).
        this.cache = options.cache || false;
        // Lazily populated promise per requested IFD index (see requestIFD).
        this.ifdRequests = [];
        // Parsed COG ghost-area values; null until getGhostValues() runs.
        this.ghostValues = null;
    }
    // Fetches `size` bytes (or a default-sized chunk) at `offset` and wraps
    // them in a DataSlice honouring this file's endianness/BigTIFF-ness.
    async getSlice(offset, size) {
        // NOTE(review): 4048 is upstream's literal fallback size (possibly a
        // typo for 4096) — preserved as-is; it only affects read granularity.
        const fallbackSize = this.bigTiff ? 4048 : 1024;
        return new dataslice_js_1.default((await this.source.fetch([{
                offset,
                length: typeof size !== 'undefined' ? size : fallbackSize,
            }]))[0], offset, this.littleEndian, this.bigTiff);
    }
    /**
     * Instructs to parse an image file directory at the given file offset.
     * As there is no way to ensure that a location is indeed the start of an IFD,
     * this function must be called with caution (e.g only using the IFD offsets from
     * the headers or other IFDs).
     * @param {number} offset the offset to parse the IFD at
     * @returns {Promise<ImageFileDirectory>} the parsed IFD
     */
    async parseFileDirectoryAt(offset) {
        // Entry and count-field sizes differ between classic TIFF and BigTIFF.
        const entrySize = this.bigTiff ? 20 : 12;
        const offsetSize = this.bigTiff ? 8 : 2;
        let dataSlice = await this.getSlice(offset);
        const numDirEntries = this.bigTiff
            ? dataSlice.readUint64(offset)
            : dataSlice.readUint16(offset);
        // if the slice does not cover the whole IFD, request a bigger slice, where the
        // whole IFD fits: num of entries + n x tag length + offset to next IFD
        const byteSize = (numDirEntries * entrySize) + (this.bigTiff ? 16 : 6);
        if (!dataSlice.covers(offset, byteSize)) {
            dataSlice = await this.getSlice(offset, byteSize);
        }
        const fileDirectory = {};
        // loop over the IFD and create a file directory object
        let i = offset + (this.bigTiff ? 8 : 2);
        for (let entryCount = 0; entryCount < numDirEntries; i += entrySize, ++entryCount) {
            const fieldTag = dataSlice.readUint16(i);
            const fieldType = dataSlice.readUint16(i + 2);
            const typeCount = this.bigTiff
                ? dataSlice.readUint64(i + 4)
                : dataSlice.readUint32(i + 4);
            let fieldValues;
            let value;
            const fieldTypeLength = getFieldTypeLength(fieldType);
            const valueOffset = i + (this.bigTiff ? 12 : 8);
            // check whether the value is directly encoded in the tag or refers to a
            // different external byte range
            if (fieldTypeLength * typeCount <= (this.bigTiff ? 8 : 4)) {
                fieldValues = getValues(dataSlice, fieldType, typeCount, valueOffset);
            }
            else {
                // resolve the reference to the actual byte range
                const actualOffset = dataSlice.readOffset(valueOffset);
                const length = getFieldTypeLength(fieldType) * typeCount;
                // check, whether we actually cover the referenced byte range; if not,
                // request a new slice of bytes to read from it
                if (dataSlice.covers(actualOffset, length)) {
                    fieldValues = getValues(dataSlice, fieldType, typeCount, actualOffset);
                }
                else {
                    const fieldDataSlice = await this.getSlice(actualOffset, length);
                    fieldValues = getValues(fieldDataSlice, fieldType, typeCount, actualOffset);
                }
            }
            // unpack single values from the array
            if (typeCount === 1 && globals_js_1.arrayFields.indexOf(fieldTag) === -1
                && !(fieldType === globals_js_1.fieldTypes.RATIONAL || fieldType === globals_js_1.fieldTypes.SRATIONAL)) {
                value = fieldValues[0];
            }
            else {
                value = fieldValues;
            }
            // write the tags value to the file directly
            fileDirectory[globals_js_1.fieldTagNames[fieldTag]] = value;
        }
        const geoKeyDirectory = parseGeoKeyDirectory(fileDirectory);
        // The offset to the next IFD follows the last entry; 0 means "no more".
        const nextIFDByteOffset = dataSlice.readOffset(offset + offsetSize + (entrySize * numDirEntries));
        return new ImageFileDirectory(fileDirectory, geoKeyDirectory, nextIFDByteOffset);
    }
    // Returns (and memoises) a promise for the IFD at `index`, chasing the
    // IFD chain through preceding indices as needed.
    async requestIFD(index) {
        // see if we already have that IFD index requested.
        if (this.ifdRequests[index]) {
            // attach to an already requested IFD
            return this.ifdRequests[index];
        }
        else if (index === 0) {
            // special case for index 0
            this.ifdRequests[index] = this.parseFileDirectoryAt(this.firstIFDOffset);
            return this.ifdRequests[index];
        }
        else if (!this.ifdRequests[index - 1]) {
            // if the previous IFD was not yet loaded, load that one first
            // this is the recursive call.
            try {
                this.ifdRequests[index - 1] = this.requestIFD(index - 1);
            }
            catch (e) {
                // if the previous one already was an index error, rethrow
                // with the current index
                if (e instanceof GeoTIFFImageIndexError) {
                    throw new GeoTIFFImageIndexError(index);
                }
                // rethrow anything else
                throw e;
            }
        }
        // if the previous IFD was loaded, we can finally fetch the one we are interested in.
        // we need to wrap this in an IIFE, otherwise this.ifdRequests[index] would be delayed
        this.ifdRequests[index] = (async () => {
            const previousIfd = await this.ifdRequests[index - 1];
            // A zero next-IFD offset marks the end of the IFD chain.
            if (previousIfd.nextIFDByteOffset === 0) {
                throw new GeoTIFFImageIndexError(index);
            }
            return this.parseFileDirectoryAt(previousIfd.nextIFDByteOffset);
        })();
        return this.ifdRequests[index];
    }
    /**
     * Get the n-th internal subfile of an image. By default, the first is returned.
     *
     * @param {number} [index=0] the index of the image to return.
     * @returns {Promise<GeoTIFFImage>} the image at the given index
     */
    async getImage(index = 0) {
        const ifd = await this.requestIFD(index);
        // NOTE(review): `this.dataView` is never assigned anywhere in this
        // class, so GeoTIFFImage receives `undefined` here — confirm against
        // upstream geotiff.js whether the argument is vestigial.
        return new geotiffimage_js_1.default(ifd.fileDirectory, ifd.geoKeyDirectory, this.dataView, this.littleEndian, this.cache, this.source);
    }
    /**
     * Returns the count of the internal subfiles.
     *
     * @returns {Promise<number>} the number of internal subfile images
     */
    async getImageCount() {
        let index = 0;
        // loop until we run out of IFDs
        let hasNext = true;
        while (hasNext) {
            try {
                await this.requestIFD(index);
                ++index;
            }
            catch (e) {
                // GeoTIFFImageIndexError signals the end of the IFD chain;
                // anything else is a real failure and propagates.
                if (e instanceof GeoTIFFImageIndexError) {
                    hasNext = false;
                }
                else {
                    throw e;
                }
            }
        }
        return index;
    }
    /**
     * Get the values of the COG ghost area as a parsed map.
     * See https://gdal.org/drivers/raster/cog.html#header-ghost-area for reference
     * @returns {Promise<Object>} the parsed ghost area or null, if no such area was found
     */
    async getGhostValues() {
        // The ghost area starts directly after the file header.
        const offset = this.bigTiff ? 16 : 8;
        if (this.ghostValues) {
            return this.ghostValues;
        }
        const detectionString = 'GDAL_STRUCTURAL_METADATA_SIZE=';
        const heuristicAreaSize = detectionString.length + 100;
        let slice = await this.getSlice(offset, heuristicAreaSize);
        if (detectionString === getValues(slice, globals_js_1.fieldTypes.ASCII, detectionString.length, offset)) {
            const valuesString = getValues(slice, globals_js_1.fieldTypes.ASCII, heuristicAreaSize, offset);
            const firstLine = valuesString.split('\n')[0];
            const metadataSize = Number(firstLine.split('=')[1].split(' ')[0]) + firstLine.length;
            // Re-read if the declared metadata size exceeds the heuristic guess.
            if (metadataSize > heuristicAreaSize) {
                slice = await this.getSlice(offset, metadataSize);
            }
            const fullString = getValues(slice, globals_js_1.fieldTypes.ASCII, metadataSize, offset);
            this.ghostValues = {};
            // Each non-empty line is a KEY=VALUE pair.
            fullString
                .split('\n')
                .filter((line) => line.length > 0)
                .map((line) => line.split('='))
                .forEach(([key, value]) => {
                this.ghostValues[key] = value;
            });
        }
        return this.ghostValues;
    }
    /**
     * Parse a (Geo)TIFF file from the given source.
     *
     * @param {*} source The source of data to parse from.
     * @param {GeoTIFFOptions} [options] Additional options.
     * @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
     * to be aborted
     */
    static async fromSource(source, options, signal) {
        const headerData = (await source.fetch([{ offset: 0, length: 1024 }], signal))[0];
        const dataView = new dataview64_js_1.default(headerData);
        // Byte-order mark: 'II' (0x4949) = little endian, 'MM' (0x4D4D) = big.
        const BOM = dataView.getUint16(0, 0);
        let littleEndian;
        if (BOM === 0x4949) {
            littleEndian = true;
        }
        else if (BOM === 0x4D4D) {
            littleEndian = false;
        }
        else {
            throw new TypeError('Invalid byte order value.');
        }
        // Magic number: 42 = classic TIFF, 43 = BigTIFF.
        const magicNumber = dataView.getUint16(2, littleEndian);
        let bigTiff;
        if (magicNumber === 42) {
            bigTiff = false;
        }
        else if (magicNumber === 43) {
            bigTiff = true;
            const offsetByteSize = dataView.getUint16(4, littleEndian);
            if (offsetByteSize !== 8) {
                throw new Error('Unsupported offset byte-size.');
            }
        }
        else {
            throw new TypeError('Invalid magic number.');
        }
        const firstIFDOffset = bigTiff
            ? dataView.getUint64(8, littleEndian)
            : dataView.getUint32(4, littleEndian);
        return new GeoTIFF(source, littleEndian, bigTiff, firstIFDOffset, options);
    }
    /**
     * Closes the underlying file buffer
     * N.B. After the GeoTIFF has been completely processed it needs
     * to be closed but only if it has been constructed from a file.
     */
    close() {
        if (typeof this.source.close === 'function') {
            return this.source.close();
        }
        return false;
    }
}
// GeoTIFF is exposed both as a named export and as the module default.
exports.GeoTIFF = GeoTIFF;
exports.default = GeoTIFF;
/**
* Wrapper for GeoTIFF files that have external overviews.
* @augments GeoTIFFBase
*/
class MultiGeoTIFF extends GeoTIFFBase {
    /**
     * Construct a new MultiGeoTIFF from a main and several overview files.
     * @param {GeoTIFF} mainFile The main GeoTIFF file.
     * @param {GeoTIFF[]} overviewFiles An array of overview files.
     */
    constructor(mainFile, overviewFiles) {
        super();
        this.mainFile = mainFile;
        this.overviewFiles = overviewFiles;
        this.imageFiles = [mainFile].concat(overviewFiles);
        // Lazily filled caches; see parseFileDirectoriesPerFile / getImageCount.
        this.fileDirectoriesPerFile = null;
        this.fileDirectoriesPerFileParsing = null;
        this.imageCount = null;
    }
    /**
     * Parses the first file directory of every underlying file. The result is
     * cached and concurrent calls share a single in-flight parse, so each file
     * is only read once (previously every call re-parsed all files, and
     * getImage() calls this on every lookup).
     * @returns {Promise<Array>} the parsed file directories, one per file
     */
    async parseFileDirectoriesPerFile() {
        if (this.fileDirectoriesPerFile) {
            return this.fileDirectoriesPerFile;
        }
        if (!this.fileDirectoriesPerFileParsing) {
            const requests = [this.mainFile.parseFileDirectoryAt(this.mainFile.firstIFDOffset)]
                .concat(this.overviewFiles.map((file) => file.parseFileDirectoryAt(file.firstIFDOffset)));
            this.fileDirectoriesPerFileParsing = Promise.all(requests);
        }
        this.fileDirectoriesPerFile = await this.fileDirectoriesPerFileParsing;
        return this.fileDirectoriesPerFile;
    }
    /**
     * Get the n-th internal subfile of an image. By default, the first is returned.
     *
     * @param {number} [index=0] the index of the image to return.
     * @returns {Promise<GeoTIFFImage>} the image at the given index
     */
    async getImage(index = 0) {
        await this.getImageCount();
        await this.parseFileDirectoriesPerFile();
        // Walk the files in order, mapping the global image index onto a
        // (file, index-within-file) pair.
        let visited = 0;
        let relativeIndex = 0;
        for (let i = 0; i < this.imageFiles.length; i++) {
            const imageFile = this.imageFiles[i];
            for (let ii = 0; ii < this.imageCounts[i]; ii++) {
                if (index === visited) {
                    const ifd = await imageFile.requestIFD(relativeIndex);
                    return new geotiffimage_js_1.default(ifd.fileDirectory, ifd.geoKeyDirectory, imageFile.dataView, imageFile.littleEndian, imageFile.cache, imageFile.source);
                }
                visited++;
                relativeIndex++;
            }
            relativeIndex = 0;
        }
        throw new RangeError('Invalid image index');
    }
    /**
     * Returns the count of the internal subfiles.
     *
     * @returns {Promise<number>} the number of internal subfile images
     */
    async getImageCount() {
        if (this.imageCount !== null) {
            return this.imageCount;
        }
        const requests = [this.mainFile.getImageCount()]
            .concat(this.overviewFiles.map((file) => file.getImageCount()));
        // Cache the per-file counts for getImage()'s index walk.
        this.imageCounts = await Promise.all(requests);
        this.imageCount = this.imageCounts.reduce((count, ifds) => count + ifds, 0);
        return this.imageCount;
    }
}
exports.MultiGeoTIFF = MultiGeoTIFF;
/**
* Creates a new GeoTIFF from a remote URL.
* @param {string} url The URL to access the image from
* @param {object} [options] Additional options to pass to the source.
* See {@link makeRemoteSource} for details.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
/**
 * Creates a new GeoTIFF from a remote URL.
 * @param {string} url The URL to access the image from.
 * @param {object} [options] Additional options to pass to the source.
 * @param {AbortSignal} [signal] An AbortSignal that may be signalled if the
 *                               request is to be aborted.
 * @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
 */
async function fromUrl(url, options = {}, signal) {
    // fromSource expects (source, options, signal); previously the signal was
    // passed in the options slot, so aborting never reached the fetch calls.
    return GeoTIFF.fromSource((0, remote_js_1.makeRemoteSource)(url, options), options, signal);
}
exports.fromUrl = fromUrl;
/**
* Construct a new GeoTIFF from an
* [ArrayBuffer]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer}.
* @param {ArrayBuffer} arrayBuffer The data to read the file from.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
/**
 * Construct a new GeoTIFF from an ArrayBuffer.
 * @param {ArrayBuffer} arrayBuffer The data to read the file from.
 * @param {AbortSignal} [signal] An AbortSignal that may be signalled if the
 *                               request is to be aborted.
 * @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
 */
async function fromArrayBuffer(arrayBuffer, signal) {
    // Keep the options slot empty and pass the signal in its dedicated
    // third slot (previously the signal was misplaced as options).
    return GeoTIFF.fromSource((0, arraybuffer_js_1.makeBufferSource)(arrayBuffer), undefined, signal);
}
exports.fromArrayBuffer = fromArrayBuffer;
/**
* Construct a GeoTIFF from a local file path. This uses the node
* [filesystem API]{@link https://nodejs.org/api/fs.html} and is
* not available on browsers.
*
* N.B. After the GeoTIFF has been completely processed it needs
* to be closed but only if it has been constructed from a file.
* @param {string} path The file path to read from.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
/**
 * Construct a GeoTIFF from a local file path (node filesystem API; not
 * available in browsers). The resulting GeoTIFF must be close()d after use.
 * @param {string} path The file path to read from.
 * @param {AbortSignal} [signal] An AbortSignal that may be signalled if the
 *                               request is to be aborted.
 * @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
 */
async function fromFile(path, signal) {
    // Keep the options slot empty and pass the signal in its dedicated
    // third slot (previously the signal was misplaced as options).
    return GeoTIFF.fromSource((0, file_js_1.makeFileSource)(path), undefined, signal);
}
exports.fromFile = fromFile;
/**
* Construct a GeoTIFF from an HTML
* [Blob]{@link https://developer.mozilla.org/en-US/docs/Web/API/Blob} or
* [File]{@link https://developer.mozilla.org/en-US/docs/Web/API/File}
* object.
* @param {Blob|File} blob The Blob or File object to read from.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
*/
/**
 * Construct a GeoTIFF from an HTML Blob or File object.
 * @param {Blob|File} blob The Blob or File object to read from.
 * @param {AbortSignal} [signal] An AbortSignal that may be signalled if the
 *                               request is to be aborted.
 * @returns {Promise<GeoTIFF>} The resulting GeoTIFF file.
 */
async function fromBlob(blob, signal) {
    // Keep the options slot empty and pass the signal in its dedicated
    // third slot (previously the signal was misplaced as options).
    return GeoTIFF.fromSource((0, filereader_js_1.makeFileReaderSource)(blob), undefined, signal);
}
exports.fromBlob = fromBlob;
/**
* Construct a MultiGeoTIFF from the given URLs.
* @param {string} mainUrl The URL for the main file.
* @param {string[]} overviewUrls An array of URLs for the overview images.
* @param {Object} [options] Additional options to pass to the source.
* See [makeRemoteSource]{@link module:source.makeRemoteSource}
* for details.
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<MultiGeoTIFF>} The resulting MultiGeoTIFF file.
*/
/**
 * Construct a MultiGeoTIFF from the given URLs.
 * @param {string} mainUrl The URL for the main file.
 * @param {string[]} overviewUrls An array of URLs for the overview images.
 * @param {Object} [options] Additional options to pass to the source.
 * @param {AbortSignal} [signal] An AbortSignal that may be signalled if the
 *                               request is to be aborted.
 * @returns {Promise<MultiGeoTIFF>} The resulting MultiGeoTIFF file.
 */
async function fromUrls(mainUrl, overviewUrls = [], options = {}, signal) {
    // fromSource expects (source, options, signal); previously the signal was
    // misplaced into the options slot for the main file and dropped entirely
    // for the overview files.
    const mainFile = await GeoTIFF.fromSource((0, remote_js_1.makeRemoteSource)(mainUrl, options), options, signal);
    const overviewFiles = await Promise.all(overviewUrls.map((url) => GeoTIFF.fromSource((0, remote_js_1.makeRemoteSource)(url, options), options, signal)));
    return new MultiGeoTIFF(mainFile, overviewFiles);
}
exports.fromUrls = fromUrls;
/**
 * Main creation function for writing GeoTIFF files.
 * @param {Array} values the array of pixel values to encode
 * @param {Object} metadata the metadata describing the image
 * @returns {ArrayBuffer} the encoded GeoTIFF file
 */
/** Thin wrapper delegating GeoTIFF encoding to the writer module. */
function writeArrayBuffer(values, metadata) {
    const { writeGeotiff } = geotiffwriter_js_1;
    return writeGeotiff(values, metadata);
}
exports.writeArrayBuffer = writeArrayBuffer;
//# sourceMappingURL=geotiff.js.map

1
node_modules/geotiff/dist-node/geotiff.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

243
node_modules/geotiff/dist-node/geotiffimage.d.ts generated vendored Normal file
View File

@@ -0,0 +1,243 @@
export default GeoTIFFImage;
export type ReadRasterOptions = {
/**
* window] the subset to read data from in pixels.
*/
window?: number[] | undefined;
/**
* image] the subset to read data from in
* geographical coordinates.
*/
bbox?: number[] | undefined;
/**
* samples] the selection of samples to read from. Default is all samples.
*/
samples?: number[] | undefined;
/**
* whether the data shall be read
* in one single array or separate
* arrays.
*/
interleave?: boolean | undefined;
/**
* The optional decoder pool to use.
*/
pool?: any;
/**
* The desired width of the output. When the width is not the
* same as the images, resampling will be performed.
*/
width?: number | undefined;
/**
* The desired height of the output. When the width is not the
* same as the images, resampling will be performed.
*/
height?: number | undefined;
/**
* The desired resampling method.
*/
resampleMethod?: string | undefined;
/**
* An AbortSignal that may be signalled if the request is
* to be aborted
*/
signal?: AbortSignal | undefined;
/**
* The value to use for parts of the image
* outside of the images extent. When multiple
* samples are requested, an array of fill values
* can be passed.
*/
fillValue?: number | number[] | undefined;
};
export type TypedArray = import("./geotiff.js").TypedArray;
/**
* GeoTIFF sub-file image.
*/
declare class GeoTIFFImage {
/**
* @constructor
* @param {Object} fileDirectory The parsed file directory
* @param {Object} geoKeys The parsed geo-keys
* @param {DataView} dataView The DataView for the underlying file.
* @param {Boolean} littleEndian Whether the file is encoded in little or big endian
* @param {Boolean} cache Whether or not decoded tiles shall be cached
* @param {Source} source The datasource to read from
*/
constructor(fileDirectory: any, geoKeys: any, dataView: DataView, littleEndian: boolean, cache: boolean, source: Source);
fileDirectory: any;
geoKeys: any;
dataView: DataView;
littleEndian: boolean;
tiles: {} | null;
isTiled: boolean;
planarConfiguration: any;
source: Source;
/**
* Returns the associated parsed file directory.
* @returns {Object} the parsed file directory
*/
getFileDirectory(): any;
/**
* Returns the associated parsed geo keys.
* @returns {Object} the parsed geo keys
*/
getGeoKeys(): any;
/**
* Returns the width of the image.
* @returns {Number} the width of the image
*/
getWidth(): number;
/**
* Returns the height of the image.
* @returns {Number} the height of the image
*/
getHeight(): number;
/**
* Returns the number of samples per pixel.
* @returns {Number} the number of samples per pixel
*/
getSamplesPerPixel(): number;
/**
* Returns the width of each tile.
* @returns {Number} the width of each tile
*/
getTileWidth(): number;
/**
* Returns the height of each tile.
* @returns {Number} the height of each tile
*/
getTileHeight(): number;
getBlockWidth(): number;
getBlockHeight(y: any): number;
/**
* Calculates the number of bytes for each pixel across all samples. Only full
* bytes are supported, an exception is thrown when this is not the case.
* @returns {Number} the bytes per pixel
*/
getBytesPerPixel(): number;
getSampleByteSize(i: any): number;
getReaderForSample(sampleIndex: any): (byteOffset: number, littleEndian?: boolean | undefined) => number;
getSampleFormat(sampleIndex?: number): any;
getBitsPerSample(sampleIndex?: number): any;
getArrayForSample(sampleIndex: any, size: any): Uint8Array | Uint16Array | Uint32Array | Int32Array | Int8Array | Int16Array | Float32Array | Float64Array;
/**
* Returns the decoded strip or tile.
* @param {Number} x the strip or tile x-offset
* @param {Number} y the tile y-offset (0 for stripped images)
* @param {Number} sample the sample to get for separated samples
* @param {import("./geotiff").Pool|AbstractDecoder} poolOrDecoder the decoder or decoder pool
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise.<ArrayBuffer>}
*/
getTileOrStrip(x: number, y: number, sample: number, poolOrDecoder: import("./geotiff").Pool | AbstractDecoder, signal?: AbortSignal | undefined): Promise<ArrayBuffer>;
/**
* Internal read function.
* @private
* @param {Array} imageWindow The image window in pixel coordinates
* @param {Array} samples The selected samples (0-based indices)
* @param {TypedArray[]|TypedArray} valueArrays The array(s) to write into
* @param {Boolean} interleave Whether or not to write in an interleaved manner
* @param {import("./geotiff").Pool|AbstractDecoder} poolOrDecoder the decoder or decoder pool
* @param {number} width the width of window to be read into
* @param {number} height the height of window to be read into
* @param {number} resampleMethod the resampling method to be used when interpolating
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<TypedArray[]>|Promise<TypedArray>}
*/
private _readRaster;
/**
* Reads raster data from the image. This function reads all selected samples
* into separate arrays of the correct type for that sample or into a single
* combined array when `interleave` is set. When provided, only a subset
* of the raster is read for each sample.
*
* @param {ReadRasterOptions} [options={}] optional parameters
* @returns {Promise.<(TypedArray|TypedArray[])>} the decoded arrays as a promise
*/
readRasters({ window: wnd, samples, interleave, pool, width, height, resampleMethod, fillValue, signal, }?: ReadRasterOptions | undefined): Promise<(TypedArray | TypedArray[])>;
/**
* Reads raster data from the image as RGB. The result is always an
* interleaved typed array.
* Colorspaces other than RGB will be transformed to RGB, color maps expanded.
* When no other method is applicable, the first sample is used to produce a
 * grayscale image.
* When provided, only a subset of the raster is read for each sample.
*
* @param {Object} [options] optional parameters
* @param {Array<number>} [options.window] the subset to read data from in pixels.
* @param {boolean} [options.interleave=true] whether the data shall be read
* in one single array or separate
* arrays.
* @param {import("./geotiff").Pool} [options.pool=null] The optional decoder pool to use.
 * @param {number} [options.width] The desired width of the output. When the width is not the
* same as the images, resampling will be performed.
 * @param {number} [options.height] The desired height of the output. When the height is not the
* same as the images, resampling will be performed.
* @param {string} [options.resampleMethod='nearest'] The desired resampling method.
* @param {boolean} [options.enableAlpha=false] Enable reading alpha channel if present.
* @param {AbortSignal} [options.signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<TypedArray|TypedArray[]>} the RGB array as a Promise
*/
readRGB({ window, interleave, pool, width, height, resampleMethod, enableAlpha, signal }?: {
window?: number[] | undefined;
interleave?: boolean | undefined;
pool?: import("./pool.js").default | undefined;
width?: number | undefined;
height?: number | undefined;
resampleMethod?: string | undefined;
enableAlpha?: boolean | undefined;
signal?: AbortSignal | undefined;
} | undefined): Promise<TypedArray | TypedArray[]>;
/**
* Returns an array of tiepoints.
* @returns {Object[]}
*/
getTiePoints(): any[];
/**
* Returns the parsed GDAL metadata items.
*
* If sample is passed to null, dataset-level metadata will be returned.
* Otherwise only metadata specific to the provided sample will be returned.
*
* @param {number} [sample=null] The sample index.
* @returns {Object}
*/
getGDALMetadata(sample?: number | undefined): any;
/**
* Returns the GDAL nodata value
* @returns {number|null}
*/
getGDALNoData(): number | null;
/**
* Returns the image origin as a XYZ-vector. When the image has no affine
* transformation, then an exception is thrown.
* @returns {Array<number>} The origin as a vector
*/
getOrigin(): Array<number>;
/**
* Returns the image resolution as a XYZ-vector. When the image has no affine
* transformation, then an exception is thrown.
* @param {GeoTIFFImage} [referenceImage=null] A reference image to calculate the resolution from
* in cases when the current image does not have the
* required tags on its own.
* @returns {Array<number>} The resolution as a vector
*/
getResolution(referenceImage?: GeoTIFFImage | undefined): Array<number>;
/**
* Returns whether or not the pixels of the image depict an area (or point).
* @returns {Boolean} Whether the pixels are a point
*/
pixelIsArea(): boolean;
/**
* Returns the image bounding box as an array of 4 values: min-x, min-y,
* max-x and max-y. When the image has no affine transformation, then an
* exception is thrown.
* @returns {Array<number>} The bounding box
*/
getBoundingBox(): Array<number>;
}
//# sourceMappingURL=geotiffimage.d.ts.map

1
node_modules/geotiff/dist-node/geotiffimage.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"geotiffimage.d.ts","sourceRoot":"","sources":["../dist-module/geotiffimage.js"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;yBAiCc,OAAO,cAAc,EAAE,UAAU;AA0I/C;;GAEG;AACH;IACE;;;;;;;;OAQG;IACH,wDALW,QAAQ,yDAmBlB;IAbC,mBAAkC;IAClC,aAAsB;IACtB,mBAAwB;IACxB,sBAAgC;IAChC,iBAA8B;IAC9B,iBAA0C;IAE1C,yBAAiG;IAKjG,eAAoB;IAGtB;;;OAGG;IACH,wBAEC;IAED;;;OAGG;IACH,kBAEC;IAED;;;OAGG;IACH,mBAEC;IAED;;;OAGG;IACH,oBAEC;IAED;;;OAGG;IACH,6BAGC;IAED;;;OAGG;IACH,uBAEC;IAED;;;OAGG;IACH,wBAQC;IAED,wBAEC;IAED,+BAMC;IAED;;;;OAIG;IACH,2BAMC;IAED,kCAKC;IAED,yGAyCC;IAED,2CAGC;IAED,4CAEC;IAED,2JAIC;IAED;;;;;;;;;OASG;IACH,oEALW,OAAO,WAAW,EAAE,IAAI,kBAAgB,qCAGtC,QAAS,WAAW,CAAC,CAwDjC;IAED;;;;;;;;;;;;;;OAcG;IACH,oBAkHC;IAED;;;;;;;;OAQG;IACH,4IAFa,QAAS,CAAC,UAAU,GAAC,UAAU,EAAE,CAAC,CAAC,CAyD/C;IAED;;;;;;;;;;;;;;;;;;;;;;;OAuBG;IACH;;;;;;;;;oBAFa,QAAQ,UAAU,GAAC,UAAU,EAAE,CAAC,CA0G5C;IAED;;;OAGG;IACH,gBAFa,KAAQ,CAmBpB;IAED;;;;;;;;OAQG;IACH,kDAoBC;IAED;;;OAGG;IACH,iBAFa,MAAM,GAAC,IAAI,CAQvB;IAED;;;;OAIG;IACH,aAFa,MAAM,MAAM,CAAC,CAoBzB;IAED;;;;;;;OAOG;IACH,0DAFa,MAAM,MAAM,CAAC,CA+BzB;IAED;;;OAGG;IACH,uBAEC;IAED;;;;;OAKG;IACH,kBAFa,MAAM,MAAM,CAAC,CAkBzB;CACF"}

822
node_modules/geotiff/dist-node/geotiffimage.js generated vendored Normal file
View File

@@ -0,0 +1,822 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
/** @module geotiffimage */
const float16_1 = require("@petamoriken/float16");
const get_attribute_js_1 = __importDefault(require("xml-utils/get-attribute.js"));
const find_tags_by_name_js_1 = __importDefault(require("xml-utils/find-tags-by-name.js"));
const globals_js_1 = require("./globals.js");
const rgb_js_1 = require("./rgb.js");
const index_js_1 = require("./compression/index.js");
const resample_js_1 = require("./resample.js");
/**
* @typedef {Object} ReadRasterOptions
* @property {Array<number>} [window=whole window] the subset to read data from in pixels.
* @property {Array<number>} [bbox=whole image] the subset to read data from in
* geographical coordinates.
* @property {Array<number>} [samples=all samples] the selection of samples to read from. Default is all samples.
* @property {boolean} [interleave=false] whether the data shall be read
* in one single array or separate
* arrays.
* @property {Pool} [pool=null] The optional decoder pool to use.
* @property {number} [width] The desired width of the output. When the width is not the
* same as the images, resampling will be performed.
* @property {number} [height] The desired height of the output. When the width is not the
* same as the images, resampling will be performed.
* @property {string} [resampleMethod='nearest'] The desired resampling method.
* @property {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @property {number|number[]} [fillValue] The value to use for parts of the image
* outside of the images extent. When multiple
* samples are requested, an array of fill values
* can be passed.
*/
/** @typedef {import("./geotiff.js").TypedArray} TypedArray */
/**
 * Sums the values of `array` over the half-open index range [start, end).
 * @param {ArrayLike<number>} array the values to sum
 * @param {number} start the first index included in the sum
 * @param {number} end the index just past the last one included
 * @returns {number} the accumulated total
 */
function sum(array, start, end) {
    let total = 0;
    let index = start;
    while (index < end) {
        total += array[index];
        index += 1;
    }
    return total;
}
/**
 * Allocates a typed array suited to a TIFF SampleFormat / BitsPerSample pair.
 * Format 1 is unsigned integer, 2 is two's complement signed integer and 3 is
 * floating point; 16 bit floats are widened to a Float32Array.
 * @param {number} format the TIFF SampleFormat value
 * @param {number} bitsPerSample the bit width of one sample
 * @param {number} size the number of elements to allocate
 * @returns {TypedArray} the freshly allocated array
 * @throws {Error} when the format/bit-depth combination is unsupported
 */
function arrayForType(format, bitsPerSample, size) {
    if (format === 1) { // unsigned integer data
        if (bitsPerSample <= 8) {
            return new Uint8Array(size);
        }
        if (bitsPerSample <= 16) {
            return new Uint16Array(size);
        }
        if (bitsPerSample <= 32) {
            return new Uint32Array(size);
        }
    }
    else if (format === 2) { // twos complement signed integer data
        if (bitsPerSample === 8) {
            return new Int8Array(size);
        }
        if (bitsPerSample === 16) {
            return new Int16Array(size);
        }
        if (bitsPerSample === 32) {
            return new Int32Array(size);
        }
    }
    else if (format === 3) { // floating point data
        if (bitsPerSample === 16 || bitsPerSample === 32) {
            return new Float32Array(size);
        }
        if (bitsPerSample === 64) {
            return new Float64Array(size);
        }
    }
    throw Error('Unsupported data format/bitsPerSample');
}
/**
 * Decides whether a decoded block must be re-packed by normalizeArray():
 * whole-byte integers up to 32 bit and 16/32/64 bit floats can be read
 * directly; anything else (e.g. 1/2/4/12 bit integers) needs normalization.
 * @param {number} format the TIFF SampleFormat value
 * @param {number} bitsPerSample the bit width of one sample
 * @returns {boolean} true when the block must be normalized
 */
function needsNormalization(format, bitsPerSample) {
    const isByteAlignedInteger = (format === 1 || format === 2)
        && bitsPerSample % 8 === 0
        && bitsPerSample <= 32;
    if (isByteAlignedInteger) {
        return false;
    }
    const isSupportedFloat = format === 3
        && (bitsPerSample === 16 || bitsPerSample === 32 || bitsPerSample === 64);
    return !isSupportedFloat;
}
/**
 * Re-packs a decoded block whose samples are not byte-aligned (e.g. 1/2/4/12
 * bit unsigned integers) into a typed array with one element per sample, so
 * plain DataView readers can be used downstream.
 * @param {ArrayBuffer} inBuffer the raw decoded block
 * @param {number} format the TIFF SampleFormat value
 * @param {number} planarConfiguration 1 = chunky (interleaved), 2 = planar
 * @param {number} samplesPerPixel the number of samples per pixel
 * @param {number} bitsPerSample the bit width of one sample
 * @param {number} tileWidth the block width in pixels
 * @param {number} tileHeight the block height in pixels
 * @returns {ArrayBuffer} the buffer of the normalized typed array
 */
function normalizeArray(inBuffer, format, planarConfiguration, samplesPerPixel, bitsPerSample, tileWidth, tileHeight) {
    // const inByteArray = new Uint8Array(inBuffer);
    const view = new DataView(inBuffer);
    // A planar block carries a single sample; a chunky block carries all of them.
    const outSize = planarConfiguration === 2
        ? tileHeight * tileWidth
        : tileHeight * tileWidth * samplesPerPixel;
    const samplesToTransfer = planarConfiguration === 2
        ? 1 : samplesPerPixel;
    const outArray = arrayForType(format, bitsPerSample, outSize);
    // let pixel = 0;
    // Mask with the low `bitsPerSample` bits set, used to isolate one sample value.
    const bitMask = parseInt('1'.repeat(bitsPerSample), 2);
    if (format === 1) { // unsigned integer
        // translation of https://github.com/OSGeo/gdal/blob/master/gdal/frmts/gtiff/geotiff.cpp#L7337
        let pixelBitSkip;
        // let sampleBitOffset = 0;
        if (planarConfiguration === 1) {
            pixelBitSkip = samplesPerPixel * bitsPerSample;
            // sampleBitOffset = (samplesPerPixel - 1) * bitsPerSample;
        }
        else {
            pixelBitSkip = bitsPerSample;
        }
        // Bits per line rounds up to next byte boundary.
        let bitsPerLine = tileWidth * pixelBitSkip;
        if ((bitsPerLine & 7) !== 0) {
            bitsPerLine = (bitsPerLine + 7) & (~7);
        }
        for (let y = 0; y < tileHeight; ++y) {
            const lineBitOffset = y * bitsPerLine;
            for (let x = 0; x < tileWidth; ++x) {
                const pixelBitOffset = lineBitOffset + (x * samplesToTransfer * bitsPerSample);
                for (let i = 0; i < samplesToTransfer; ++i) {
                    const bitOffset = pixelBitOffset + (i * bitsPerSample);
                    const outIndex = (((y * tileWidth) + x) * samplesToTransfer) + i;
                    const byteOffset = Math.floor(bitOffset / 8);
                    const innerBitOffset = bitOffset % 8;
                    // Read the smallest big-endian word that fully contains the
                    // sample, shift its bits down to the LSB and mask them out.
                    // Note the shift amount is ((wordBits - bitsPerSample) -
                    // innerBitOffset): `-` binds tighter than `>>` here.
                    if (innerBitOffset + bitsPerSample <= 8) {
                        outArray[outIndex] = (view.getUint8(byteOffset) >> (8 - bitsPerSample) - innerBitOffset) & bitMask;
                    }
                    else if (innerBitOffset + bitsPerSample <= 16) {
                        outArray[outIndex] = (view.getUint16(byteOffset) >> (16 - bitsPerSample) - innerBitOffset) & bitMask;
                    }
                    else if (innerBitOffset + bitsPerSample <= 24) {
                        // No native 24 bit reader: assemble it from 16 + 8 bits.
                        const raw = (view.getUint16(byteOffset) << 8) | (view.getUint8(byteOffset + 2));
                        outArray[outIndex] = (raw >> (24 - bitsPerSample) - innerBitOffset) & bitMask;
                    }
                    else {
                        outArray[outIndex] = (view.getUint32(byteOffset) >> (32 - bitsPerSample) - innerBitOffset) & bitMask;
                    }
                    // let outWord = 0;
                    // for (let bit = 0; bit < bitsPerSample; ++bit) {
                    //   if (inByteArray[bitOffset >> 3]
                    //     & (0x80 >> (bitOffset & 7))) {
                    //     outWord |= (1 << (bitsPerSample - 1 - bit));
                    //   }
                    //   ++bitOffset;
                    // }
                    // outArray[outIndex] = outWord;
                    // outArray[pixel] = outWord;
                    // pixel += 1;
                }
                // bitOffset = bitOffset + pixelBitSkip - bitsPerSample;
            }
        }
    }
    else if (format === 3) { // floating point
        // Float16 is handled elsewhere (see getReaderForSample); nothing to do here.
        // normalize 16/24 bit floats to 32 bit floats in the array
        // console.time();
        // if (bitsPerSample === 16) {
        //   for (let byte = 0, outIndex = 0; byte < inBuffer.byteLength; byte += 2, ++outIndex) {
        //     outArray[outIndex] = getFloat16(view, byte);
        //   }
        // }
        // console.timeEnd()
    }
    return outArray.buffer;
}
/**
* GeoTIFF sub-file image.
*/
class GeoTIFFImage {
/**
* @constructor
* @param {Object} fileDirectory The parsed file directory
* @param {Object} geoKeys The parsed geo-keys
* @param {DataView} dataView The DataView for the underlying file.
* @param {Boolean} littleEndian Whether the file is encoded in little or big endian
* @param {Boolean} cache Whether or not decoded tiles shall be cached
* @param {Source} source The datasource to read from
*/
constructor(fileDirectory, geoKeys, dataView, littleEndian, cache, source) {
this.fileDirectory = fileDirectory;
this.geoKeys = geoKeys;
this.dataView = dataView;
this.littleEndian = littleEndian;
this.tiles = cache ? {} : null;
this.isTiled = !fileDirectory.StripOffsets;
const planarConfiguration = fileDirectory.PlanarConfiguration;
this.planarConfiguration = (typeof planarConfiguration === 'undefined') ? 1 : planarConfiguration;
if (this.planarConfiguration !== 1 && this.planarConfiguration !== 2) {
throw new Error('Invalid planar configuration.');
}
this.source = source;
}
/**
* Returns the associated parsed file directory.
* @returns {Object} the parsed file directory
*/
getFileDirectory() {
return this.fileDirectory;
}
/**
* Returns the associated parsed geo keys.
* @returns {Object} the parsed geo keys
*/
getGeoKeys() {
return this.geoKeys;
}
/**
* Returns the width of the image.
* @returns {Number} the width of the image
*/
getWidth() {
return this.fileDirectory.ImageWidth;
}
/**
* Returns the height of the image.
* @returns {Number} the height of the image
*/
getHeight() {
return this.fileDirectory.ImageLength;
}
/**
* Returns the number of samples per pixel.
* @returns {Number} the number of samples per pixel
*/
getSamplesPerPixel() {
return typeof this.fileDirectory.SamplesPerPixel !== 'undefined'
? this.fileDirectory.SamplesPerPixel : 1;
}
/**
* Returns the width of each tile.
* @returns {Number} the width of each tile
*/
getTileWidth() {
return this.isTiled ? this.fileDirectory.TileWidth : this.getWidth();
}
/**
* Returns the height of each tile.
* @returns {Number} the height of each tile
*/
getTileHeight() {
if (this.isTiled) {
return this.fileDirectory.TileLength;
}
if (typeof this.fileDirectory.RowsPerStrip !== 'undefined') {
return Math.min(this.fileDirectory.RowsPerStrip, this.getHeight());
}
return this.getHeight();
}
getBlockWidth() {
return this.getTileWidth();
}
getBlockHeight(y) {
if (this.isTiled || (y + 1) * this.getTileHeight() <= this.getHeight()) {
return this.getTileHeight();
}
else {
return this.getHeight() - (y * this.getTileHeight());
}
}
/**
* Calculates the number of bytes for each pixel across all samples. Only full
* bytes are supported, an exception is thrown when this is not the case.
* @returns {Number} the bytes per pixel
*/
getBytesPerPixel() {
let bytes = 0;
for (let i = 0; i < this.fileDirectory.BitsPerSample.length; ++i) {
bytes += this.getSampleByteSize(i);
}
return bytes;
}
getSampleByteSize(i) {
if (i >= this.fileDirectory.BitsPerSample.length) {
throw new RangeError(`Sample index ${i} is out of range.`);
}
return Math.ceil(this.fileDirectory.BitsPerSample[i] / 8);
}
getReaderForSample(sampleIndex) {
const format = this.fileDirectory.SampleFormat
? this.fileDirectory.SampleFormat[sampleIndex] : 1;
const bitsPerSample = this.fileDirectory.BitsPerSample[sampleIndex];
switch (format) {
case 1: // unsigned integer data
if (bitsPerSample <= 8) {
return DataView.prototype.getUint8;
}
else if (bitsPerSample <= 16) {
return DataView.prototype.getUint16;
}
else if (bitsPerSample <= 32) {
return DataView.prototype.getUint32;
}
break;
case 2: // twos complement signed integer data
if (bitsPerSample <= 8) {
return DataView.prototype.getInt8;
}
else if (bitsPerSample <= 16) {
return DataView.prototype.getInt16;
}
else if (bitsPerSample <= 32) {
return DataView.prototype.getInt32;
}
break;
case 3:
switch (bitsPerSample) {
case 16:
return function (offset, littleEndian) {
return (0, float16_1.getFloat16)(this, offset, littleEndian);
};
case 32:
return DataView.prototype.getFloat32;
case 64:
return DataView.prototype.getFloat64;
default:
break;
}
break;
default:
break;
}
throw Error('Unsupported data format/bitsPerSample');
}
getSampleFormat(sampleIndex = 0) {
return this.fileDirectory.SampleFormat
? this.fileDirectory.SampleFormat[sampleIndex] : 1;
}
getBitsPerSample(sampleIndex = 0) {
return this.fileDirectory.BitsPerSample[sampleIndex];
}
getArrayForSample(sampleIndex, size) {
const format = this.getSampleFormat(sampleIndex);
const bitsPerSample = this.getBitsPerSample(sampleIndex);
return arrayForType(format, bitsPerSample, size);
}
/**
* Returns the decoded strip or tile.
* @param {Number} x the strip or tile x-offset
* @param {Number} y the tile y-offset (0 for stripped images)
* @param {Number} sample the sample to get for separated samples
* @param {import("./geotiff").Pool|AbstractDecoder} poolOrDecoder the decoder or decoder pool
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise.<ArrayBuffer>}
*/
async getTileOrStrip(x, y, sample, poolOrDecoder, signal) {
const numTilesPerRow = Math.ceil(this.getWidth() / this.getTileWidth());
const numTilesPerCol = Math.ceil(this.getHeight() / this.getTileHeight());
let index;
const { tiles } = this;
if (this.planarConfiguration === 1) {
index = (y * numTilesPerRow) + x;
}
else if (this.planarConfiguration === 2) {
index = (sample * numTilesPerRow * numTilesPerCol) + (y * numTilesPerRow) + x;
}
let offset;
let byteCount;
if (this.isTiled) {
offset = this.fileDirectory.TileOffsets[index];
byteCount = this.fileDirectory.TileByteCounts[index];
}
else {
offset = this.fileDirectory.StripOffsets[index];
byteCount = this.fileDirectory.StripByteCounts[index];
}
const slice = (await this.source.fetch([{ offset, length: byteCount }], signal))[0];
let request;
if (tiles === null || !tiles[index]) {
// resolve each request by potentially applying array normalization
request = (async () => {
let data = await poolOrDecoder.decode(this.fileDirectory, slice);
const sampleFormat = this.getSampleFormat();
const bitsPerSample = this.getBitsPerSample();
if (needsNormalization(sampleFormat, bitsPerSample)) {
data = normalizeArray(data, sampleFormat, this.planarConfiguration, this.getSamplesPerPixel(), bitsPerSample, this.getTileWidth(), this.getBlockHeight(y));
}
return data;
})();
// set the cache
if (tiles !== null) {
tiles[index] = request;
}
}
else {
// get from the cache
request = tiles[index];
}
// cache the tile request
return { x, y, sample, data: await request };
}
/**
* Internal read function.
* @private
* @param {Array} imageWindow The image window in pixel coordinates
* @param {Array} samples The selected samples (0-based indices)
* @param {TypedArray[]|TypedArray} valueArrays The array(s) to write into
* @param {Boolean} interleave Whether or not to write in an interleaved manner
* @param {import("./geotiff").Pool|AbstractDecoder} poolOrDecoder the decoder or decoder pool
* @param {number} width the width of window to be read into
* @param {number} height the height of window to be read into
* @param {number} resampleMethod the resampling method to be used when interpolating
* @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
* to be aborted
* @returns {Promise<TypedArray[]>|Promise<TypedArray>}
*/
    async _readRaster(imageWindow, samples, valueArrays, interleave, poolOrDecoder, width, height, resampleMethod, signal) {
        const tileWidth = this.getTileWidth();
        const tileHeight = this.getTileHeight();
        const imageWidth = this.getWidth();
        const imageHeight = this.getHeight();
        // Range of tiles/strips intersecting the requested window, clamped to
        // the tiles actually present in the image.
        const minXTile = Math.max(Math.floor(imageWindow[0] / tileWidth), 0);
        const maxXTile = Math.min(Math.ceil(imageWindow[2] / tileWidth), Math.ceil(imageWidth / tileWidth));
        const minYTile = Math.max(Math.floor(imageWindow[1] / tileHeight), 0);
        const maxYTile = Math.min(Math.ceil(imageWindow[3] / tileHeight), Math.ceil(imageHeight / tileHeight));
        const windowWidth = imageWindow[2] - imageWindow[0];
        let bytesPerPixel = this.getBytesPerPixel();
        const srcSampleOffsets = [];
        const sampleReaders = [];
        for (let i = 0; i < samples.length; ++i) {
            if (this.planarConfiguration === 1) {
                // Chunky layout: a sample's byte offset within a pixel is the sum
                // of the bit widths of the samples before it.
                srcSampleOffsets.push(sum(this.fileDirectory.BitsPerSample, 0, samples[i]) / 8);
            }
            else {
                // Planar layout: each block holds a single sample, so offset 0.
                srcSampleOffsets.push(0);
            }
            sampleReaders.push(this.getReaderForSample(samples[i]));
        }
        const promises = [];
        const { littleEndian } = this;
        for (let yTile = minYTile; yTile < maxYTile; ++yTile) {
            for (let xTile = minXTile; xTile < maxXTile; ++xTile) {
                for (let sampleIndex = 0; sampleIndex < samples.length; ++sampleIndex) {
                    const si = sampleIndex;
                    const sample = samples[sampleIndex];
                    if (this.planarConfiguration === 2) {
                        // Planar blocks contain one sample only, so the per-pixel
                        // stride is just that sample's byte size.
                        // NOTE(review): `bytesPerPixel` is captured by reference in
                        // the .then callbacks below; with PlanarConfiguration=2 and
                        // heterogeneous sample sizes the callbacks may observe the
                        // last assigned value — confirm sample sizes are uniform.
                        bytesPerPixel = this.getSampleByteSize(sampleIndex);
                    }
                    const promise = this.getTileOrStrip(xTile, yTile, sample, poolOrDecoder, signal);
                    promises.push(promise);
                    promise.then((tile) => {
                        const buffer = tile.data;
                        const dataView = new DataView(buffer);
                        const blockHeight = this.getBlockHeight(tile.y);
                        const firstLine = tile.y * tileHeight;
                        const firstCol = tile.x * tileWidth;
                        const lastLine = firstLine + blockHeight;
                        const lastCol = (tile.x + 1) * tileWidth;
                        const reader = sampleReaders[si];
                        // Clip the copy region to both the requested window and the
                        // image bounds (edge blocks may extend past either).
                        const ymax = Math.min(blockHeight, blockHeight - (lastLine - imageWindow[3]), imageHeight - firstLine);
                        const xmax = Math.min(tileWidth, tileWidth - (lastCol - imageWindow[2]), imageWidth - firstCol);
                        for (let y = Math.max(0, imageWindow[1] - firstLine); y < ymax; ++y) {
                            for (let x = Math.max(0, imageWindow[0] - firstCol); x < xmax; ++x) {
                                const pixelOffset = ((y * tileWidth) + x) * bytesPerPixel;
                                const value = reader.call(dataView, pixelOffset + srcSampleOffsets[si], littleEndian);
                                let windowCoordinate;
                                if (interleave) {
                                    // All samples of a pixel sit next to each other.
                                    windowCoordinate = ((y + firstLine - imageWindow[1]) * windowWidth * samples.length)
                                        + ((x + firstCol - imageWindow[0]) * samples.length)
                                        + si;
                                    valueArrays[windowCoordinate] = value;
                                }
                                else {
                                    // One separate output array per sample.
                                    windowCoordinate = ((y + firstLine - imageWindow[1]) * windowWidth) + x + firstCol - imageWindow[0];
                                    valueArrays[si][windowCoordinate] = value;
                                }
                            }
                        }
                    });
                }
            }
        }
        await Promise.all(promises);
        if ((width && (imageWindow[2] - imageWindow[0]) !== width)
            || (height && (imageWindow[3] - imageWindow[1]) !== height)) {
            // The caller requested a different output size: resample the window.
            let resampled;
            if (interleave) {
                resampled = (0, resample_js_1.resampleInterleaved)(valueArrays, imageWindow[2] - imageWindow[0], imageWindow[3] - imageWindow[1], width, height, samples.length, resampleMethod);
            }
            else {
                resampled = (0, resample_js_1.resample)(valueArrays, imageWindow[2] - imageWindow[0], imageWindow[3] - imageWindow[1], width, height, resampleMethod);
            }
            resampled.width = width;
            resampled.height = height;
            return resampled;
        }
        // Tag the result with its pixel dimensions for downstream consumers.
        valueArrays.width = width || imageWindow[2] - imageWindow[0];
        valueArrays.height = height || imageWindow[3] - imageWindow[1];
        return valueArrays;
    }
/**
* Reads raster data from the image. This function reads all selected samples
* into separate arrays of the correct type for that sample or into a single
* combined array when `interleave` is set. When provided, only a subset
* of the raster is read for each sample.
*
* @param {ReadRasterOptions} [options={}] optional parameters
* @returns {Promise.<(TypedArray|TypedArray[])>} the decoded arrays as a promise
*/
async readRasters({ window: wnd, samples = [], interleave, pool = null, width, height, resampleMethod, fillValue, signal, } = {}) {
const imageWindow = wnd || [0, 0, this.getWidth(), this.getHeight()];
// check parameters
if (imageWindow[0] > imageWindow[2] || imageWindow[1] > imageWindow[3]) {
throw new Error('Invalid subsets');
}
const imageWindowWidth = imageWindow[2] - imageWindow[0];
const imageWindowHeight = imageWindow[3] - imageWindow[1];
const numPixels = imageWindowWidth * imageWindowHeight;
const samplesPerPixel = this.getSamplesPerPixel();
if (!samples || !samples.length) {
for (let i = 0; i < samplesPerPixel; ++i) {
samples.push(i);
}
}
else {
for (let i = 0; i < samples.length; ++i) {
if (samples[i] >= samplesPerPixel) {
return Promise.reject(new RangeError(`Invalid sample index '${samples[i]}'.`));
}
}
}
let valueArrays;
if (interleave) {
const format = this.fileDirectory.SampleFormat
? Math.max.apply(null, this.fileDirectory.SampleFormat) : 1;
const bitsPerSample = Math.max.apply(null, this.fileDirectory.BitsPerSample);
valueArrays = arrayForType(format, bitsPerSample, numPixels * samples.length);
if (fillValue) {
valueArrays.fill(fillValue);
}
}
else {
valueArrays = [];
for (let i = 0; i < samples.length; ++i) {
const valueArray = this.getArrayForSample(samples[i], numPixels);
if (Array.isArray(fillValue) && i < fillValue.length) {
valueArray.fill(fillValue[i]);
}
else if (fillValue && !Array.isArray(fillValue)) {
valueArray.fill(fillValue);
}
valueArrays.push(valueArray);
}
}
const poolOrDecoder = pool || await (0, index_js_1.getDecoder)(this.fileDirectory);
const result = await this._readRaster(imageWindow, samples, valueArrays, interleave, poolOrDecoder, width, height, resampleMethod, signal);
return result;
}
/**
 * Reads raster data from the image as RGB. The result is always an
 * interleaved typed array.
 * Colorspaces other than RGB will be transformed to RGB, color maps expanded.
 * When no other method is applicable, the first sample is used to produce a
 * grayscale image.
 * When provided, only a subset of the raster is read for each sample.
 *
 * @param {Object} [options] optional parameters
 * @param {Array<number>} [options.window] the subset to read data from in pixels.
 * @param {boolean} [options.interleave=true] whether the data shall be read
 *                                             in one single array or separate
 *                                             arrays.
 * @param {import("./geotiff").Pool} [options.pool=null] The optional decoder pool to use.
 * @param {number} [options.width] The desired width of the output. When the width is not the
 *                                 same as the images, resampling will be performed.
 * @param {number} [options.height] The desired height of the output. When the width is not the
 *                                  same as the images, resampling will be performed.
 * @param {string} [options.resampleMethod='nearest'] The desired resampling method.
 * @param {boolean} [options.enableAlpha=false] Enable reading alpha channel if present.
 * @param {AbortSignal} [options.signal] An AbortSignal that may be signalled if the request is
 *                                       to be aborted
 * @returns {Promise<TypedArray|TypedArray[]>} the RGB array as a Promise
 */
async readRGB({ window, interleave = true, pool = null, width, height, resampleMethod, enableAlpha = false, signal } = {}) {
    const imageWindow = window || [0, 0, this.getWidth(), this.getHeight()];
    // check parameters
    if (imageWindow[0] > imageWindow[2] || imageWindow[1] > imageWindow[3]) {
        throw new Error('Invalid subsets');
    }
    const pi = this.fileDirectory.PhotometricInterpretation;
    // Native RGB needs no color conversion: read the bands directly.
    if (pi === globals_js_1.photometricInterpretations.RGB) {
        let s = [0, 1, 2];
        // Include the extra (alpha) samples when present and requested.
        if ((!(this.fileDirectory.ExtraSamples === globals_js_1.ExtraSamplesValues.Unspecified)) && enableAlpha) {
            s = [];
            for (let i = 0; i < this.fileDirectory.BitsPerSample.length; i += 1) {
                s.push(i);
            }
        }
        return this.readRasters({
            window,
            interleave,
            samples: s,
            pool,
            width,
            height,
            resampleMethod,
            signal,
        });
    }
    // Otherwise, pick the samples needed by the source color space ...
    let samples;
    switch (pi) {
        case globals_js_1.photometricInterpretations.WhiteIsZero:
        case globals_js_1.photometricInterpretations.BlackIsZero:
        case globals_js_1.photometricInterpretations.Palette:
            samples = [0];
            break;
        case globals_js_1.photometricInterpretations.CMYK:
            samples = [0, 1, 2, 3];
            break;
        case globals_js_1.photometricInterpretations.YCbCr:
        case globals_js_1.photometricInterpretations.CIELab:
            samples = [0, 1, 2];
            break;
        default:
            throw new Error('Invalid or unsupported photometric interpretation.');
    }
    // ... read them interleaved, then convert to 8-bit RGB below.
    const subOptions = {
        window: imageWindow,
        interleave: true,
        samples,
        pool,
        width,
        height,
        resampleMethod,
        signal,
    };
    const { fileDirectory } = this;
    const raster = await this.readRasters(subOptions);
    // Maximum representable value of the first sample, used for scaling
    // grayscale conversions.
    const max = 2 ** this.fileDirectory.BitsPerSample[0];
    let data;
    switch (pi) {
        case globals_js_1.photometricInterpretations.WhiteIsZero:
            data = (0, rgb_js_1.fromWhiteIsZero)(raster, max);
            break;
        case globals_js_1.photometricInterpretations.BlackIsZero:
            data = (0, rgb_js_1.fromBlackIsZero)(raster, max);
            break;
        case globals_js_1.photometricInterpretations.Palette:
            data = (0, rgb_js_1.fromPalette)(raster, fileDirectory.ColorMap);
            break;
        case globals_js_1.photometricInterpretations.CMYK:
            data = (0, rgb_js_1.fromCMYK)(raster);
            break;
        case globals_js_1.photometricInterpretations.YCbCr:
            data = (0, rgb_js_1.fromYCbCr)(raster);
            break;
        case globals_js_1.photometricInterpretations.CIELab:
            data = (0, rgb_js_1.fromCIELab)(raster);
            break;
        default:
            throw new Error('Unsupported photometric interpretation.');
    }
    // if non-interleaved data is requested, we must split the channels
    // into their respective arrays
    if (!interleave) {
        const red = new Uint8Array(data.length / 3);
        const green = new Uint8Array(data.length / 3);
        const blue = new Uint8Array(data.length / 3);
        for (let i = 0, j = 0; i < data.length; i += 3, ++j) {
            red[j] = data[i];
            green[j] = data[i + 1];
            blue[j] = data[i + 2];
        }
        data = [red, green, blue];
    }
    data.width = raster.width;
    data.height = raster.height;
    return data;
}
/**
* Returns an array of tiepoints.
* @returns {Object[]}
*/
getTiePoints() {
if (!this.fileDirectory.ModelTiepoint) {
return [];
}
const tiePoints = [];
for (let i = 0; i < this.fileDirectory.ModelTiepoint.length; i += 6) {
tiePoints.push({
i: this.fileDirectory.ModelTiepoint[i],
j: this.fileDirectory.ModelTiepoint[i + 1],
k: this.fileDirectory.ModelTiepoint[i + 2],
x: this.fileDirectory.ModelTiepoint[i + 3],
y: this.fileDirectory.ModelTiepoint[i + 4],
z: this.fileDirectory.ModelTiepoint[i + 5],
});
}
return tiePoints;
}
/**
* Returns the parsed GDAL metadata items.
*
* If sample is passed to null, dataset-level metadata will be returned.
* Otherwise only metadata specific to the provided sample will be returned.
*
* @param {number} [sample=null] The sample index.
* @returns {Object}
*/
getGDALMetadata(sample = null) {
const metadata = {};
if (!this.fileDirectory.GDAL_METADATA) {
return null;
}
const string = this.fileDirectory.GDAL_METADATA;
let items = (0, find_tags_by_name_js_1.default)(string, 'Item');
if (sample === null) {
items = items.filter((item) => (0, get_attribute_js_1.default)(item, 'sample') === undefined);
}
else {
items = items.filter((item) => Number((0, get_attribute_js_1.default)(item, 'sample')) === sample);
}
for (let i = 0; i < items.length; ++i) {
const item = items[i];
metadata[(0, get_attribute_js_1.default)(item, 'name')] = item.inner;
}
return metadata;
}
/**
* Returns the GDAL nodata value
* @returns {number|null}
*/
getGDALNoData() {
if (!this.fileDirectory.GDAL_NODATA) {
return null;
}
const string = this.fileDirectory.GDAL_NODATA;
return Number(string.substring(0, string.length - 1));
}
/**
* Returns the image origin as a XYZ-vector. When the image has no affine
* transformation, then an exception is thrown.
* @returns {Array<number>} The origin as a vector
*/
getOrigin() {
const tiePoints = this.fileDirectory.ModelTiepoint;
const modelTransformation = this.fileDirectory.ModelTransformation;
if (tiePoints && tiePoints.length === 6) {
return [
tiePoints[3],
tiePoints[4],
tiePoints[5],
];
}
if (modelTransformation) {
return [
modelTransformation[3],
modelTransformation[7],
modelTransformation[11],
];
}
throw new Error('The image does not have an affine transformation.');
}
/**
* Returns the image resolution as a XYZ-vector. When the image has no affine
* transformation, then an exception is thrown.
* @param {GeoTIFFImage} [referenceImage=null] A reference image to calculate the resolution from
* in cases when the current image does not have the
* required tags on its own.
* @returns {Array<number>} The resolution as a vector
*/
getResolution(referenceImage = null) {
const modelPixelScale = this.fileDirectory.ModelPixelScale;
const modelTransformation = this.fileDirectory.ModelTransformation;
if (modelPixelScale) {
return [
modelPixelScale[0],
-modelPixelScale[1],
modelPixelScale[2],
];
}
if (modelTransformation) {
return [
modelTransformation[0],
modelTransformation[5],
modelTransformation[10],
];
}
if (referenceImage) {
const [refResX, refResY, refResZ] = referenceImage.getResolution();
return [
refResX * referenceImage.getWidth() / this.getWidth(),
refResY * referenceImage.getHeight() / this.getHeight(),
refResZ * referenceImage.getWidth() / this.getWidth(),
];
}
throw new Error('The image does not have an affine transformation.');
}
/**
* Returns whether or not the pixels of the image depict an area (or point).
* @returns {Boolean} Whether the pixels are a point
*/
pixelIsArea() {
return this.geoKeys.GTRasterTypeGeoKey === 1;
}
/**
* Returns the image bounding box as an array of 4 values: min-x, min-y,
* max-x and max-y. When the image has no affine transformation, then an
* exception is thrown.
* @returns {Array<number>} The bounding box
*/
getBoundingBox() {
const origin = this.getOrigin();
const resolution = this.getResolution();
const x1 = origin[0];
const y1 = origin[1];
const x2 = x1 + (resolution[0] * this.getWidth());
const y2 = y1 + (resolution[1] * this.getHeight());
return [
Math.min(x1, x2),
Math.min(y1, y2),
Math.max(x1, x2),
Math.max(y1, y2),
];
}
}
exports.default = GeoTIFFImage;
//# sourceMappingURL=geotiffimage.js.map

1
node_modules/geotiff/dist-node/geotiffimage.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

2
node_modules/geotiff/dist-node/geotiffwriter.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
export function writeGeotiff(data: any, metadata: any): ArrayBufferLike;
//# sourceMappingURL=geotiffwriter.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"geotiffwriter.d.ts","sourceRoot":"","sources":["../dist-module/geotiffwriter.js"],"names":[],"mappings":"AAuSA,wEAgKC"}

400
node_modules/geotiff/dist-node/geotiffwriter.js generated vendored Normal file
View File

@@ -0,0 +1,400 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.writeGeotiff = void 0;
/*
Some parts of this file are based on UTIF.js,
which was released under the MIT License.
You can view that here:
https://github.com/photopea/UTIF.js/blob/master/LICENSE
*/
const globals_js_1 = require("./globals.js");
const utils_js_1 = require("./utils.js");
const tagName2Code = (0, utils_js_1.invert)(globals_js_1.fieldTagNames);
const geoKeyName2Code = (0, utils_js_1.invert)(globals_js_1.geoKeyNames);
const name2code = {};
(0, utils_js_1.assign)(name2code, tagName2Code);
(0, utils_js_1.assign)(name2code, geoKeyName2Code);
const typeName2byte = (0, utils_js_1.invert)(globals_js_1.fieldTypeNames);
// config variables
const numBytesInIfd = 1000;
// Big-endian binary read/write helpers (after UTIF.js). All multi-byte
// conversions go through one shared 8-byte scratch buffer, aliased by
// typed-array views of every element type the readers need.
const _scratch = new Uint8Array(8);
const _binBE = {
    ui8: _scratch,
    // These views were previously missing (only fl64 was created), so
    // readShort/readInt/readUint/readFloat crashed when called.
    i16: new Int16Array(_scratch.buffer),
    i32: new Int32Array(_scratch.buffer),
    ui32: new Uint32Array(_scratch.buffer),
    fl32: new Float32Array(_scratch.buffer),
    fl64: new Float64Array(_scratch.buffer),
    // Returns the offset of the first zero byte at or after `o`.
    nextZero: (data, o) => {
        let oincr = o;
        while (data[oincr] !== 0) {
            oincr++;
        }
        return oincr;
    },
    readUshort: (buff, p) => {
        return (buff[p] << 8) | buff[p + 1];
    },
    readShort: (buff, p) => {
        const a = _binBE.ui8;
        a[0] = buff[p + 1];
        a[1] = buff[p + 0];
        return _binBE.i16[0];
    },
    readInt: (buff, p) => {
        const a = _binBE.ui8;
        a[0] = buff[p + 3];
        a[1] = buff[p + 2];
        a[2] = buff[p + 1];
        a[3] = buff[p + 0];
        return _binBE.i32[0];
    },
    readUint: (buff, p) => {
        const a = _binBE.ui8;
        a[0] = buff[p + 3];
        a[1] = buff[p + 2];
        a[2] = buff[p + 1];
        a[3] = buff[p + 0];
        return _binBE.ui32[0];
    },
    // `l` is an array of character indices relative to `p`.
    readASCII: (buff, p, l) => {
        return l.map((i) => String.fromCharCode(buff[p + i])).join('');
    },
    readFloat: (buff, p) => {
        const a = _binBE.ui8;
        for (let i = 0; i < 4; i++) {
            a[i] = buff[p + 3 - i];
        }
        return _binBE.fl32[0];
    },
    readDouble: (buff, p) => {
        const a = _binBE.ui8;
        for (let i = 0; i < 8; i++) {
            a[i] = buff[p + 7 - i];
        }
        return _binBE.fl64[0];
    },
    writeUshort: (buff, p, n) => {
        buff[p] = (n >> 8) & 255;
        buff[p + 1] = n & 255;
    },
    writeUint: (buff, p, n) => {
        buff[p] = (n >> 24) & 255;
        buff[p + 1] = (n >> 16) & 255;
        buff[p + 2] = (n >> 8) & 255;
        buff[p + 3] = (n >> 0) & 255;
    },
    writeASCII: (buff, p, s) => {
        for (let i = 0; i < s.length; i++) {
            buff[p + i] = s.charCodeAt(i);
        }
    },
    writeDouble: (buff, p, n) => {
        _binBE.fl64[0] = n;
        for (let i = 0; i < 8; i++) {
            buff[p + i] = _binBE.ui8[7 - i];
        }
    },
};
// Serialises one IFD (a tag -> value map) into `data` starting at `_offset`.
// Values wider than 4 bytes are stored out-of-line after the entry table,
// starting at `eoff`. Returns [offsetOfNextIfdPointer, firstFreeByte].
const _writeIFD = (bin, data, _offset, ifd) => {
    let offset = _offset;
    const keys = Object.keys(ifd).filter((key) => {
        return key !== undefined && key !== null && key !== 'undefined';
    });
    // Entry-count prefix (2 bytes).
    bin.writeUshort(data, offset, keys.length);
    offset += 2;
    // First free byte after the 12-byte entries plus the 4-byte next-IFD pointer.
    let eoff = offset + (12 * keys.length) + 4;
    for (const key of keys) {
        let tag = null;
        // NOTE: Object.keys() always yields strings, so the numeric branch
        // is defensive only.
        if (typeof key === 'number') {
            tag = key;
        }
        else if (typeof key === 'string') {
            tag = parseInt(key, 10);
        }
        const typeName = globals_js_1.fieldTagTypes[tag];
        const typeNum = typeName2byte[typeName];
        if (typeName == null || typeName === undefined || typeof typeName === 'undefined') {
            throw new Error(`unknown type of tag: ${tag}`);
        }
        let val = ifd[key];
        if (val === undefined) {
            throw new Error(`failed to get value for key ${key}`);
        }
        // ASCIIZ format with trailing 0 character
        // http://www.fileformat.info/format/tiff/corion.htm
        // https://stackoverflow.com/questions/7783044/whats-the-difference-between-asciiz-vs-ascii
        if (typeName === 'ASCII' && typeof val === 'string' && (0, utils_js_1.endsWith)(val, '\u0000') === false) {
            val += '\u0000';
        }
        const num = val.length;
        // Entry layout: tag id (2), field type (2), value count (4), value/offset (4).
        bin.writeUshort(data, offset, tag);
        offset += 2;
        bin.writeUshort(data, offset, typeNum);
        offset += 2;
        bin.writeUint(data, offset, num);
        offset += 4;
        // Total payload size: per-element byte width (indexed by field type) * count.
        let dlen = [-1, 1, 1, 2, 4, 8, 0, 0, 0, 0, 0, 0, 8][typeNum] * num;
        let toff = offset;
        if (dlen > 4) {
            // Too large to inline: write an offset here, the data at eoff.
            bin.writeUint(data, offset, eoff);
            toff = eoff;
        }
        if (typeName === 'ASCII') {
            bin.writeASCII(data, toff, val);
        }
        else if (typeName === 'SHORT') {
            (0, utils_js_1.times)(num, (i) => {
                bin.writeUshort(data, toff + (2 * i), val[i]);
            });
        }
        else if (typeName === 'LONG') {
            (0, utils_js_1.times)(num, (i) => {
                bin.writeUint(data, toff + (4 * i), val[i]);
            });
        }
        else if (typeName === 'RATIONAL') {
            // NOTE(review): rationals are approximated with a fixed 10000
            // denominator, i.e. round(v * 10000)/10000 — lossy for values
            // needing finer precision.
            (0, utils_js_1.times)(num, (i) => {
                bin.writeUint(data, toff + (8 * i), Math.round(val[i] * 10000));
                bin.writeUint(data, toff + (8 * i) + 4, 10000);
            });
        }
        else if (typeName === 'DOUBLE') {
            (0, utils_js_1.times)(num, (i) => {
                bin.writeDouble(data, toff + (8 * i), val[i]);
            });
        }
        if (dlen > 4) {
            // Keep out-of-line data even-aligned, as TIFF requires.
            dlen += (dlen & 1);
            eoff += dlen;
        }
        offset += 4;
    }
    return [offset, eoff];
};
// Serialises a list of IFD objects into a big-endian TIFF header block
// and returns it as an ArrayBuffer trimmed to the bytes actually used.
const encodeIfds = (ifds) => {
    const data = new Uint8Array(numBytesInIfd);
    const bin = _binBE;
    // TIFF magic: 'MM' (77, 77) selects big-endian byte order; 42 is the
    // format version. https://en.wikipedia.org/wiki/TIFF#Byte_order
    data[0] = 77;
    data[1] = 77;
    data[3] = 42;
    // The pointer to the first IFD lives at byte 4 and points to byte 8.
    let ifdOffset = 8;
    let offset = 4;
    bin.writeUint(data, offset, ifdOffset);
    offset += 4;
    ifds.forEach((ifd, index) => {
        const [nextPointerAt, firstFree] = _writeIFD(bin, data, ifdOffset, ifd);
        ifdOffset = firstFree;
        // Chain each IFD to the next; the final IFD keeps a zero pointer.
        if (index < ifds.length - 1) {
            bin.writeUint(data, nextPointerAt, ifdOffset);
        }
    });
    if (data.slice) {
        return data.slice(0, ifdOffset).buffer;
    }
    // Fallback copy for environments whose Uint8Array lacks slice().
    const trimmed = new Uint8Array(ifdOffset);
    for (let i = 0; i < ifdOffset; i++) {
        trimmed[i] = data[i];
    }
    return trimmed.buffer;
};
// Builds a complete single-strip TIFF file (header + IFD + pixel data)
// and returns it as an ArrayBuffer.
const encodeImage = (values, width, height, metadata) => {
    if (height === undefined || height === null) {
        // Fixed copy-paste bug: this message previously claimed 'width'.
        throw new Error(`you passed into encodeImage a height of type ${height}`);
    }
    if (width === undefined || width === null) {
        throw new Error(`you passed into encodeImage a width of type ${width}`);
    }
    // Baseline tags: 256 ImageWidth, 257 ImageLength, 273 StripOffsets
    // (pixel data starts right after the fixed-size IFD block),
    // 278 RowsPerStrip (one strip), 305 Software.
    const ifd = {
        256: [width],
        257: [height],
        273: [numBytesInIfd],
        278: [height],
        305: 'geotiff.js', // no array for ASCII(Z)
    };
    if (metadata) {
        for (const i in metadata) {
            if (metadata.hasOwnProperty(i)) {
                ifd[i] = metadata[i];
            }
        }
    }
    const prfx = new Uint8Array(encodeIfds([ifd]));
    const img = new Uint8Array(values);
    // NOTE(review): tag 277 (SamplesPerPixel) is a one-element array here;
    // the multiplication below coerces it to a number. This relies on the
    // caller always providing it (writeGeotiff does) — confirm before
    // reusing this function elsewhere.
    const samplesPerPixel = ifd[277];
    const data = new Uint8Array(numBytesInIfd + (width * height * samplesPerPixel));
    (0, utils_js_1.times)(prfx.length, (i) => {
        data[i] = prfx[i];
    });
    (0, utils_js_1.forEach)(img, (value, i) => {
        data[numBytesInIfd + i] = value;
    });
    return data.buffer;
};
// Translates a metadata object keyed by tag/geokey names into one keyed
// by numeric TIFF tag ids. StripOffsets is dropped (computed elsewhere).
const convertToTids = (input) => {
    const translated = {};
    for (const [key, value] of Object.entries(input)) {
        if (key === 'StripOffsets') {
            continue;
        }
        if (!name2code[key]) {
            console.error(key, 'not in name2code:', Object.keys(name2code));
        }
        translated[name2code[key]] = value;
    }
    return translated;
};
// Wraps a scalar in a one-element array; arrays pass through unchanged.
const toArray = (input) => (Array.isArray(input) ? input : [input]);
// Fallback TIFF tag values applied by writeGeotiff when the caller's
// metadata omits them: no compression, chunky planar layout, no extra samples.
const metadataDefaults = [
    ['Compression', 1],
    ['PlanarConfiguration', 1],
    ['ExtraSamples', 0],
];
// Encodes a GeoTIFF from pixel data plus TIFF/GeoTIFF metadata and returns
// an ArrayBuffer with the finished file. `data` is either a flat
// band-interleaved array or an array of per-band 2-D arrays
// ([band][row][column]). `metadata` is mutated in place as defaults are
// filled in.
function writeGeotiff(data, metadata) {
    const isFlattened = typeof data[0] === 'number';
    let height;
    let numBands;
    let width;
    let flattenedValues;
    if (isFlattened) {
        height = metadata.height || metadata.ImageLength;
        width = metadata.width || metadata.ImageWidth;
        numBands = data.length / (height * width);
        flattenedValues = data;
    }
    else {
        // Interleave [band][row][column] input into one flat array.
        numBands = data.length;
        height = data[0].length;
        width = data[0][0].length;
        flattenedValues = [];
        (0, utils_js_1.times)(height, (rowIndex) => {
            (0, utils_js_1.times)(width, (columnIndex) => {
                (0, utils_js_1.times)(numBands, (bandIndex) => {
                    flattenedValues.push(data[bandIndex][rowIndex][columnIndex]);
                });
            });
        });
    }
    // Normalise the convenience width/height fields into proper TIFF tags.
    metadata.ImageLength = height;
    delete metadata.height;
    metadata.ImageWidth = width;
    delete metadata.width;
    // consult https://www.loc.gov/preservation/digital/formats/content/tiff_tags.shtml
    if (!metadata.BitsPerSample) {
        metadata.BitsPerSample = (0, utils_js_1.times)(numBands, () => 8);
    }
    metadataDefaults.forEach((tag) => {
        const key = tag[0];
        if (!metadata[key]) {
            const value = tag[1];
            metadata[key] = value;
        }
    });
    // The color space of the image data.
    // 1=black is zero and 2=RGB.
    if (!metadata.PhotometricInterpretation) {
        metadata.PhotometricInterpretation = metadata.BitsPerSample.length === 3 ? 2 : 1;
    }
    // The number of components per pixel.
    if (!metadata.SamplesPerPixel) {
        metadata.SamplesPerPixel = [numBands];
    }
    if (!metadata.StripByteCounts) {
        // we are only writing one strip
        metadata.StripByteCounts = [numBands * height * width];
    }
    if (!metadata.ModelPixelScale) {
        // assumes raster takes up exactly the whole globe
        metadata.ModelPixelScale = [360 / width, 180 / height, 0];
    }
    if (!metadata.SampleFormat) {
        metadata.SampleFormat = (0, utils_js_1.times)(numBands, () => 1);
    }
    // if didn't pass in projection information, assume the popular 4326 "geographic projection"
    if (!metadata.hasOwnProperty('GeographicTypeGeoKey') && !metadata.hasOwnProperty('ProjectedCSTypeGeoKey')) {
        metadata.GeographicTypeGeoKey = 4326;
        metadata.ModelTiepoint = [0, 0, 0, -180, 90, 0]; // raster fits whole globe
        metadata.GeogCitationGeoKey = 'WGS 84';
        metadata.GTModelTypeGeoKey = 2;
    }
    // All GeoKey entries present in the metadata, sorted by their numeric
    // key id as the GeoKeyDirectory layout requires.
    const geoKeys = Object.keys(metadata)
        .filter((key) => (0, utils_js_1.endsWith)(key, 'GeoKey'))
        .sort((a, b) => name2code[a] - name2code[b]);
    if (!metadata.GeoAsciiParams) {
        // Concatenate every ASCII-typed geokey value, each NUL-terminated.
        let geoAsciiParams = '';
        geoKeys.forEach((name) => {
            const code = Number(name2code[name]);
            const tagType = globals_js_1.fieldTagTypes[code];
            if (tagType === 'ASCII') {
                geoAsciiParams += `${metadata[name].toString()}\u0000`;
            }
        });
        if (geoAsciiParams.length > 0) {
            metadata.GeoAsciiParams = geoAsciiParams;
        }
    }
    if (!metadata.GeoKeyDirectory) {
        // Header: KeyDirectoryVersion, KeyRevision, MinorRevision, NumberOfKeys,
        // followed by one (KeyID, TIFFTagLocation, Count, ValueOffset) quad per key.
        const NumberOfKeys = geoKeys.length;
        const GeoKeyDirectory = [1, 1, 0, NumberOfKeys];
        geoKeys.forEach((geoKey) => {
            const KeyID = Number(name2code[geoKey]);
            GeoKeyDirectory.push(KeyID);
            let Count;
            let TIFFTagLocation;
            let valueOffset;
            if (globals_js_1.fieldTagTypes[KeyID] === 'SHORT') {
                // SHORT values are stored inline in the directory itself.
                Count = 1;
                TIFFTagLocation = 0;
                valueOffset = metadata[geoKey];
            }
            else if (geoKey === 'GeogCitationGeoKey') {
                Count = metadata.GeoAsciiParams.length;
                TIFFTagLocation = Number(name2code.GeoAsciiParams);
                valueOffset = 0;
            }
            else {
                console.log(`[geotiff.js] couldn't get TIFFTagLocation for ${geoKey}`);
            }
            GeoKeyDirectory.push(TIFFTagLocation);
            GeoKeyDirectory.push(Count);
            GeoKeyDirectory.push(valueOffset);
        });
        metadata.GeoKeyDirectory = GeoKeyDirectory;
    }
    // delete GeoKeys from metadata, because stored in GeoKeyDirectory tag
    // NOTE(review): `for..in` over the geoKeys *array* iterates its indices
    // ('0', '1', ...), so this deletes metadata['0'] etc. and never removes
    // the GeoKey entries themselves — they are subsequently also written as
    // ordinary IFD tags via convertToTids. The comment and the behavior
    // disagree; confirm against readers before changing, since fieldTagTypes
    // contains entries for the geokey codes and output may rely on this.
    for (const geoKey in geoKeys) {
        if (geoKeys.hasOwnProperty(geoKey)) {
            delete metadata[geoKey];
        }
    }
    [
        'Compression',
        'ExtraSamples',
        'GeographicTypeGeoKey',
        'GTModelTypeGeoKey',
        'GTRasterTypeGeoKey',
        'ImageLength',
        'ImageWidth',
        'Orientation',
        'PhotometricInterpretation',
        'ProjectedCSTypeGeoKey',
        'PlanarConfiguration',
        'ResolutionUnit',
        'SamplesPerPixel',
        'XPosition',
        'YPosition',
    ].forEach((name) => {
        // These tags must be written as arrays even when given as scalars.
        if (metadata[name]) {
            metadata[name] = toArray(metadata[name]);
        }
    });
    const encodedMetadata = convertToTids(metadata);
    const outputImage = encodeImage(flattenedValues, width, height, encodedMetadata);
    return outputImage;
}
exports.writeGeotiff = writeGeotiff;
//# sourceMappingURL=geotiffwriter.js.map

1
node_modules/geotiff/dist-node/geotiffwriter.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

236
node_modules/geotiff/dist-node/globals.d.ts generated vendored Normal file
View File

@@ -0,0 +1,236 @@
export const fieldTagNames: {
315: string;
258: string;
265: string;
264: string;
320: string;
259: string;
33432: string;
306: string;
338: string;
266: string;
289: string;
288: string;
291: string;
290: string;
316: string;
270: string;
257: string;
256: string;
271: string;
281: string;
280: string;
272: string;
254: string;
274: string;
262: string;
284: string;
296: string;
278: string;
277: string;
305: string;
279: string;
273: string;
255: string;
263: string;
282: string;
283: string;
326: string;
327: string;
343: string;
328: string;
433: string;
434: string;
269: string;
336: string;
321: string;
346: string;
347: string;
285: string;
297: string;
317: string;
319: string;
532: string;
339: string;
340: string;
341: string;
559: string;
330: string;
292: string;
293: string;
325: string;
323: string;
324: string;
322: string;
301: string;
318: string;
344: string;
286: string;
529: string;
531: string;
530: string;
345: string;
287: string;
37378: string;
40961: string;
36868: string;
36867: string;
34665: string;
36864: string;
33434: string;
41728: string;
37385: string;
40960: string;
33437: string;
42016: string;
37384: string;
37500: string;
37377: string;
37510: string;
33723: string;
34675: string;
700: string;
42112: string;
42113: string;
34377: string;
33550: string;
33922: string;
34264: string;
34735: string;
34736: string;
34737: string;
50674: string;
};
export const fieldTags: {};
export const fieldTagTypes: {
256: string;
257: string;
258: string;
259: string;
262: string;
273: string;
274: string;
277: string;
278: string;
279: string;
282: string;
283: string;
284: string;
286: string;
287: string;
296: string;
297: string;
305: string;
306: string;
338: string;
339: string;
513: string;
514: string;
1024: string;
1025: string;
2048: string;
2049: string;
3072: string;
3073: string;
33550: string;
33922: string;
34665: string;
34735: string;
34737: string;
42113: string;
};
export const arrayFields: any[];
export const fieldTypeNames: {
1: string;
2: string;
3: string;
4: string;
5: string;
6: string;
7: string;
8: string;
9: string;
10: string;
11: string;
12: string;
13: string;
16: string;
17: string;
18: string;
};
export const fieldTypes: {};
export namespace photometricInterpretations {
const WhiteIsZero: number;
const BlackIsZero: number;
const RGB: number;
const Palette: number;
const TransparencyMask: number;
const CMYK: number;
const YCbCr: number;
const CIELab: number;
const ICCLab: number;
}
export namespace ExtraSamplesValues {
const Unspecified: number;
const Assocalpha: number;
const Unassalpha: number;
}
export namespace LercParameters {
const Version: number;
const AddCompression: number;
}
export namespace LercAddCompression {
const None: number;
const Deflate: number;
}
export const geoKeyNames: {
1024: string;
1025: string;
1026: string;
2048: string;
2049: string;
2050: string;
2051: string;
2052: string;
2053: string;
2054: string;
2055: string;
2056: string;
2057: string;
2058: string;
2059: string;
2060: string;
2061: string;
2062: string;
3072: string;
3073: string;
3074: string;
3075: string;
3076: string;
3077: string;
3078: string;
3079: string;
3080: string;
3081: string;
3082: string;
3083: string;
3084: string;
3085: string;
3086: string;
3087: string;
3088: string;
3089: string;
3090: string;
3091: string;
3092: string;
3093: string;
3094: string;
3095: string;
3096: string;
4096: string;
4097: string;
4098: string;
4099: string;
};
export const geoKeys: {};
//# sourceMappingURL=globals.d.ts.map

1
node_modules/geotiff/dist-node/globals.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"globals.d.ts","sourceRoot":"","sources":["../dist-module/globals.js"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAyHE;AAEF,2BAA4B;AAO5B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAoCE;AAEF,gCAUE;AAEF;;;;;;;;;;;;;;;;;EAmBE;AAEF,4BAA6B;;;;;;;;;;;;;;;;;;;;;;;;;AAoC7B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAgDE;AAEF,yBAA0B"}

276
node_modules/geotiff/dist-node/globals.js generated vendored Normal file
View File

@@ -0,0 +1,276 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.geoKeys = exports.geoKeyNames = exports.LercAddCompression = exports.LercParameters = exports.ExtraSamplesValues = exports.photometricInterpretations = exports.fieldTypes = exports.fieldTypeNames = exports.arrayFields = exports.fieldTagTypes = exports.fieldTags = exports.fieldTagNames = void 0;
exports.fieldTagNames = {
// TIFF Baseline
0x013B: 'Artist',
0x0102: 'BitsPerSample',
0x0109: 'CellLength',
0x0108: 'CellWidth',
0x0140: 'ColorMap',
0x0103: 'Compression',
0x8298: 'Copyright',
0x0132: 'DateTime',
0x0152: 'ExtraSamples',
0x010A: 'FillOrder',
0x0121: 'FreeByteCounts',
0x0120: 'FreeOffsets',
0x0123: 'GrayResponseCurve',
0x0122: 'GrayResponseUnit',
0x013C: 'HostComputer',
0x010E: 'ImageDescription',
0x0101: 'ImageLength',
0x0100: 'ImageWidth',
0x010F: 'Make',
0x0119: 'MaxSampleValue',
0x0118: 'MinSampleValue',
0x0110: 'Model',
0x00FE: 'NewSubfileType',
0x0112: 'Orientation',
0x0106: 'PhotometricInterpretation',
0x011C: 'PlanarConfiguration',
0x0128: 'ResolutionUnit',
0x0116: 'RowsPerStrip',
0x0115: 'SamplesPerPixel',
0x0131: 'Software',
0x0117: 'StripByteCounts',
0x0111: 'StripOffsets',
0x00FF: 'SubfileType',
0x0107: 'Threshholding',
0x011A: 'XResolution',
0x011B: 'YResolution',
// TIFF Extended
0x0146: 'BadFaxLines',
0x0147: 'CleanFaxData',
0x0157: 'ClipPath',
0x0148: 'ConsecutiveBadFaxLines',
0x01B1: 'Decode',
0x01B2: 'DefaultImageColor',
0x010D: 'DocumentName',
0x0150: 'DotRange',
0x0141: 'HalftoneHints',
0x015A: 'Indexed',
0x015B: 'JPEGTables',
0x011D: 'PageName',
0x0129: 'PageNumber',
0x013D: 'Predictor',
0x013F: 'PrimaryChromaticities',
0x0214: 'ReferenceBlackWhite',
0x0153: 'SampleFormat',
0x0154: 'SMinSampleValue',
0x0155: 'SMaxSampleValue',
0x022F: 'StripRowCounts',
0x014A: 'SubIFDs',
0x0124: 'T4Options',
0x0125: 'T6Options',
0x0145: 'TileByteCounts',
0x0143: 'TileLength',
0x0144: 'TileOffsets',
0x0142: 'TileWidth',
0x012D: 'TransferFunction',
0x013E: 'WhitePoint',
0x0158: 'XClipPathUnits',
0x011E: 'XPosition',
0x0211: 'YCbCrCoefficients',
0x0213: 'YCbCrPositioning',
0x0212: 'YCbCrSubSampling',
0x0159: 'YClipPathUnits',
0x011F: 'YPosition',
// EXIF
0x9202: 'ApertureValue',
0xA001: 'ColorSpace',
0x9004: 'DateTimeDigitized',
0x9003: 'DateTimeOriginal',
0x8769: 'Exif IFD',
0x9000: 'ExifVersion',
0x829A: 'ExposureTime',
0xA300: 'FileSource',
0x9209: 'Flash',
0xA000: 'FlashpixVersion',
0x829D: 'FNumber',
0xA420: 'ImageUniqueID',
0x9208: 'LightSource',
0x927C: 'MakerNote',
0x9201: 'ShutterSpeedValue',
0x9286: 'UserComment',
// IPTC
0x83BB: 'IPTC',
// ICC
0x8773: 'ICC Profile',
// XMP
0x02BC: 'XMP',
// GDAL
0xA480: 'GDAL_METADATA',
0xA481: 'GDAL_NODATA',
// Photoshop
0x8649: 'Photoshop',
// GeoTiff
0x830E: 'ModelPixelScale',
0x8482: 'ModelTiepoint',
0x85D8: 'ModelTransformation',
0x87AF: 'GeoKeyDirectory',
0x87B0: 'GeoDoubleParams',
0x87B1: 'GeoAsciiParams',
// LERC
0xC5F2: 'LercParameters',
};
// Reverse lookup built from fieldTagNames: tag name -> numeric tag id.
exports.fieldTags = {};
for (const [code, name] of Object.entries(exports.fieldTagNames)) {
    exports.fieldTags[name] = parseInt(code, 10);
}
exports.fieldTagTypes = {
256: 'SHORT',
257: 'SHORT',
258: 'SHORT',
259: 'SHORT',
262: 'SHORT',
273: 'LONG',
274: 'SHORT',
277: 'SHORT',
278: 'LONG',
279: 'LONG',
282: 'RATIONAL',
283: 'RATIONAL',
284: 'SHORT',
286: 'SHORT',
287: 'RATIONAL',
296: 'SHORT',
297: 'SHORT',
305: 'ASCII',
306: 'ASCII',
338: 'SHORT',
339: 'SHORT',
513: 'LONG',
514: 'LONG',
1024: 'SHORT',
1025: 'SHORT',
2048: 'SHORT',
2049: 'ASCII',
3072: 'SHORT',
3073: 'ASCII',
33550: 'DOUBLE',
33922: 'DOUBLE',
34665: 'LONG',
34735: 'SHORT',
34737: 'ASCII',
42113: 'ASCII',
};
// Tags whose values are always kept as arrays when parsing, even when a
// single value is present.
exports.arrayFields = [
    exports.fieldTags.BitsPerSample,
    exports.fieldTags.ExtraSamples,
    exports.fieldTags.SampleFormat,
    exports.fieldTags.StripByteCounts,
    exports.fieldTags.StripOffsets,
    exports.fieldTags.StripRowCounts,
    exports.fieldTags.TileByteCounts,
    exports.fieldTags.TileOffsets,
    exports.fieldTags.SubIFDs,
];
// TIFF field type names keyed by type code (reverse map `fieldTypes` is built
// below).
exports.fieldTypeNames = {
    0x0001: 'BYTE',
    0x0002: 'ASCII',
    0x0003: 'SHORT',
    0x0004: 'LONG',
    0x0005: 'RATIONAL',
    0x0006: 'SBYTE',
    0x0007: 'UNDEFINED',
    0x0008: 'SSHORT',
    0x0009: 'SLONG',
    0x000A: 'SRATIONAL',
    0x000B: 'FLOAT',
    0x000C: 'DOUBLE',
    // IFD offset, suggested by https://owl.phy.queensu.ca/~phil/exiftool/standards.html
    0x000D: 'IFD',
    // introduced by BigTIFF
    0x0010: 'LONG8',
    0x0011: 'SLONG8',
    0x0012: 'IFD8',
};
// Build the reverse lookup: field type name -> numeric type code.
exports.fieldTypes = {};
for (const [typeId, typeName] of Object.entries(exports.fieldTypeNames)) {
    exports.fieldTypes[typeName] = parseInt(typeId, 10);
}
// PhotometricInterpretation tag values keyed by human-readable name.
exports.photometricInterpretations = {
    WhiteIsZero: 0,
    BlackIsZero: 1,
    RGB: 2,
    Palette: 3,
    TransparencyMask: 4,
    CMYK: 5,
    YCbCr: 6,
    CIELab: 8,
    ICCLab: 9,
};
// ExtraSamples tag values (meaning of any extra per-pixel samples).
exports.ExtraSamplesValues = {
    Unspecified: 0,
    Assocalpha: 1,
    Unassalpha: 2,
};
// NOTE(review): presumably indices into the LercParameters tag value array —
// confirm against the LERC decoder.
exports.LercParameters = {
    Version: 0,
    AddCompression: 1,
};
// LERC additional-compression codes (cf. LercParameters.AddCompression).
exports.LercAddCompression = {
    None: 0,
    Deflate: 1,
};
// GeoTIFF GeoKey names keyed by decimal GeoKey id (reverse map `geoKeys` is
// built below).
exports.geoKeyNames = {
    1024: 'GTModelTypeGeoKey',
    1025: 'GTRasterTypeGeoKey',
    1026: 'GTCitationGeoKey',
    2048: 'GeographicTypeGeoKey',
    2049: 'GeogCitationGeoKey',
    2050: 'GeogGeodeticDatumGeoKey',
    2051: 'GeogPrimeMeridianGeoKey',
    2052: 'GeogLinearUnitsGeoKey',
    2053: 'GeogLinearUnitSizeGeoKey',
    2054: 'GeogAngularUnitsGeoKey',
    2055: 'GeogAngularUnitSizeGeoKey',
    2056: 'GeogEllipsoidGeoKey',
    2057: 'GeogSemiMajorAxisGeoKey',
    2058: 'GeogSemiMinorAxisGeoKey',
    2059: 'GeogInvFlatteningGeoKey',
    2060: 'GeogAzimuthUnitsGeoKey',
    2061: 'GeogPrimeMeridianLongGeoKey',
    2062: 'GeogTOWGS84GeoKey',
    3072: 'ProjectedCSTypeGeoKey',
    3073: 'PCSCitationGeoKey',
    3074: 'ProjectionGeoKey',
    3075: 'ProjCoordTransGeoKey',
    3076: 'ProjLinearUnitsGeoKey',
    3077: 'ProjLinearUnitSizeGeoKey',
    3078: 'ProjStdParallel1GeoKey',
    3079: 'ProjStdParallel2GeoKey',
    3080: 'ProjNatOriginLongGeoKey',
    3081: 'ProjNatOriginLatGeoKey',
    3082: 'ProjFalseEastingGeoKey',
    3083: 'ProjFalseNorthingGeoKey',
    3084: 'ProjFalseOriginLongGeoKey',
    3085: 'ProjFalseOriginLatGeoKey',
    3086: 'ProjFalseOriginEastingGeoKey',
    3087: 'ProjFalseOriginNorthingGeoKey',
    3088: 'ProjCenterLongGeoKey',
    3089: 'ProjCenterLatGeoKey',
    3090: 'ProjCenterEastingGeoKey',
    3091: 'ProjCenterNorthingGeoKey',
    3092: 'ProjScaleAtNatOriginGeoKey',
    3093: 'ProjScaleAtCenterGeoKey',
    3094: 'ProjAzimuthAngleGeoKey',
    3095: 'ProjStraightVertPoleLongGeoKey',
    3096: 'ProjRectifiedGridAngleGeoKey',
    4096: 'VerticalCSTypeGeoKey',
    4097: 'VerticalCitationGeoKey',
    4098: 'VerticalDatumGeoKey',
    4099: 'VerticalUnitsGeoKey',
};
// Build the reverse lookup: GeoKey name -> numeric GeoKey id.
exports.geoKeys = {};
for (const [keyId, keyName] of Object.entries(exports.geoKeyNames)) {
    exports.geoKeys[keyName] = parseInt(keyId, 10);
}
//# sourceMappingURL=globals.js.map

1
node_modules/geotiff/dist-node/globals.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

13
node_modules/geotiff/dist-node/logging.d.ts generated vendored Normal file
View File

@@ -0,0 +1,13 @@
/**
*
* @param {object} logger the new logger. e.g `console`
*/
export function setLogger(logger?: object): void;
export function debug(...args: any[]): void;
export function log(...args: any[]): void;
export function info(...args: any[]): void;
export function warn(...args: any[]): void;
export function error(...args: any[]): void;
export function time(...args: any[]): void;
export function timeEnd(...args: any[]): void;
//# sourceMappingURL=logging.d.ts.map

1
node_modules/geotiff/dist-node/logging.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"logging.d.ts","sourceRoot":"","sources":["../dist-module/logging.js"],"names":[],"mappings":"AAqBA;;;GAGG;AACH,mCAFW,MAAM,QAIhB;AAED,4CAEC;AAED,0CAEC;AAED,2CAEC;AAED,2CAEC;AAED,4CAEC;AAED,2CAEC;AAED,8CAEC"}

53
node_modules/geotiff/dist-node/logging.js generated vendored Normal file
View File

@@ -0,0 +1,53 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Export stubs; each is assigned where the corresponding function is defined.
exports.timeEnd = exports.time = exports.error = exports.warn = exports.info = exports.log = exports.debug = exports.setLogger = void 0;
/**
 * A no-op logger
 */
class DummyLogger {
    log() { }
    debug() { }
    info() { }
    warn() { }
    error() { }
    time() { }
    timeEnd() { }
}
// Active logger; replaced via setLogger(). Defaults to the silent DummyLogger.
let LOGGER = new DummyLogger();
/**
 * Install a new logger implementation; calling with no argument restores the
 * silent default.
 * @param {object} logger the new logger. e.g `console`
 */
function setLogger(newLogger = new DummyLogger()) {
    LOGGER = newLogger;
}
exports.setLogger = setLogger;
// Thin forwarders: each delegates to whichever LOGGER is current at call time.
const debug = (...args) => LOGGER.debug(...args);
exports.debug = debug;
const log = (...args) => LOGGER.log(...args);
exports.log = log;
const info = (...args) => LOGGER.info(...args);
exports.info = info;
const warn = (...args) => LOGGER.warn(...args);
exports.warn = warn;
const error = (...args) => LOGGER.error(...args);
exports.error = error;
const time = (...args) => LOGGER.time(...args);
exports.time = time;
const timeEnd = (...args) => LOGGER.timeEnd(...args);
exports.timeEnd = timeEnd;
//# sourceMappingURL=logging.js.map

1
node_modules/geotiff/dist-node/logging.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../dist-module/logging.js"],"names":[],"mappings":";;;AAAA;;GAEG;AACH,MAAM,WAAW;IACf,GAAG,KAAI,CAAC;IAER,KAAK,KAAI,CAAC;IAEV,IAAI,KAAI,CAAC;IAET,IAAI,KAAI,CAAC;IAET,KAAK,KAAI,CAAC;IAEV,IAAI,KAAI,CAAC;IAET,OAAO,KAAI,CAAC;CACb;AAED,IAAI,MAAM,GAAG,IAAI,WAAW,EAAE,CAAC;AAE/B;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAM,GAAG,IAAI,WAAW,EAAE;IAClD,MAAM,GAAG,MAAM,CAAC;AAClB,CAAC;AAFD,8BAEC;AAED,SAAgB,KAAK,CAAC,GAAG,IAAI;IAC3B,OAAO,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;AAC/B,CAAC;AAFD,sBAEC;AAED,SAAgB,GAAG,CAAC,GAAG,IAAI;IACzB,OAAO,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;AAC7B,CAAC;AAFD,kBAEC;AAED,SAAgB,IAAI,CAAC,GAAG,IAAI;IAC1B,OAAO,MAAM,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;AAC9B,CAAC;AAFD,oBAEC;AAED,SAAgB,IAAI,CAAC,GAAG,IAAI;IAC1B,OAAO,MAAM,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;AAC9B,CAAC;AAFD,oBAEC;AAED,SAAgB,KAAK,CAAC,GAAG,IAAI;IAC3B,OAAO,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;AAC/B,CAAC;AAFD,sBAEC;AAED,SAAgB,IAAI,CAAC,GAAG,IAAI;IAC1B,OAAO,MAAM,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;AAC9B,CAAC;AAFD,oBAEC;AAED,SAAgB,OAAO,CAAC,GAAG,IAAI;IAC7B,OAAO,MAAM,CAAC,OAAO,CAAC,GAAG,IAAI,CAAC,CAAC;AACjC,CAAC;AAFD,0BAEC"}

1
node_modules/geotiff/dist-node/package.json generated vendored Normal file
View File

@@ -0,0 +1 @@
{"type":"commonjs"}

51
node_modules/geotiff/dist-node/pool.d.ts generated vendored Normal file
View File

@@ -0,0 +1,51 @@
export default Pool;
/**
* @module pool
*/
/**
* Pool for workers to decode chunks of the images.
*/
declare class Pool {
/**
* @constructor
* @param {Number} [size] The size of the pool. Defaults to the number of CPUs
* available. When this parameter is `null` or 0, then the
* decoding will be done in the main thread.
* @param {function(): Worker} [createWorker] A function that creates the decoder worker.
* Defaults to a worker with all decoders that ship with geotiff.js. The `createWorker()`
* function is expected to return a `Worker` compatible with Web Workers. For code that
* runs in Node, [web-worker](https://www.npmjs.com/package/web-worker) is a good choice.
*
* A worker that uses a custom lzw decoder would look like this `my-custom-worker.js` file:
* ```js
* import { addDecoder, getDecoder } from 'geotiff';
* addDecoder(5, () => import ('./my-custom-lzw').then((m) => m.default));
* self.addEventListener('message', async (e) => {
* const { id, fileDirectory, buffer } = e.data;
* const decoder = await getDecoder(fileDirectory);
* const decoded = await decoder.decode(fileDirectory, buffer);
* self.postMessage({ decoded, id }, [decoded]);
* });
* ```
* The way the above code is built into a worker by the `createWorker()` function
* depends on the used bundler. For most bundlers, something like this will work:
* ```js
* function createWorker() {
* return new Worker(new URL('./my-custom-worker.js', import.meta.url));
* }
* ```
*/
constructor(size?: number | undefined, createWorker?: (() => Worker) | undefined);
workers: any[] | null;
_awaitingDecoder: Promise<any> | null;
size: number;
messageId: number;
/**
* Decode the given block of bytes with the set compression method.
* @param {ArrayBuffer} buffer the array buffer of bytes to decode.
* @returns {Promise<ArrayBuffer>} the decoded result as a `Promise`
*/
decode(fileDirectory: any, buffer: ArrayBuffer): Promise<ArrayBuffer>;
destroy(): void;
}
//# sourceMappingURL=pool.d.ts.map

1
node_modules/geotiff/dist-node/pool.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"pool.d.ts","sourceRoot":"","sources":["../dist-module/pool.js"],"names":[],"mappings":";AAIA;;GAEG;AAEH;;GAEG;AACH;IACE;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA4BG;IACH,kFAmBC;IAlBC,sBAAmB;IACnB,sCAA4B;IAC5B,aAAgB;IAChB,kBAAkB;IAiBpB;;;;OAIG;IACH,mCAHW,WAAW,GACT,QAAQ,WAAW,CAAC,CAuBhC;IAED,gBAOC;CACF"}

117
node_modules/geotiff/dist-node/pool.js generated vendored Normal file
View File

@@ -0,0 +1,117 @@
"use strict";
// --- TypeScript-generated CommonJS interop helpers (machine-emitted) ---
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const index_js_1 = require("./compression/index.js");
// Default worker count: hardware thread count in browsers; 2 where
// `navigator` is undefined (e.g. Node).
const defaultPoolSize = typeof navigator !== 'undefined' ? (navigator.hardwareConcurrency || 2) : 2;
/**
* @module pool
*/
/**
* Pool for workers to decode chunks of the images.
*/
class Pool {
    /**
     * @constructor
     * @param {Number} [size] The size of the pool. Defaults to the number of CPUs
     *                        available. When this parameter is `null` or 0, then the
     *                        decoding will be done in the main thread.
     * @param {function(): Worker} [createWorker] A function that creates the decoder worker.
     * Defaults to a worker with all decoders that ship with geotiff.js. The `createWorker()`
     * function is expected to return a `Worker` compatible with Web Workers. For code that
     * runs in Node, [web-worker](https://www.npmjs.com/package/web-worker) is a good choice.
     *
     * A worker that uses a custom lzw decoder would look like this `my-custom-worker.js` file:
     * ```js
     * import { addDecoder, getDecoder } from 'geotiff';
     * addDecoder(5, () => import ('./my-custom-lzw').then((m) => m.default));
     * self.addEventListener('message', async (e) => {
     *   const { id, fileDirectory, buffer } = e.data;
     *   const decoder = await getDecoder(fileDirectory);
     *   const decoded = await decoder.decode(fileDirectory, buffer);
     *   self.postMessage({ decoded, id }, [decoded]);
     * });
     * ```
     * The way the above code is built into a worker by the `createWorker()` function
     * depends on the used bundler. For most bundlers, something like this will work:
     * ```js
     * function createWorker() {
     *   return new Worker(new URL('./my-custom-worker.js', import.meta.url));
     * }
     * ```
     */
    constructor(size = defaultPoolSize, createWorker) {
        this.workers = null;
        this._awaitingDecoder = null;
        this.size = size;
        this.messageId = 0;
        if (size) {
            // Resolve the worker factory: either the caller-supplied one, or the
            // default decoder worker loaded via dynamic import.
            this._awaitingDecoder = createWorker ? Promise.resolve(createWorker) : new Promise((resolve) => {
                Promise.resolve().then(() => __importStar(require('./worker/decoder.js'))).then((module) => {
                    resolve(module.create);
                });
            });
            this._awaitingDecoder.then((create) => {
                // Factory ready: spin up `size` workers and clear the pending marker
                // so decode() stops awaiting it.
                this._awaitingDecoder = null;
                this.workers = [];
                for (let i = 0; i < size; i++) {
                    this.workers.push({ worker: create(), idle: true });
                }
            });
        }
    }
    /**
     * Decode the given block of bytes with the set compression method.
     * @param {ArrayBuffer} buffer the array buffer of bytes to decode.
     * @returns {Promise<ArrayBuffer>} the decoded result as a `Promise`
     */
    async decode(fileDirectory, buffer) {
        if (this._awaitingDecoder) {
            await this._awaitingDecoder;
        }
        // size 0/null: decode synchronously on the calling thread instead of a worker.
        return this.size === 0
            ? (0, index_js_1.getDecoder)(fileDirectory).then((decoder) => decoder.decode(fileDirectory, buffer))
            : new Promise((resolve) => {
                // Prefer an idle worker; if all are busy, queue the job on a random one.
                const worker = this.workers.find((candidate) => candidate.idle)
                    || this.workers[Math.floor(Math.random() * this.size)];
                worker.idle = false;
                const id = this.messageId++;
                // Match the worker's response to this request by id, then detach the listener.
                const onMessage = (e) => {
                    if (e.data.id === id) {
                        worker.idle = true;
                        resolve(e.data.decoded);
                        worker.worker.removeEventListener('message', onMessage);
                    }
                };
                worker.worker.addEventListener('message', onMessage);
                // `buffer` is in the transfer list, so the caller's reference is
                // detached after this call.
                worker.worker.postMessage({ fileDirectory, buffer, id }, [buffer]);
            });
    }
    // Terminate all workers and drop them; the pool cannot be reused afterwards.
    destroy() {
        if (this.workers) {
            this.workers.forEach((worker) => {
                worker.worker.terminate();
            });
            this.workers = null;
        }
    }
}
exports.default = Pool;
//# sourceMappingURL=pool.js.map

1
node_modules/geotiff/dist-node/pool.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"pool.js","sourceRoot":"","sources":["../dist-module/pool.js"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,qDAAoD;AAEpD,MAAM,eAAe,GAAG,OAAO,SAAS,KAAK,WAAW,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,mBAAmB,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAEpG;;GAEG;AAEH;;GAEG;AACH,MAAM,IAAI;IACR;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA4BG;IACH,YAAY,IAAI,GAAG,eAAe,EAAE,YAAY;QAC9C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;QACpB,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC;QAC7B,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC;QACnB,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,gBAAgB,GAAG,YAAY,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;gBAC7F,kDAAO,qBAAqB,IAAE,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE;oBAC5C,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;gBACzB,CAAC,CAAC,CAAC;YACL,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE;gBACpC,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC;gBAC7B,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;gBAClB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE;oBAC7B,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;iBACrD;YACH,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM;QAChC,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACzB,MAAM,IAAI,CAAC,gBAAgB,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,IAAI,KAAK,CAAC;YACpB,CAAC,CAAC,IAAA,qBAAU,EAAC,aAAa,CAAC,CAAC,IAAI,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;YACpF,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;gBACxB,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC;uBAC1D,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;gBACzD,MAAM,CAAC,IAAI,GAAG,KAAK,CAAC;gBACpB,MAAM,EAAE,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC;gBAC5B,MAAM,SAAS,GAAG,CAAC,CAAC,EAAE,EAAE;oBACtB,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,EAAE;wBACpB,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC;wBACnB,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;wBACxB,MAAM,CAAC,MAAM,CAAC,mBAAmB,CAAC,SAAS,EAAE,SA
AS,CAAC,CAAC;qBACzD;gBACH,CAAC,CAAC;gBACF,MAAM,CAAC,MAAM,CAAC,gBAAgB,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;gBACrD,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,EAAE,aAAa,EAAE,MAAM,EAAE,EAAE,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC;YACrE,CAAC,CAAC,CAAC;IACP,CAAC;IAED,OAAO;QACL,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE;gBAC9B,MAAM,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC;YAC5B,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;SACrB;IACH,CAAC;CACF;AAED,kBAAe,IAAI,CAAC"}

2
node_modules/geotiff/dist-node/predictor.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
export function applyPredictor(block: any, predictor: any, width: any, height: any, bitsPerSample: any, planarConfiguration: any): any;
//# sourceMappingURL=predictor.d.ts.map

1
node_modules/geotiff/dist-node/predictor.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"predictor.d.ts","sourceRoot":"","sources":["../dist-module/predictor.js"],"names":[],"mappings":"AAkCA,uIAqDC"}

77
node_modules/geotiff/dist-node/predictor.js generated vendored Normal file
View File

@@ -0,0 +1,77 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Export stub; assigned at the bottom of the module.
exports.applyPredictor = void 0;
/**
 * Undoes horizontal differencing on one row, in place: each sample becomes the
 * running sum of itself and the sample `stride` positions earlier.
 *
 * Fix: the original `do { … } while (length > 0)` always executed once, so a
 * row no shorter than `stride` elements (e.g. a one-pixel-wide row) performed
 * out-of-range reads/writes. A `while` guard skips such rows entirely, which
 * is behaviorally identical for typed arrays on in-range inputs.
 *
 * @param row    typed-array view over the row's samples (mutated in place)
 * @param stride number of interleaved samples per pixel
 */
function decodeRowAcc(row, stride) {
    let remaining = row.length - stride;
    let offset = 0;
    while (remaining > 0) {
        for (let i = stride; i > 0; i--) {
            row[offset + stride] += row[offset];
            offset++;
        }
        remaining -= stride;
    }
}
/**
 * Reverses floating-point horizontal prediction on one row, in place: first
 * undoes the byte-wise differencing, then re-gathers the per-byte planes
 * (in reversed plane order) back into contiguous samples.
 */
function decodeRowFloatingPoint(row, stride, bytesPerSample) {
    const totalBytes = row.length;
    const wordCount = totalBytes / bytesPerSample;
    // Step 1: undo the byte-level differencing.
    let pos = 0;
    for (let remaining = totalBytes; remaining > stride; remaining -= stride) {
        for (let s = stride; s > 0; --s) {
            row[pos + stride] += row[pos];
            pos += 1;
        }
    }
    // Step 2: de-interleave the byte planes into per-sample byte order.
    const planes = row.slice();
    for (let w = 0; w < wordCount; ++w) {
        for (let b = 0; b < bytesPerSample; ++b) {
            row[(bytesPerSample * w) + b] = planes[((bytesPerSample - b - 1) * wordCount) + w];
        }
    }
}
/**
 * Reverses the TIFF predictor on a decoded strip/tile, mutating it in place.
 * @param block the decoded bytes as an ArrayBuffer
 * @param predictor 1 (or falsy) = none, 2 = horizontal, 3 = floating point
 * @param width width of the strip/tile in pixels
 * @param height number of rows in the strip/tile
 * @param bitsPerSample per-sample bit widths; all must be equal multiples of 8
 * @param planarConfiguration 2 = samples stored in separate planes
 * @returns the same block with the prediction undone
 */
function applyPredictor(block, predictor, width, height, bitsPerSample, planarConfiguration) {
    if (!predictor || predictor === 1) {
        return block;
    }
    // Only whole-byte, uniformly sized samples are supported.
    for (let i = 0; i < bitsPerSample.length; ++i) {
        if (bitsPerSample[i] % 8 !== 0) {
            throw new Error('When decoding with predictor, only multiple of 8 bits are supported.');
        }
        if (bitsPerSample[i] !== bitsPerSample[0]) {
            throw new Error('When decoding with predictor, all samples must have the same size.');
        }
    }
    const bytesPerSample = bitsPerSample[0] / 8;
    // With separate planes a row holds one sample per pixel; otherwise samples
    // are interleaved within the row.
    const stride = planarConfiguration === 2 ? 1 : bitsPerSample.length;
    for (let i = 0; i < height; ++i) {
        // Last strip will be truncated if height % stripHeight != 0
        if (i * stride * width * bytesPerSample >= block.byteLength) {
            break;
        }
        let row;
        if (predictor === 2) { // horizontal prediction
            // View one row of the block at the sample width so the additions
            // operate on whole samples.
            switch (bitsPerSample[0]) {
                case 8:
                    row = new Uint8Array(block, i * stride * width * bytesPerSample, stride * width * bytesPerSample);
                    break;
                case 16:
                    row = new Uint16Array(block, i * stride * width * bytesPerSample, stride * width * bytesPerSample / 2);
                    break;
                case 32:
                    row = new Uint32Array(block, i * stride * width * bytesPerSample, stride * width * bytesPerSample / 4);
                    break;
                default:
                    throw new Error(`Predictor 2 not allowed with ${bitsPerSample[0]} bits per sample.`);
            }
            // NOTE(review): decodeRowAcc takes (row, stride); the third argument
            // is ignored.
            decodeRowAcc(row, stride, bytesPerSample);
        }
        else if (predictor === 3) { // horizontal floating point
            // Floating-point prediction always operates on raw bytes.
            row = new Uint8Array(block, i * stride * width * bytesPerSample, stride * width * bytesPerSample);
            decodeRowFloatingPoint(row, stride, bytesPerSample);
        }
    }
    return block;
}
exports.applyPredictor = applyPredictor;
//# sourceMappingURL=predictor.js.map

1
node_modules/geotiff/dist-node/predictor.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"predictor.js","sourceRoot":"","sources":["../dist-module/predictor.js"],"names":[],"mappings":";;;AAAA,SAAS,YAAY,CAAC,GAAG,EAAE,MAAM;IAC/B,IAAI,MAAM,GAAG,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC;IACjC,IAAI,MAAM,GAAG,CAAC,CAAC;IACf,GAAG;QACD,KAAK,IAAI,CAAC,GAAG,MAAM,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC/B,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC;YACpC,MAAM,EAAE,CAAC;SACV;QAED,MAAM,IAAI,MAAM,CAAC;KAClB,QAAQ,MAAM,GAAG,CAAC,EAAE;AACvB,CAAC;AAED,SAAS,sBAAsB,CAAC,GAAG,EAAE,MAAM,EAAE,cAAc;IACzD,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,IAAI,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC;IACvB,MAAM,EAAE,GAAG,KAAK,GAAG,cAAc,CAAC;IAElC,OAAO,KAAK,GAAG,MAAM,EAAE;QACrB,KAAK,IAAI,CAAC,GAAG,MAAM,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,EAAE;YAC/B,GAAG,CAAC,KAAK,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC;YAClC,EAAE,KAAK,CAAC;SACT;QACD,KAAK,IAAI,MAAM,CAAC;KACjB;IAED,MAAM,IAAI,GAAG,GAAG,CAAC,KAAK,EAAE,CAAC;IACzB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,EAAE,CAAC,EAAE;QAC3B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,EAAE,CAAC,EAAE;YACvC,GAAG,CAAC,CAAC,cAAc,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;SAC3E;KACF;AACH,CAAC;AAED,SAAgB,cAAc,CAAC,KAAK,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,EAAE,aAAa,EAC3E,mBAAmB;IACnB,IAAI,CAAC,SAAS,IAAI,SAAS,KAAK,CAAC,EAAE;QACjC,OAAO,KAAK,CAAC;KACd;IAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAC7C,IAAI,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;YAC9B,MAAM,IAAI,KAAK,CAAC,sEAAsE,CAAC,CAAC;SACzF;QACD,IAAI,aAAa,CAAC,CAAC,CAAC,KAAK,aAAa,CAAC,CAAC,CAAC,EAAE;YACzC,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;KACF;IAED,MAAM,cAAc,GAAG,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;IAC5C,MAAM,MAAM,GAAG,mBAAmB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC;IAEpE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,EAAE,CAAC,EAAE;QAC/B,4DAA4D;QAC5D,IAAI,CAAC,GAAG,MAAM,GAAG,KAAK,GAAG,cAAc,IAAI,KAAK,CAAC,UAAU,EAAE;YAC3D,MAAM;SACP;QACD,IAAI,GAAG,CAA
C;QACR,IAAI,SAAS,KAAK,CAAC,EAAE,EAAE,wBAAwB;YAC7C,QAAQ,aAAa,CAAC,CAAC,CAAC,EAAE;gBACxB,KAAK,CAAC;oBACJ,GAAG,GAAG,IAAI,UAAU,CAClB,KAAK,EAAE,CAAC,GAAG,MAAM,GAAG,KAAK,GAAG,cAAc,EAAE,MAAM,GAAG,KAAK,GAAG,cAAc,CAC5E,CAAC;oBACF,MAAM;gBACR,KAAK,EAAE;oBACL,GAAG,GAAG,IAAI,WAAW,CACnB,KAAK,EAAE,CAAC,GAAG,MAAM,GAAG,KAAK,GAAG,cAAc,EAAE,MAAM,GAAG,KAAK,GAAG,cAAc,GAAG,CAAC,CAChF,CAAC;oBACF,MAAM;gBACR,KAAK,EAAE;oBACL,GAAG,GAAG,IAAI,WAAW,CACnB,KAAK,EAAE,CAAC,GAAG,MAAM,GAAG,KAAK,GAAG,cAAc,EAAE,MAAM,GAAG,KAAK,GAAG,cAAc,GAAG,CAAC,CAChF,CAAC;oBACF,MAAM;gBACR;oBACE,MAAM,IAAI,KAAK,CAAC,gCAAgC,aAAa,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC;aACxF;YACD,YAAY,CAAC,GAAG,EAAE,MAAM,EAAE,cAAc,CAAC,CAAC;SAC3C;aAAM,IAAI,SAAS,KAAK,CAAC,EAAE,EAAE,4BAA4B;YACxD,GAAG,GAAG,IAAI,UAAU,CAClB,KAAK,EAAE,CAAC,GAAG,MAAM,GAAG,KAAK,GAAG,cAAc,EAAE,MAAM,GAAG,KAAK,GAAG,cAAc,CAC5E,CAAC;YACF,sBAAsB,CAAC,GAAG,EAAE,MAAM,EAAE,cAAc,CAAC,CAAC;SACrD;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AArDD,wCAqDC"}

69
node_modules/geotiff/dist-node/resample.d.ts generated vendored Normal file
View File

@@ -0,0 +1,69 @@
/**
* Resample the input arrays using nearest neighbor value selection.
* @param {TypedArray[]} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @returns {TypedArray[]} The resampled rasters
*/
export function resampleNearest(valueArrays: TypedArray[], inWidth: number, inHeight: number, outWidth: number, outHeight: number): TypedArray[];
/**
* Resample the input arrays using bilinear interpolation.
* @param {TypedArray[]} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @returns {TypedArray[]} The resampled rasters
*/
export function resampleBilinear(valueArrays: TypedArray[], inWidth: number, inHeight: number, outWidth: number, outHeight: number): TypedArray[];
/**
* Resample the input arrays using the selected resampling method.
* @param {TypedArray[]} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @param {string} [method = 'nearest'] The desired resampling method
* @returns {TypedArray[]} The resampled rasters
*/
export function resample(valueArrays: TypedArray[], inWidth: number, inHeight: number, outWidth: number, outHeight: number, method?: string | undefined): TypedArray[];
/**
* Resample the pixel interleaved input array using nearest neighbor value selection.
* @param {TypedArray} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @param {number} samples The number of samples per pixel for pixel
* interleaved data
* @returns {TypedArray} The resampled raster
*/
export function resampleNearestInterleaved(valueArray: any, inWidth: number, inHeight: number, outWidth: number, outHeight: number, samples: number): TypedArray;
/**
* Resample the pixel interleaved input array using bilinear interpolation.
* @param {TypedArray} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @param {number} samples The number of samples per pixel for pixel
* interleaved data
* @returns {TypedArray} The resampled raster
*/
export function resampleBilinearInterleaved(valueArray: any, inWidth: number, inHeight: number, outWidth: number, outHeight: number, samples: number): TypedArray;
/**
* Resample the pixel interleaved input array using the selected resampling method.
* @param {TypedArray} valueArray The input array to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @param {number} samples The number of samples per pixel for pixel
* interleaved data
* @param {string} [method = 'nearest'] The desired resampling method
* @returns {TypedArray} The resampled rasters
*/
export function resampleInterleaved(valueArray: TypedArray, inWidth: number, inHeight: number, outWidth: number, outHeight: number, samples: number, method?: string | undefined): TypedArray;
//# sourceMappingURL=resample.d.ts.map

1
node_modules/geotiff/dist-node/resample.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"resample.d.ts","sourceRoot":"","sources":["../dist-module/resample.js"],"names":[],"mappings":"AAQA;;;;;;;;GAQG;AACH,6CAPW,YAAY,WACZ,MAAM,YACN,MAAM,YACN,MAAM,aACN,MAAM,GACJ,YAAY,CAiBxB;AAQD;;;;;;;;GAQG;AACH,8CAPW,YAAY,WACZ,MAAM,YACN,MAAM,YACN,MAAM,aACN,MAAM,GACJ,YAAY,CAoCxB;AAED;;;;;;;;;GASG;AACH,sCARW,YAAY,WACZ,MAAM,YACN,MAAM,YACN,MAAM,aACN,MAAM,gCAEJ,YAAY,CAYxB;AAED;;;;;;;;;;GAUG;AACH,qEARW,MAAM,YACN,MAAM,YACN,MAAM,aACN,MAAM,WACN,MAAM,cAqBhB;AAED;;;;;;;;;;GAUG;AACH,sEARW,MAAM,YACN,MAAM,YACN,MAAM,aACN,MAAM,WACN,MAAM,cAsChB;AAED;;;;;;;;;;;GAWG;AACH,qEATW,MAAM,YACN,MAAM,YACN,MAAM,aACN,MAAM,WACN,MAAM,2CAmBhB"}

187
node_modules/geotiff/dist-node/resample.js generated vendored Normal file
View File

@@ -0,0 +1,187 @@
"use strict";
/**
 * @module resample
 */
Object.defineProperty(exports, "__esModule", { value: true });
// Export stubs; each is assigned where the corresponding function is defined.
exports.resampleInterleaved = exports.resampleBilinearInterleaved = exports.resampleNearestInterleaved = exports.resample = exports.resampleBilinear = exports.resampleNearest = void 0;
// Allocate a zero-filled array of the same typed-array class as `array`,
// sized for a width x height raster with `samplesPerPixel` samples per pixel.
function copyNewSize(array, width, height, samplesPerPixel = 1) {
    const TypedArrayCtor = Object.getPrototypeOf(array).constructor;
    return new TypedArrayCtor(width * height * samplesPerPixel);
}
/**
* Resample the input arrays using nearest neighbor value selection.
* @param {TypedArray[]} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @returns {TypedArray[]} The resampled rasters
*/
function resampleNearest(valueArrays, inWidth, inHeight, outWidth, outHeight) {
    const xRatio = inWidth / outWidth;
    const yRatio = inHeight / outHeight;
    return valueArrays.map((band) => {
        const resampled = copyNewSize(band, outWidth, outHeight);
        for (let row = 0; row < outHeight; ++row) {
            // Clamp the rounded source row to the input raster.
            const srcY = Math.min(Math.round(yRatio * row), inHeight - 1);
            for (let col = 0; col < outWidth; ++col) {
                const srcX = Math.min(Math.round(xRatio * col), inWidth - 1);
                resampled[(row * outWidth) + col] = band[(srcY * inWidth) + srcX];
            }
        }
        return resampled;
    });
}
exports.resampleNearest = resampleNearest;
// simple linear interpolation, code from:
// https://en.wikipedia.org/wiki/Linear_interpolation#Programming_language_support
function lerp(v0, v1, t) {
    // Weighted average of the two endpoints; same operation order as the
    // classic ((1 - t) * v0) + (t * v1) form.
    return (v0 * (1 - t)) + (v1 * t);
}
/**
* Resample the input arrays using bilinear interpolation.
* @param {TypedArray[]} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @returns {TypedArray[]} The resampled rasters
*/
function resampleBilinear(valueArrays, inWidth, inHeight, outWidth, outHeight) {
    const xRatio = inWidth / outWidth;
    const yRatio = inHeight / outHeight;
    return valueArrays.map((band) => {
        const resampled = copyNewSize(band, outWidth, outHeight);
        for (let row = 0; row < outHeight; ++row) {
            const srcY = yRatio * row;
            const y0 = Math.floor(srcY);
            const y1 = Math.min(Math.ceil(srcY), (inHeight - 1));
            for (let col = 0; col < outWidth; ++col) {
                const srcX = xRatio * col;
                const xFrac = srcX % 1;
                const x0 = Math.floor(srcX);
                const x1 = Math.min(Math.ceil(srcX), (inWidth - 1));
                // Four neighbouring source samples around (srcX, srcY).
                const topLeft = band[(y0 * inWidth) + x0];
                const topRight = band[(y0 * inWidth) + x1];
                const bottomLeft = band[(y1 * inWidth) + x0];
                const bottomRight = band[(y1 * inWidth) + x1];
                resampled[(row * outWidth) + col] = lerp(lerp(topLeft, topRight, xFrac), lerp(bottomLeft, bottomRight, xFrac), srcY % 1);
            }
        }
        return resampled;
    });
}
exports.resampleBilinear = resampleBilinear;
/**
* Resample the input arrays using the selected resampling method.
* @param {TypedArray[]} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @param {string} [method = 'nearest'] The desired resampling method
* @returns {TypedArray[]} The resampled rasters
*/
function resample(valueArrays, inWidth, inHeight, outWidth, outHeight, method = 'nearest') {
    const normalized = method.toLowerCase();
    if (normalized === 'nearest') {
        return resampleNearest(valueArrays, inWidth, inHeight, outWidth, outHeight);
    }
    if (normalized === 'bilinear' || normalized === 'linear') {
        return resampleBilinear(valueArrays, inWidth, inHeight, outWidth, outHeight);
    }
    // Report the method exactly as the caller spelled it.
    throw new Error(`Unsupported resampling method: '${method}'`);
}
exports.resample = resample;
/**
* Resample the pixel interleaved input array using nearest neighbor value selection.
* @param {TypedArray} valueArrays The input arrays to resample
* @param {number} inWidth The width of the input rasters
* @param {number} inHeight The height of the input rasters
* @param {number} outWidth The desired width of the output rasters
* @param {number} outHeight The desired height of the output rasters
* @param {number} samples The number of samples per pixel for pixel
* interleaved data
* @returns {TypedArray} The resampled raster
*/
function resampleNearestInterleaved(valueArray, inWidth, inHeight, outWidth, outHeight, samples) {
    const xRatio = inWidth / outWidth;
    const yRatio = inHeight / outHeight;
    const resampled = copyNewSize(valueArray, outWidth, outHeight, samples);
    for (let row = 0; row < outHeight; ++row) {
        const srcY = Math.min(Math.round(yRatio * row), inHeight - 1);
        for (let col = 0; col < outWidth; ++col) {
            const srcX = Math.min(Math.round(xRatio * col), inWidth - 1);
            // Copy every interleaved sample of the nearest source pixel.
            for (let s = 0; s < samples; ++s) {
                resampled[(row * outWidth * samples) + (col * samples) + s] = valueArray[(srcY * inWidth * samples) + (srcX * samples) + s];
            }
        }
    }
    return resampled;
}
exports.resampleNearestInterleaved = resampleNearestInterleaved;
/**
 * Resample a pixel-interleaved raster using bilinear interpolation.
 * @param {TypedArray} valueArray The pixel-interleaved input array to resample
 * @param {number} inWidth The width of the input raster
 * @param {number} inHeight The height of the input raster
 * @param {number} outWidth The desired width of the output raster
 * @param {number} outHeight The desired height of the output raster
 * @param {number} samples The number of samples per pixel for pixel
 *                         interleaved data
 * @returns {TypedArray} The resampled raster
 */
function resampleBilinearInterleaved(valueArray, inWidth, inHeight, outWidth, outHeight, samples) {
    const xScale = inWidth / outWidth;
    const yScale = inHeight / outHeight;
    const resampled = copyNewSize(valueArray, outWidth, outHeight, samples);
    const rowStride = inWidth * samples;
    for (let row = 0; row < outHeight; ++row) {
        const srcY = yScale * row;
        // fractional vertical position between the two source rows
        const ty = srcY % 1;
        const y0 = Math.floor(srcY);
        const y1 = Math.min(Math.ceil(srcY), inHeight - 1);
        for (let col = 0; col < outWidth; ++col) {
            const srcX = xScale * col;
            // fractional horizontal position between the two source columns
            const tx = srcX % 1;
            const x0 = Math.floor(srcX);
            const x1 = Math.min(Math.ceil(srcX), inWidth - 1);
            const dstBase = ((row * outWidth) + col) * samples;
            for (let s = 0; s < samples; ++s) {
                // the four samples surrounding the source position
                const v00 = valueArray[(y0 * rowStride) + (x0 * samples) + s];
                const v10 = valueArray[(y0 * rowStride) + (x1 * samples) + s];
                const v01 = valueArray[(y1 * rowStride) + (x0 * samples) + s];
                const v11 = valueArray[(y1 * rowStride) + (x1 * samples) + s];
                // interpolate horizontally first, then vertically
                resampled[dstBase + s] = lerp(lerp(v00, v10, tx), lerp(v01, v11, tx), ty);
            }
        }
    }
    return resampled;
}
exports.resampleBilinearInterleaved = resampleBilinearInterleaved;
/**
 * Resample a pixel-interleaved raster using the selected resampling method.
 * @param {TypedArray} valueArray The input array to resample
 * @param {number} inWidth The width of the input raster
 * @param {number} inHeight The height of the input raster
 * @param {number} outWidth The desired width of the output raster
 * @param {number} outHeight The desired height of the output raster
 * @param {number} samples The number of samples per pixel for pixel
 *                         interleaved data
 * @param {string} [method = 'nearest'] The desired resampling method
 * @returns {TypedArray} The resampled raster
 */
function resampleInterleaved(valueArray, inWidth, inHeight, outWidth, outHeight, samples, method = 'nearest') {
    // method names are matched case-insensitively
    const normalized = method.toLowerCase();
    if (normalized === 'nearest') {
        return resampleNearestInterleaved(valueArray, inWidth, inHeight, outWidth, outHeight, samples);
    }
    if (normalized === 'bilinear' || normalized === 'linear') {
        return resampleBilinearInterleaved(valueArray, inWidth, inHeight, outWidth, outHeight, samples);
    }
    throw new Error(`Unsupported resampling method: '${method}'`);
}
exports.resampleInterleaved = resampleInterleaved;
//# sourceMappingURL=resample.js.map

1
node_modules/geotiff/dist-node/resample.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

7
node_modules/geotiff/dist-node/rgb.d.ts generated vendored Normal file
View File

@@ -0,0 +1,7 @@
export function fromWhiteIsZero(raster: any, max: any): Uint8Array;
export function fromBlackIsZero(raster: any, max: any): Uint8Array;
export function fromPalette(raster: any, colorMap: any): Uint8Array;
export function fromCMYK(cmykRaster: any): Uint8Array;
export function fromYCbCr(yCbCrRaster: any): Uint8ClampedArray;
export function fromCIELab(cieLabRaster: any): Uint8Array;
//# sourceMappingURL=rgb.d.ts.map

1
node_modules/geotiff/dist-node/rgb.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"rgb.d.ts","sourceRoot":"","sources":["../dist-module/rgb.js"],"names":[],"mappings":"AAAA,mEAWC;AAED,mEAWC;AAED,oEAYC;AAED,sDAcC;AAED,+DAaC;AAQD,0DAiCC"}

106
node_modules/geotiff/dist-node/rgb.js generated vendored Normal file
View File

@@ -0,0 +1,106 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.fromCIELab = exports.fromYCbCr = exports.fromCMYK = exports.fromPalette = exports.fromBlackIsZero = exports.fromWhiteIsZero = void 0;
/**
 * Convert a WhiteIsZero grayscale raster to RGB: low raster values are
 * bright, `max` maps to black.
 * @param {TypedArray} raster the input raster (with `width`/`height` attached)
 * @param {number} max the maximum raster value
 * @returns {Uint8Array} the RGB raster
 */
function fromWhiteIsZero(raster, max) {
    const { width, height } = raster;
    const rgbRaster = new Uint8Array(width * height * 3);
    let value;
    for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {
        // clamp to 255: the unclamped `256 - 0 = 256` overflowed the
        // Uint8Array and wrapped pure white (raster value 0) around to black
        value = Math.min(255, 256 - (raster[i] / max * 256));
        rgbRaster[j] = value;
        rgbRaster[j + 1] = value;
        rgbRaster[j + 2] = value;
    }
    return rgbRaster;
}
exports.fromWhiteIsZero = fromWhiteIsZero;
/**
 * Convert a BlackIsZero grayscale raster to RGB: raster value 0 is black,
 * `max` maps to white.
 * @param {TypedArray} raster the input raster (with `width`/`height` attached)
 * @param {number} max the maximum raster value
 * @returns {Uint8Array} the RGB raster
 */
function fromBlackIsZero(raster, max) {
    const { width, height } = raster;
    const rgbRaster = new Uint8Array(width * height * 3);
    let value;
    for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {
        // clamp to 255: the unclamped `max / max * 256 = 256` overflowed the
        // Uint8Array and wrapped full-intensity pixels around to black
        value = Math.min(255, raster[i] / max * 256);
        rgbRaster[j] = value;
        rgbRaster[j + 1] = value;
        rgbRaster[j + 2] = value;
    }
    return rgbRaster;
}
exports.fromBlackIsZero = fromBlackIsZero;
/**
 * Convert a paletted raster to RGB using the given color map.
 * The color map holds three consecutive planes of 16-bit values: all reds,
 * then all greens, then all blues.
 * @param {TypedArray} raster the input raster of palette indices
 * @param {TypedArray} colorMap the 16-bit color map
 * @returns {Uint8Array} the RGB raster
 */
function fromPalette(raster, colorMap) {
    const { width, height } = raster;
    const rgbRaster = new Uint8Array(width * height * 3);
    const planeSize = colorMap.length / 3;
    // scale a 16-bit palette entry down to 8 bit
    const to8Bit = (component) => (component / 65536) * 256;
    for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {
        const index = raster[i];
        rgbRaster[j] = to8Bit(colorMap[index]);
        rgbRaster[j + 1] = to8Bit(colorMap[index + planeSize]);
        rgbRaster[j + 2] = to8Bit(colorMap[index + (planeSize * 2)]);
    }
    return rgbRaster;
}
exports.fromPalette = fromPalette;
/**
 * Convert a pixel-interleaved CMYK raster to RGB using a naive conversion
 * (no color profile is applied).
 * @param {TypedArray} cmykRaster the input raster, 4 samples per pixel
 * @returns {Uint8Array} the RGB raster
 */
function fromCMYK(cmykRaster) {
    const { width, height } = cmykRaster;
    const rgbRaster = new Uint8Array(width * height * 3);
    for (let i = 0, j = 0; i < cmykRaster.length; i += 4, j += 3) {
        // the black component scales all three channels equally
        const k = cmykRaster[i + 3];
        const kScale = (255 - k) / 256;
        rgbRaster[j] = 255 * ((255 - cmykRaster[i]) / 256) * kScale;
        rgbRaster[j + 1] = 255 * ((255 - cmykRaster[i + 1]) / 256) * kScale;
        rgbRaster[j + 2] = 255 * ((255 - cmykRaster[i + 2]) / 256) * kScale;
    }
    return rgbRaster;
}
exports.fromCMYK = fromCMYK;
/**
 * Convert a pixel-interleaved YCbCr raster to RGB.
 * @param {TypedArray} yCbCrRaster the input raster, 3 samples per pixel
 * @returns {Uint8ClampedArray} the RGB raster; clamped because the
 *                              conversion may yield values outside [0, 255]
 */
function fromYCbCr(yCbCrRaster) {
    const { width, height } = yCbCrRaster;
    const rgbRaster = new Uint8ClampedArray(width * height * 3);
    for (let i = 0, j = 0; i < yCbCrRaster.length; i += 3, j += 3) {
        const y = yCbCrRaster[i];
        // chroma components are centered on 0x80
        const cb = yCbCrRaster[i + 1] - 0x80;
        const cr = yCbCrRaster[i + 2] - 0x80;
        rgbRaster[j] = y + (1.40200 * cr);
        rgbRaster[j + 1] = y - (0.34414 * cb) - (0.71414 * cr);
        rgbRaster[j + 2] = y + (1.77200 * cb);
    }
    return rgbRaster;
}
exports.fromYCbCr = fromYCbCr;
// Reference white point for the Lab -> XYZ conversion
// (presumably the D65 / 2° observer values — TODO confirm against spec).
const Xn = 0.95047;
const Yn = 1.00000;
const Zn = 1.08883;
// from https://github.com/antimatter15/rgb-lab/blob/master/color.js
/**
 * Convert a pixel-interleaved CIE L*a*b* raster to RGB.
 * Each pixel is three samples: L, then a* and b* stored as unsigned bytes
 * that are reinterpreted as signed int8 below.
 * @param {TypedArray} cieLabRaster the input raster (with `width`/`height` attached)
 * @returns {Uint8Array} the RGB raster
 */
function fromCIELab(cieLabRaster) {
    const { width, height } = cieLabRaster;
    const rgbRaster = new Uint8Array(width * height * 3);
    for (let i = 0, j = 0; i < cieLabRaster.length; i += 3, j += 3) {
        const L = cieLabRaster[i + 0];
        const a_ = cieLabRaster[i + 1] << 24 >> 24; // conversion from uint8 to int8
        const b_ = cieLabRaster[i + 2] << 24 >> 24; // same
        // Lab -> XYZ: invert the f() function, piecewise linear below the
        // 0.008856 threshold
        let y = (L + 16) / 116;
        let x = (a_ / 500) + y;
        let z = y - (b_ / 200);
        let r;
        let g;
        let b;
        x = Xn * ((x * x * x > 0.008856) ? x * x * x : (x - (16 / 116)) / 7.787);
        y = Yn * ((y * y * y > 0.008856) ? y * y * y : (y - (16 / 116)) / 7.787);
        z = Zn * ((z * z * z > 0.008856) ? z * z * z : (z - (16 / 116)) / 7.787);
        // XYZ -> linear RGB via a 3x3 matrix multiplication
        r = (x * 3.2406) + (y * -1.5372) + (z * -0.4986);
        g = (x * -0.9689) + (y * 1.8758) + (z * 0.0415);
        b = (x * 0.0557) + (y * -0.2040) + (z * 1.0570);
        // apply the gamma curve (linear segment below 0.0031308)
        r = (r > 0.0031308) ? ((1.055 * (r ** (1 / 2.4))) - 0.055) : 12.92 * r;
        g = (g > 0.0031308) ? ((1.055 * (g ** (1 / 2.4))) - 0.055) : 12.92 * g;
        b = (b > 0.0031308) ? ((1.055 * (b ** (1 / 2.4))) - 0.055) : 12.92 * b;
        // clamp to [0, 1] and scale to bytes
        rgbRaster[j] = Math.max(0, Math.min(1, r)) * 255;
        rgbRaster[j + 1] = Math.max(0, Math.min(1, g)) * 255;
        rgbRaster[j + 2] = Math.max(0, Math.min(1, b)) * 255;
    }
    return rgbRaster;
}
exports.fromCIELab = fromCIELab;
//# sourceMappingURL=rgb.js.map

1
node_modules/geotiff/dist-node/rgb.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,8 @@
export function makeBufferSource(arrayBuffer: any): ArrayBufferSource;
declare class ArrayBufferSource extends BaseSource {
constructor(arrayBuffer: any);
arrayBuffer: any;
}
import { BaseSource } from "./basesource.js";
export {};
//# sourceMappingURL=arraybuffer.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"arraybuffer.d.ts","sourceRoot":"","sources":["../../dist-module/source/arraybuffer.js"],"names":[],"mappings":"AAiBA,sEAEC;AAhBD;IACE,8BAGC;IADC,iBAA8B;CASjC"}

22
node_modules/geotiff/dist-node/source/arraybuffer.js generated vendored Normal file
View File

@@ -0,0 +1,22 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.makeBufferSource = void 0;
const basesource_js_1 = require("./basesource.js");
const utils_js_1 = require("../utils.js");
class ArrayBufferSource extends basesource_js_1.BaseSource {
    /**
     * A source backed by an in-memory ArrayBuffer.
     * @param {ArrayBuffer} arrayBuffer the underlying buffer
     */
    constructor(arrayBuffer) {
        super();
        this.arrayBuffer = arrayBuffer;
    }
    /**
     * Return a copy of the requested byte range of the buffer.
     * @param {{offset: number, length: number}} slice the slice to read
     * @param {AbortSignal} [signal] optional abort signal
     * @returns {ArrayBuffer} the sliced data
     */
    fetchSlice(slice, signal) {
        if (signal && signal.aborted) {
            throw new utils_js_1.AbortError('Request aborted');
        }
        const end = slice.offset + slice.length;
        return this.arrayBuffer.slice(slice.offset, end);
    }
}
/**
 * Create a source for an in-memory ArrayBuffer.
 * @param {ArrayBuffer} arrayBuffer the buffer to wrap
 * @returns {ArrayBufferSource} the created source
 */
function makeBufferSource(arrayBuffer) {
    return new ArrayBufferSource(arrayBuffer);
}
exports.makeBufferSource = makeBufferSource;
//# sourceMappingURL=arraybuffer.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"arraybuffer.js","sourceRoot":"","sources":["../../dist-module/source/arraybuffer.js"],"names":[],"mappings":";;;AAAA,mDAA6C;AAC7C,0CAAyC;AAEzC,MAAM,iBAAkB,SAAQ,0BAAU;IACxC,YAAY,WAAW;QACrB,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;IACjC,CAAC;IAED,UAAU,CAAC,KAAK,EAAE,MAAM;QACtB,IAAI,MAAM,IAAI,MAAM,CAAC,OAAO,EAAE;YAC5B,MAAM,IAAI,qBAAU,CAAC,iBAAiB,CAAC,CAAC;SACzC;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;IAC3E,CAAC;CACF;AAED,SAAgB,gBAAgB,CAAC,WAAW;IAC1C,OAAO,IAAI,iBAAiB,CAAC,WAAW,CAAC,CAAC;AAC5C,CAAC;AAFD,4CAEC"}

29
node_modules/geotiff/dist-node/source/basesource.d.ts generated vendored Normal file
View File

@@ -0,0 +1,29 @@
/**
* @typedef Slice
* @property {number} offset
* @property {number} length
*/
export class BaseSource {
/**
*
* @param {Slice[]} slices
* @returns {ArrayBuffer[]}
*/
fetch(slices: Slice[], signal?: undefined): ArrayBuffer[];
/**
*
* @param {Slice} slice
* @returns {ArrayBuffer}
*/
fetchSlice(slice: Slice): ArrayBuffer;
/**
* Returns the filesize if already determined and null otherwise
*/
get fileSize(): null;
close(): Promise<void>;
}
export type Slice = {
offset: number;
length: number;
};
//# sourceMappingURL=basesource.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"basesource.d.ts","sourceRoot":"","sources":["../../dist-module/source/basesource.js"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;IACE;;;;OAIG;IACH,cAHW,KAAK,EAAE,uBACL,WAAW,EAAE,CAMzB;IAED;;;;OAIG;IACH,kBAHW,KAAK,GACH,WAAW,CAIvB;IAED;;OAEG;IACH,qBAEC;IAED,uBAEC;CACF;;YAnCa,MAAM;YACN,MAAM"}

37
node_modules/geotiff/dist-node/source/basesource.js generated vendored Normal file
View File

@@ -0,0 +1,37 @@
"use strict";
/**
* @typedef Slice
* @property {number} offset
* @property {number} length
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.BaseSource = void 0;
class BaseSource {
    /**
     * Fetch all given slices in parallel.
     * @param {Slice[]} slices the slices to fetch
     * @param {AbortSignal} [signal] optional abort signal forwarded to each fetch
     * @returns {Promise<ArrayBuffer[]>} the data for each slice
     */
    async fetch(slices, signal = undefined) {
        const requests = slices.map((slice) => this.fetchSlice(slice, signal));
        return Promise.all(requests);
    }
    /**
     * Fetch a single slice; subclasses must override this.
     * @param {Slice} slice the slice to fetch
     * @returns {Promise<ArrayBuffer>} the slice data
     */
    async fetchSlice(slice) {
        throw new Error(`fetching of slice ${slice} not possible, not implemented`);
    }
    /**
     * The size of the file in bytes if already determined, null otherwise.
     */
    get fileSize() {
        return null;
    }
    /**
     * Release any held resources; a no-op in the base implementation.
     */
    async close() {
        // nothing to clean up by default
    }
}
exports.BaseSource = BaseSource;
//# sourceMappingURL=basesource.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"basesource.js","sourceRoot":"","sources":["../../dist-module/source/basesource.js"],"names":[],"mappings":";AAAA;;;;GAIG;;;AAEH,MAAa,UAAU;IACrB;;;;OAIG;IACH,KAAK,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS;QACpC,OAAO,OAAO,CAAC,GAAG,CAChB,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,CACtD,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,UAAU,CAAC,KAAK;QACpB,MAAM,IAAI,KAAK,CAAC,qBAAqB,KAAK,gCAAgC,CAAC,CAAC;IAC9E,CAAC;IAED;;OAEG;IACH,IAAI,QAAQ;QACV,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,CAAC,KAAK;QACT,mBAAmB;IACrB,CAAC;CACF;AA/BD,gCA+BC"}

View File

@@ -0,0 +1,45 @@
export class BlockedSource extends BaseSource {
/**
*
* @param {Source} source The underlying source that shall be blocked and cached
* @param {object} options
*/
constructor(source: Source, { blockSize, cacheSize }?: object);
source: Source;
blockSize: any;
blockCache: any;
blockRequests: Map<any, any>;
blockIdsToFetch: Set<any>;
/**
*
* @param {AbortSignal} signal
*/
fetchBlocks(signal: AbortSignal): void;
/**
*
* @param {Set} blockIds
* @returns {BlockGroup[]}
*/
groupBlocks(blockIds: Set<any>): BlockGroup[];
/**
*
* @param {Slice[]} slices
* @param {Map} blocks
*/
readSliceData(slices: Slice[], blocks: Map<any, any>): ArrayBuffer[];
}
import { BaseSource } from "./basesource.js";
declare class BlockGroup {
/**
*
* @param {number} offset
* @param {number} length
* @param {number[]} blockIds
*/
constructor(offset: number, length: number, blockIds: number[]);
offset: number;
length: number;
blockIds: number[];
}
export {};
//# sourceMappingURL=blockedsource.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"blockedsource.d.ts","sourceRoot":"","sources":["../../dist-module/source/blockedsource.js"],"names":[],"mappings":"AAuCA;IACE;;;;OAIG;IACH,uDAFW,MAAM,EAchB;IAVC,eAAoB;IACpB,eAA0B;IAE1B,gBAAkD;IAGlD,6BAA8B;IAG9B,0BAAgC;IAqHlC;;;OAGG;IACH,oBAFW,WAAW,QA6CrB;IAED;;;;OAIG;IACH,iCAFa,UAAU,EAAE,CAiCxB;IAED;;;;OAIG;IACH,sBAHW,OAAO,wCAqCjB;CACF;;AAnRD;IACE;;;;;OAKG;IACH,oBAJW,MAAM,UACN,MAAM,YACN,MAAM,EAAE,EAMlB;IAHC,eAAoB;IACpB,eAAoB;IACpB,mBAAwB;CAE3B"}

266
node_modules/geotiff/dist-node/source/blockedsource.js generated vendored Normal file
View File

@@ -0,0 +1,266 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.BlockedSource = void 0;
const lru_cache_1 = __importDefault(require("lru-cache"));
const basesource_js_1 = require("./basesource.js");
const utils_js_1 = require("../utils.js");
class Block {
    /**
     * A contiguous range of bytes, optionally carrying its fetched data.
     * @param {number} offset the byte offset of the block
     * @param {number} length the length of the block in bytes
     * @param {ArrayBuffer} [data] the block data, when already fetched
     */
    constructor(offset, length, data = null) {
        this.offset = offset;
        this.length = length;
        this.data = data;
    }
    /**
     * @returns {number} the offset of the first byte after this block
     */
    get top() {
        const { offset, length } = this;
        return offset + length;
    }
}
class BlockGroup {
    /**
     * A run of adjacent blocks that can be fetched with a single request.
     * @param {number} offset the byte offset of the first block
     * @param {number} length the combined length of the blocks in bytes
     * @param {number[]} blockIds the ids of the contained blocks
     */
    constructor(offset, length, blockIds) {
        this.offset = offset;
        this.length = length;
        this.blockIds = blockIds;
    }
}
class BlockedSource extends basesource_js_1.BaseSource {
    /**
     * A source decorator that reads fixed-size blocks from the underlying
     * source and caches them in an LRU cache.
     * @param {Source} source The underlying source that shall be blocked and cached
     * @param {object} options
     * @param {number} [options.blockSize=65536] size of a cache block in bytes
     * @param {number} [options.cacheSize=100] maximum number of cached blocks
     */
    constructor(source, { blockSize = 65536, cacheSize = 100 } = {}) {
        super();
        this.source = source;
        this.blockSize = blockSize;
        this.blockCache = new lru_cache_1.default({ max: cacheSize });
        // mapping blockId -> Promise resolving to a Block instance
        this.blockRequests = new Map();
        // set of blockIds missing for the current requests
        this.blockIdsToFetch = new Set();
    }
    get fileSize() {
        return this.source.fileSize;
    }
    /**
     * Fetch the given slices, resolving them from cached/in-flight blocks
     * where possible and requesting only the missing blocks.
     * @param {basesource/Slice[]} slices
     * @param {AbortSignal} [signal]
     * @returns {Promise<ArrayBuffer[]>} one buffer per requested slice
     */
    async fetch(slices, signal) {
        const cachedBlocks = new Map();
        const blockRequests = new Map();
        const missingBlockIds = new Set();
        for (const { offset, length } of slices) {
            let top = offset + length;
            const { fileSize } = this;
            if (fileSize !== null) {
                top = Math.min(top, fileSize);
            }
            const firstBlockOffset = Math.floor(offset / this.blockSize) * this.blockSize;
            // chunk the current slice into blocks
            for (let current = firstBlockOffset; current < top; current += this.blockSize) {
                // check if the block is cached, being requested or still missing
                const blockId = Math.floor(current / this.blockSize);
                if (this.blockCache.has(blockId)) {
                    cachedBlocks.set(blockId, this.blockCache.get(blockId));
                }
                else if (this.blockRequests.has(blockId)) {
                    blockRequests.set(blockId, this.blockRequests.get(blockId));
                }
                else if (this.blockIdsToFetch.has(blockId)) {
                    missingBlockIds.add(blockId);
                }
                else {
                    this.blockIdsToFetch.add(blockId);
                    missingBlockIds.add(blockId);
                }
            }
        }
        // allow additional block requests to accumulate
        await (0, utils_js_1.wait)();
        this.fetchBlocks(signal);
        // resolve each previously missing block to either a fresh request or a
        // cache entry (a concurrent fetch may have completed it already)
        for (const blockId of missingBlockIds) {
            const block = this.blockRequests.get(blockId);
            const cachedBlock = this.blockCache.get(blockId);
            if (block) {
                blockRequests.set(blockId, block);
            }
            else if (cachedBlock) {
                cachedBlocks.set(blockId, cachedBlock);
            }
            else {
                throw new Error(`Block ${blockId} is not in the block requests`);
            }
        }
        // actually await all pending requests
        let results = await Promise.allSettled(Array.from(blockRequests.values()));
        // perform retries if a block was interrupted by a previous signal
        if (results.some((result) => result.status === 'rejected')) {
            const retriedBlockRequests = new Set();
            for (const [blockId, result] of (0, utils_js_1.zip)(blockRequests.keys(), results)) {
                // FIX: Promise.allSettled results carry `status`/`reason`; the
                // previous `const { rejected } = result` destructured a property
                // that never exists, making this retry branch unreachable.
                if (result.status === 'rejected') {
                    const { reason } = result;
                    // push some blocks back to the to-fetch list if they were
                    // aborted, but only when a different signal was used
                    if (reason.name === 'AbortError' && reason.signal !== signal) {
                        this.blockIdsToFetch.add(blockId);
                        retriedBlockRequests.add(blockId);
                    }
                }
            }
            // start the retry of some blocks if required
            // FIX: Set exposes `size`, not `length`; `.length` was always
            // undefined, so retries were never actually started.
            if (this.blockIdsToFetch.size > 0) {
                this.fetchBlocks(signal);
                for (const blockId of retriedBlockRequests) {
                    const block = this.blockRequests.get(blockId);
                    if (!block) {
                        throw new Error(`Block ${blockId} is not in the block requests`);
                    }
                    blockRequests.set(blockId, block);
                }
                results = await Promise.allSettled(Array.from(blockRequests.values()));
            }
        }
        // throw an error (either abort error or AggregateError if no abort was done)
        if (results.some((result) => result.status === 'rejected')) {
            if (signal && signal.aborted) {
                throw new utils_js_1.AbortError('Request was aborted');
            }
            throw new utils_js_1.AggregateError(results.filter((result) => result.status === 'rejected').map((result) => result.reason), 'Request failed');
        }
        // extract the actual block responses
        const values = results.map((result) => result.value);
        // create a final Map, with all required blocks for this request to satisfy
        const requiredBlocks = new Map((0, utils_js_1.zip)(Array.from(blockRequests.keys()), values));
        for (const [blockId, block] of cachedBlocks) {
            requiredBlocks.set(blockId, block);
        }
        return this.readSliceData(slices, requiredBlocks);
    }
    /**
     * Start requests for all blocks currently marked as to-fetch, grouping
     * adjacent blocks into single requests.
     * @param {AbortSignal} signal
     */
    fetchBlocks(signal) {
        // check if we still need to
        if (this.blockIdsToFetch.size > 0) {
            const groups = this.groupBlocks(this.blockIdsToFetch);
            // start requesting slices of data
            const groupRequests = this.source.fetch(groups, signal);
            for (let groupIndex = 0; groupIndex < groups.length; ++groupIndex) {
                const group = groups[groupIndex];
                for (const blockId of group.blockIds) {
                    // make an async IIFE for each block
                    const blockRequest = (async () => {
                        try {
                            const response = (await groupRequests)[groupIndex];
                            const blockOffset = blockId * this.blockSize;
                            // cut this block's byte range out of the group response
                            const o = blockOffset - response.offset;
                            const t = Math.min(o + this.blockSize, response.data.byteLength);
                            const data = response.data.slice(o, t);
                            const block = new Block(blockOffset, data.byteLength, data);
                            this.blockCache.set(blockId, block);
                            return block;
                        }
                        catch (err) {
                            if (err.name === 'AbortError') {
                                // store the signal here, we need it to determine later if an
                                // error was caused by this signal
                                err.signal = signal;
                            }
                            throw err;
                        }
                        finally {
                            this.blockRequests.delete(blockId);
                        }
                    })();
                    this.blockRequests.set(blockId, blockRequest);
                }
            }
            this.blockIdsToFetch.clear();
        }
    }
    /**
     * Group consecutive block ids into contiguous byte ranges.
     * @param {Set} blockIds
     * @returns {BlockGroup[]}
     */
    groupBlocks(blockIds) {
        const sortedBlockIds = Array.from(blockIds).sort((a, b) => a - b);
        if (sortedBlockIds.length === 0) {
            return [];
        }
        let current = [];
        let lastBlockId = null;
        const groups = [];
        for (const blockId of sortedBlockIds) {
            if (lastBlockId === null || lastBlockId + 1 === blockId) {
                current.push(blockId);
                lastBlockId = blockId;
            }
            else {
                // gap in the sequence: close the current group and start a new one
                groups.push(new BlockGroup(current[0] * this.blockSize, current.length * this.blockSize, current));
                current = [blockId];
                lastBlockId = blockId;
            }
        }
        groups.push(new BlockGroup(current[0] * this.blockSize, current.length * this.blockSize, current));
        return groups;
    }
    /**
     * Assemble each requested slice from the fetched/cached blocks.
     * @param {Slice[]} slices
     * @param {Map} blocks
     * @returns {ArrayBuffer[]} one buffer per slice
     */
    readSliceData(slices, blocks) {
        return slices.map((slice) => {
            const top = slice.offset + slice.length;
            const blockIdLow = Math.floor(slice.offset / this.blockSize);
            // FIX: use the id of the block containing the *last* byte of the
            // slice; flooring `top` itself referenced a block that was never
            // fetched whenever a slice ended exactly on a block boundary.
            const blockIdHigh = Math.ceil(top / this.blockSize) - 1;
            const sliceData = new ArrayBuffer(slice.length);
            const sliceView = new Uint8Array(sliceData);
            for (let blockId = blockIdLow; blockId <= blockIdHigh; ++blockId) {
                const block = blocks.get(blockId);
                const delta = block.offset - slice.offset;
                const topDelta = block.top - top;
                let blockInnerOffset = 0;
                let rangeInnerOffset = 0;
                let usedBlockLength;
                if (delta < 0) {
                    blockInnerOffset = -delta;
                }
                else if (delta > 0) {
                    rangeInnerOffset = delta;
                }
                if (topDelta < 0) {
                    usedBlockLength = block.length - blockInnerOffset;
                }
                else {
                    usedBlockLength = top - block.offset - blockInnerOffset;
                }
                const blockView = new Uint8Array(block.data, blockInnerOffset, usedBlockLength);
                sliceView.set(blockView, rangeInnerOffset);
            }
            return sliceData;
        });
    }
}
exports.BlockedSource = BlockedSource;
//# sourceMappingURL=blockedsource.js.map

File diff suppressed because one or more lines are too long

30
node_modules/geotiff/dist-node/source/client/base.d.ts generated vendored Normal file
View File

@@ -0,0 +1,30 @@
export class BaseResponse {
/**
* Returns whether the response has an ok'ish status code
*/
get ok(): boolean;
/**
* Returns the status code of the response
*/
get status(): void;
/**
* Returns the value of the specified header
* @param {string} headerName the header name
* @returns {string} the header value
*/
getHeader(headerName: string): string;
/**
* @returns {ArrayBuffer} the response data of the request
*/
getData(): ArrayBuffer;
}
export class BaseClient {
constructor(url: any);
url: any;
/**
* Send a request with the options
* @param {object} [options]
*/
request({ headers, credentials, signal }?: object): Promise<void>;
}
//# sourceMappingURL=base.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"base.d.ts","sourceRoot":"","sources":["../../../dist-module/source/client/base.js"],"names":[],"mappings":"AAAA;IACE;;OAEG;IACH,kBAEC;IAED;;OAEG;IACH,mBAEC;IAED;;;;OAIG;IACH,sBAHW,MAAM,GACJ,MAAM,CAIlB;IAED;;OAEG;IACH,WAFa,WAAW,CAIvB;CACF;AAED;IACE,sBAEC;IADC,SAAc;IAGhB;;;OAGG;IACH,2CAFW,MAAM,iBAIhB;CACF"}

46
node_modules/geotiff/dist-node/source/client/base.js generated vendored Normal file
View File

@@ -0,0 +1,46 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BaseClient = exports.BaseResponse = void 0;
class BaseResponse {
    /**
     * Whether the response carries a successful (2xx) status code.
     */
    get ok() {
        const code = this.status;
        return code >= 200 && code <= 299;
    }
    /**
     * The status code of the response; must be provided by subclasses.
     */
    get status() {
        throw new Error('not implemented');
    }
    /**
     * Return the value of the specified header.
     * @param {string} headerName the header name
     * @returns {string} the header value
     */
    getHeader(headerName) {
        throw new Error('not implemented');
    }
    /**
     * @returns {ArrayBuffer} the response data of the request
     */
    async getData() {
        throw new Error('not implemented');
    }
}
exports.BaseResponse = BaseResponse;
class BaseClient {
    /**
     * Base class for clients bound to a single URL.
     * @param {string} url the URL to request data from
     */
    constructor(url) {
        this.url = url;
    }
    /**
     * Send a request with the given options; must be overridden.
     * @param {object} [options]
     * @param {object} [options.headers] request headers
     * @param {string} [options.credentials] credentials mode
     * @param {AbortSignal} [options.signal] abort signal
     */
    async request({ headers, credentials, signal } = {}) {
        throw new Error('request is not implemented');
    }
}
exports.BaseClient = BaseClient;
//# sourceMappingURL=base.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"base.js","sourceRoot":"","sources":["../../../dist-module/source/client/base.js"],"names":[],"mappings":";;;AAAA,MAAa,YAAY;IACvB;;OAEG;IACH,IAAI,EAAE;QACJ,OAAO,IAAI,CAAC,MAAM,IAAI,GAAG,IAAI,IAAI,CAAC,MAAM,IAAI,GAAG,CAAC;IAClD,CAAC;IAED;;OAEG;IACH,IAAI,MAAM;QACR,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,SAAS,CAAC,UAAU;QAClB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;IACrC,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,OAAO;QACX,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;IACrC,CAAC;CACF;AA9BD,oCA8BC;AAED,MAAa,UAAU;IACrB,YAAY,GAAG;QACb,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;IACjB,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,OAAO,CAAC,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,EAAE;QACjD,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;IAChD,CAAC;CACF;AAZD,gCAYC"}

View File

@@ -0,0 +1,6 @@
export class FetchClient extends BaseClient {
constructor(url: any, credentials: any);
credentials: any;
}
import { BaseClient } from "./base.js";
//# sourceMappingURL=fetch.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"fetch.d.ts","sourceRoot":"","sources":["../../../dist-module/source/client/fetch.js"],"names":[],"mappings":"AA4BA;IACE,wCAGC;IADC,iBAA8B;CASjC"}

40
node_modules/geotiff/dist-node/source/client/fetch.js generated vendored Normal file
View File

@@ -0,0 +1,40 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FetchClient = void 0;
const base_js_1 = require("./base.js");
class FetchResponse extends base_js_1.BaseResponse {
    /**
     * BaseResponse facade for a fetch API Response.
     * @param {Response} response the wrapped fetch response
     */
    constructor(response) {
        super();
        this.response = response;
    }
    /** The HTTP status code of the wrapped response. */
    get status() {
        return this.response.status;
    }
    /**
     * Look up a response header.
     * @param {string} name the header name
     * @returns {string} the header value
     */
    getHeader(name) {
        return this.response.headers.get(name);
    }
    /**
     * Read the full response body.
     * @returns {Promise<ArrayBuffer>} the body data
     */
    async getData() {
        // fall back to `buffer()` for responses lacking `arrayBuffer()`
        // (presumably older node-fetch implementations — TODO confirm)
        if (this.response.arrayBuffer) {
            return this.response.arrayBuffer();
        }
        return (await this.response.buffer()).buffer;
    }
}
class FetchClient extends base_js_1.BaseClient {
    /**
     * A client implementation based on the fetch API.
     * @param {string} url the URL to request
     * @param {string} credentials the default credentials mode
     */
    constructor(url, credentials) {
        super(url);
        this.credentials = credentials;
    }
    /**
     * Perform a fetch request with the given options.
     * @param {object} [options]
     * @returns {Promise<FetchResponse>} the wrapped response
     */
    async request({ headers, credentials, signal } = {}) {
        const response = await fetch(this.url, { headers, credentials, signal });
        return new FetchResponse(response);
    }
}
exports.FetchClient = FetchClient;
//# sourceMappingURL=fetch.js.map

Some files were not shown because too many files have changed in this diff Show More