This commit is contained in:
8
node_modules/geotiff/dist-node/source/arraybuffer.d.ts
generated
vendored
Normal file
8
node_modules/geotiff/dist-node/source/arraybuffer.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
export function makeBufferSource(arrayBuffer: any): ArrayBufferSource;
|
||||
declare class ArrayBufferSource extends BaseSource {
|
||||
constructor(arrayBuffer: any);
|
||||
arrayBuffer: any;
|
||||
}
|
||||
import { BaseSource } from "./basesource.js";
|
||||
export {};
|
||||
//# sourceMappingURL=arraybuffer.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/arraybuffer.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/arraybuffer.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"arraybuffer.d.ts","sourceRoot":"","sources":["../../dist-module/source/arraybuffer.js"],"names":[],"mappings":"AAiBA,sEAEC;AAhBD;IACE,8BAGC;IADC,iBAA8B;CASjC"}
|
||||
22
node_modules/geotiff/dist-node/source/arraybuffer.js
generated
vendored
Normal file
22
node_modules/geotiff/dist-node/source/arraybuffer.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.makeBufferSource = void 0;
|
||||
const basesource_js_1 = require("./basesource.js");
|
||||
const utils_js_1 = require("../utils.js");
|
||||
class ArrayBufferSource extends basesource_js_1.BaseSource {
|
||||
constructor(arrayBuffer) {
|
||||
super();
|
||||
this.arrayBuffer = arrayBuffer;
|
||||
}
|
||||
fetchSlice(slice, signal) {
|
||||
if (signal && signal.aborted) {
|
||||
throw new utils_js_1.AbortError('Request aborted');
|
||||
}
|
||||
return this.arrayBuffer.slice(slice.offset, slice.offset + slice.length);
|
||||
}
|
||||
}
|
||||
function makeBufferSource(arrayBuffer) {
|
||||
return new ArrayBufferSource(arrayBuffer);
|
||||
}
|
||||
exports.makeBufferSource = makeBufferSource;
|
||||
//# sourceMappingURL=arraybuffer.js.map
|
||||
1
node_modules/geotiff/dist-node/source/arraybuffer.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/arraybuffer.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"arraybuffer.js","sourceRoot":"","sources":["../../dist-module/source/arraybuffer.js"],"names":[],"mappings":";;;AAAA,mDAA6C;AAC7C,0CAAyC;AAEzC,MAAM,iBAAkB,SAAQ,0BAAU;IACxC,YAAY,WAAW;QACrB,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;IACjC,CAAC;IAED,UAAU,CAAC,KAAK,EAAE,MAAM;QACtB,IAAI,MAAM,IAAI,MAAM,CAAC,OAAO,EAAE;YAC5B,MAAM,IAAI,qBAAU,CAAC,iBAAiB,CAAC,CAAC;SACzC;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;IAC3E,CAAC;CACF;AAED,SAAgB,gBAAgB,CAAC,WAAW;IAC1C,OAAO,IAAI,iBAAiB,CAAC,WAAW,CAAC,CAAC;AAC5C,CAAC;AAFD,4CAEC"}
|
||||
29
node_modules/geotiff/dist-node/source/basesource.d.ts
generated
vendored
Normal file
29
node_modules/geotiff/dist-node/source/basesource.d.ts
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
/**
|
||||
* @typedef Slice
|
||||
* @property {number} offset
|
||||
* @property {number} length
|
||||
*/
|
||||
export class BaseSource {
|
||||
/**
|
||||
*
|
||||
* @param {Slice[]} slices
|
||||
* @returns {ArrayBuffer[]}
|
||||
*/
|
||||
fetch(slices: Slice[], signal?: undefined): ArrayBuffer[];
|
||||
/**
|
||||
*
|
||||
* @param {Slice} slice
|
||||
* @returns {ArrayBuffer}
|
||||
*/
|
||||
fetchSlice(slice: Slice): ArrayBuffer;
|
||||
/**
|
||||
* Returns the filesize if already determined and null otherwise
|
||||
*/
|
||||
get fileSize(): null;
|
||||
close(): Promise<void>;
|
||||
}
|
||||
export type Slice = {
|
||||
offset: number;
|
||||
length: number;
|
||||
};
|
||||
//# sourceMappingURL=basesource.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/basesource.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/basesource.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"basesource.d.ts","sourceRoot":"","sources":["../../dist-module/source/basesource.js"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;IACE;;;;OAIG;IACH,cAHW,KAAK,EAAE,uBACL,WAAW,EAAE,CAMzB;IAED;;;;OAIG;IACH,kBAHW,KAAK,GACH,WAAW,CAIvB;IAED;;OAEG;IACH,qBAEC;IAED,uBAEC;CACF;;YAnCa,MAAM;YACN,MAAM"}
|
||||
37
node_modules/geotiff/dist-node/source/basesource.js
generated
vendored
Normal file
37
node_modules/geotiff/dist-node/source/basesource.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
"use strict";
|
||||
/**
|
||||
* @typedef Slice
|
||||
* @property {number} offset
|
||||
* @property {number} length
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.BaseSource = void 0;
|
||||
class BaseSource {
|
||||
/**
|
||||
*
|
||||
* @param {Slice[]} slices
|
||||
* @returns {ArrayBuffer[]}
|
||||
*/
|
||||
async fetch(slices, signal = undefined) {
|
||||
return Promise.all(slices.map((slice) => this.fetchSlice(slice, signal)));
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @param {Slice} slice
|
||||
* @returns {ArrayBuffer}
|
||||
*/
|
||||
async fetchSlice(slice) {
|
||||
throw new Error(`fetching of slice ${slice} not possible, not implemented`);
|
||||
}
|
||||
/**
|
||||
* Returns the filesize if already determined and null otherwise
|
||||
*/
|
||||
get fileSize() {
|
||||
return null;
|
||||
}
|
||||
async close() {
|
||||
// no-op by default
|
||||
}
|
||||
}
|
||||
exports.BaseSource = BaseSource;
|
||||
//# sourceMappingURL=basesource.js.map
|
||||
1
node_modules/geotiff/dist-node/source/basesource.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/basesource.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"basesource.js","sourceRoot":"","sources":["../../dist-module/source/basesource.js"],"names":[],"mappings":";AAAA;;;;GAIG;;;AAEH,MAAa,UAAU;IACrB;;;;OAIG;IACH,KAAK,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS;QACpC,OAAO,OAAO,CAAC,GAAG,CAChB,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,CACtD,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,UAAU,CAAC,KAAK;QACpB,MAAM,IAAI,KAAK,CAAC,qBAAqB,KAAK,gCAAgC,CAAC,CAAC;IAC9E,CAAC;IAED;;OAEG;IACH,IAAI,QAAQ;QACV,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,CAAC,KAAK;QACT,mBAAmB;IACrB,CAAC;CACF;AA/BD,gCA+BC"}
|
||||
45
node_modules/geotiff/dist-node/source/blockedsource.d.ts
generated
vendored
Normal file
45
node_modules/geotiff/dist-node/source/blockedsource.d.ts
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
export class BlockedSource extends BaseSource {
|
||||
/**
|
||||
*
|
||||
* @param {Source} source The underlying source that shall be blocked and cached
|
||||
* @param {object} options
|
||||
*/
|
||||
constructor(source: Source, { blockSize, cacheSize }?: object);
|
||||
source: Source;
|
||||
blockSize: any;
|
||||
blockCache: any;
|
||||
blockRequests: Map<any, any>;
|
||||
blockIdsToFetch: Set<any>;
|
||||
/**
|
||||
*
|
||||
* @param {AbortSignal} signal
|
||||
*/
|
||||
fetchBlocks(signal: AbortSignal): void;
|
||||
/**
|
||||
*
|
||||
* @param {Set} blockIds
|
||||
* @returns {BlockGroup[]}
|
||||
*/
|
||||
groupBlocks(blockIds: Set<any>): BlockGroup[];
|
||||
/**
|
||||
*
|
||||
* @param {Slice[]} slices
|
||||
* @param {Map} blocks
|
||||
*/
|
||||
readSliceData(slices: Slice[], blocks: Map<any, any>): ArrayBuffer[];
|
||||
}
|
||||
import { BaseSource } from "./basesource.js";
|
||||
declare class BlockGroup {
|
||||
/**
|
||||
*
|
||||
* @param {number} offset
|
||||
* @param {number} length
|
||||
* @param {number[]} blockIds
|
||||
*/
|
||||
constructor(offset: number, length: number, blockIds: number[]);
|
||||
offset: number;
|
||||
length: number;
|
||||
blockIds: number[];
|
||||
}
|
||||
export {};
|
||||
//# sourceMappingURL=blockedsource.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/blockedsource.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/blockedsource.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"blockedsource.d.ts","sourceRoot":"","sources":["../../dist-module/source/blockedsource.js"],"names":[],"mappings":"AAuCA;IACE;;;;OAIG;IACH,uDAFW,MAAM,EAchB;IAVC,eAAoB;IACpB,eAA0B;IAE1B,gBAAkD;IAGlD,6BAA8B;IAG9B,0BAAgC;IAqHlC;;;OAGG;IACH,oBAFW,WAAW,QA6CrB;IAED;;;;OAIG;IACH,iCAFa,UAAU,EAAE,CAiCxB;IAED;;;;OAIG;IACH,sBAHW,OAAO,wCAqCjB;CACF;;AAnRD;IACE;;;;;OAKG;IACH,oBAJW,MAAM,UACN,MAAM,YACN,MAAM,EAAE,EAMlB;IAHC,eAAoB;IACpB,eAAoB;IACpB,mBAAwB;CAE3B"}
|
||||
266
node_modules/geotiff/dist-node/source/blockedsource.js
generated
vendored
Normal file
266
node_modules/geotiff/dist-node/source/blockedsource.js
generated
vendored
Normal file
@@ -0,0 +1,266 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.BlockedSource = void 0;
|
||||
const lru_cache_1 = __importDefault(require("lru-cache"));
|
||||
const basesource_js_1 = require("./basesource.js");
|
||||
const utils_js_1 = require("../utils.js");
|
||||
class Block {
|
||||
/**
|
||||
*
|
||||
* @param {number} offset
|
||||
* @param {number} length
|
||||
* @param {ArrayBuffer} [data]
|
||||
*/
|
||||
constructor(offset, length, data = null) {
|
||||
this.offset = offset;
|
||||
this.length = length;
|
||||
this.data = data;
|
||||
}
|
||||
/**
|
||||
* @returns {number} the top byte border
|
||||
*/
|
||||
get top() {
|
||||
return this.offset + this.length;
|
||||
}
|
||||
}
|
||||
class BlockGroup {
|
||||
/**
|
||||
*
|
||||
* @param {number} offset
|
||||
* @param {number} length
|
||||
* @param {number[]} blockIds
|
||||
*/
|
||||
constructor(offset, length, blockIds) {
|
||||
this.offset = offset;
|
||||
this.length = length;
|
||||
this.blockIds = blockIds;
|
||||
}
|
||||
}
|
||||
class BlockedSource extends basesource_js_1.BaseSource {
|
||||
/**
|
||||
*
|
||||
* @param {Source} source The underlying source that shall be blocked and cached
|
||||
* @param {object} options
|
||||
*/
|
||||
constructor(source, { blockSize = 65536, cacheSize = 100 } = {}) {
|
||||
super();
|
||||
this.source = source;
|
||||
this.blockSize = blockSize;
|
||||
this.blockCache = new lru_cache_1.default({ max: cacheSize });
|
||||
// mapping blockId -> Block instance
|
||||
this.blockRequests = new Map();
|
||||
// set of blockIds missing for the current requests
|
||||
this.blockIdsToFetch = new Set();
|
||||
}
|
||||
get fileSize() {
|
||||
return this.source.fileSize;
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @param {basesource/Slice[]} slices
|
||||
*/
|
||||
async fetch(slices, signal) {
|
||||
const cachedBlocks = new Map();
|
||||
const blockRequests = new Map();
|
||||
const missingBlockIds = new Set();
|
||||
for (const { offset, length } of slices) {
|
||||
let top = offset + length;
|
||||
const { fileSize } = this;
|
||||
if (fileSize !== null) {
|
||||
top = Math.min(top, fileSize);
|
||||
}
|
||||
const firstBlockOffset = Math.floor(offset / this.blockSize) * this.blockSize;
|
||||
// chunk the current slice into blocks
|
||||
for (let current = firstBlockOffset; current < top; current += this.blockSize) {
|
||||
// check if the block is cached, being requested or still missing
|
||||
const blockId = Math.floor(current / this.blockSize);
|
||||
if (this.blockCache.has(blockId)) {
|
||||
cachedBlocks.set(blockId, this.blockCache.get(blockId));
|
||||
}
|
||||
else if (this.blockRequests.has(blockId)) {
|
||||
blockRequests.set(blockId, this.blockRequests.get(blockId));
|
||||
}
|
||||
else if (this.blockIdsToFetch.has(blockId)) {
|
||||
missingBlockIds.add(blockId);
|
||||
}
|
||||
else {
|
||||
this.blockIdsToFetch.add(blockId);
|
||||
missingBlockIds.add(blockId);
|
||||
}
|
||||
}
|
||||
}
|
||||
// allow additional block requests to accumulate
|
||||
await (0, utils_js_1.wait)();
|
||||
this.fetchBlocks(signal);
|
||||
for (const blockId of missingBlockIds) {
|
||||
const block = this.blockRequests.get(blockId);
|
||||
const cachedBlock = this.blockCache.get(blockId);
|
||||
if (block) {
|
||||
blockRequests.set(blockId, block);
|
||||
}
|
||||
else if (cachedBlock) {
|
||||
cachedBlocks.set(blockId, cachedBlock);
|
||||
}
|
||||
else {
|
||||
throw new Error(`Block ${blockId} is not in the block requests`);
|
||||
}
|
||||
}
|
||||
// actually await all pending requests
|
||||
let results = await Promise.allSettled(Array.from(blockRequests.values()));
|
||||
// perform retries if a block was interrupted by a previous signal
|
||||
if (results.some((result) => result.status === 'rejected')) {
|
||||
const retriedBlockRequests = new Set();
|
||||
for (const [blockId, result] of (0, utils_js_1.zip)(blockRequests.keys(), results)) {
|
||||
const { rejected, reason } = result;
|
||||
if (rejected) {
|
||||
// push some blocks back to the to-fetch list if they were
|
||||
// aborted, but only when a different signal was used
|
||||
if (reason.name === 'AbortError' && reason.signal !== signal) {
|
||||
this.blockIdsToFetch.add(blockId);
|
||||
retriedBlockRequests.add(blockId);
|
||||
}
|
||||
}
|
||||
}
|
||||
// start the retry of some blocks if required
|
||||
if (this.blockIdsToFetch.length > 0) {
|
||||
this.fetchBlocks(signal);
|
||||
for (const blockId of retriedBlockRequests) {
|
||||
const block = this.blockRequests.get(blockId);
|
||||
if (!block) {
|
||||
throw new Error(`Block ${blockId} is not in the block requests`);
|
||||
}
|
||||
blockRequests.set(blockId, block);
|
||||
}
|
||||
results = await Promise.allSettled(Array.from(blockRequests.values()));
|
||||
}
|
||||
}
|
||||
// throw an error (either abort error or AggregateError if no abort was done)
|
||||
if (results.some((result) => result.status === 'rejected')) {
|
||||
if (signal && signal.aborted) {
|
||||
throw new utils_js_1.AbortError('Request was aborted');
|
||||
}
|
||||
throw new utils_js_1.AggregateError(results.filter((result) => result.status === 'rejected').map((result) => result.reason), 'Request failed');
|
||||
}
|
||||
// extract the actual block responses
|
||||
const values = results.map((result) => result.value);
|
||||
// create a final Map, with all required blocks for this request to satisfy
|
||||
const requiredBlocks = new Map((0, utils_js_1.zip)(Array.from(blockRequests.keys()), values));
|
||||
for (const [blockId, block] of cachedBlocks) {
|
||||
requiredBlocks.set(blockId, block);
|
||||
}
|
||||
// TODO: satisfy each slice
|
||||
return this.readSliceData(slices, requiredBlocks);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @param {AbortSignal} signal
|
||||
*/
|
||||
fetchBlocks(signal) {
|
||||
// check if we still need to
|
||||
if (this.blockIdsToFetch.size > 0) {
|
||||
const groups = this.groupBlocks(this.blockIdsToFetch);
|
||||
// start requesting slices of data
|
||||
const groupRequests = this.source.fetch(groups, signal);
|
||||
for (let groupIndex = 0; groupIndex < groups.length; ++groupIndex) {
|
||||
const group = groups[groupIndex];
|
||||
for (const blockId of group.blockIds) {
|
||||
// make an async IIFE for each block
|
||||
const blockRequest = (async () => {
|
||||
try {
|
||||
const response = (await groupRequests)[groupIndex];
|
||||
const blockOffset = blockId * this.blockSize;
|
||||
const o = blockOffset - response.offset;
|
||||
const t = Math.min(o + this.blockSize, response.data.byteLength);
|
||||
const data = response.data.slice(o, t);
|
||||
const block = new Block(blockOffset, data.byteLength, data);
|
||||
this.blockCache.set(blockId, block);
|
||||
return block;
|
||||
}
|
||||
catch (err) {
|
||||
if (err.name === 'AbortError') {
|
||||
// store the signal here, we need it to determine later if an
|
||||
// error was caused by this signal
|
||||
err.signal = signal;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
finally {
|
||||
this.blockRequests.delete(blockId);
|
||||
}
|
||||
})();
|
||||
this.blockRequests.set(blockId, blockRequest);
|
||||
}
|
||||
}
|
||||
this.blockIdsToFetch.clear();
|
||||
}
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @param {Set} blockIds
|
||||
* @returns {BlockGroup[]}
|
||||
*/
|
||||
groupBlocks(blockIds) {
|
||||
const sortedBlockIds = Array.from(blockIds).sort((a, b) => a - b);
|
||||
if (sortedBlockIds.length === 0) {
|
||||
return [];
|
||||
}
|
||||
let current = [];
|
||||
let lastBlockId = null;
|
||||
const groups = [];
|
||||
for (const blockId of sortedBlockIds) {
|
||||
if (lastBlockId === null || lastBlockId + 1 === blockId) {
|
||||
current.push(blockId);
|
||||
lastBlockId = blockId;
|
||||
}
|
||||
else {
|
||||
groups.push(new BlockGroup(current[0] * this.blockSize, current.length * this.blockSize, current));
|
||||
current = [blockId];
|
||||
lastBlockId = blockId;
|
||||
}
|
||||
}
|
||||
groups.push(new BlockGroup(current[0] * this.blockSize, current.length * this.blockSize, current));
|
||||
return groups;
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @param {Slice[]} slices
|
||||
* @param {Map} blocks
|
||||
*/
|
||||
readSliceData(slices, blocks) {
|
||||
return slices.map((slice) => {
|
||||
const top = slice.offset + slice.length;
|
||||
const blockIdLow = Math.floor(slice.offset / this.blockSize);
|
||||
const blockIdHigh = Math.floor((slice.offset + slice.length) / this.blockSize);
|
||||
const sliceData = new ArrayBuffer(slice.length);
|
||||
const sliceView = new Uint8Array(sliceData);
|
||||
for (let blockId = blockIdLow; blockId <= blockIdHigh; ++blockId) {
|
||||
const block = blocks.get(blockId);
|
||||
const delta = block.offset - slice.offset;
|
||||
const topDelta = block.top - top;
|
||||
let blockInnerOffset = 0;
|
||||
let rangeInnerOffset = 0;
|
||||
let usedBlockLength;
|
||||
if (delta < 0) {
|
||||
blockInnerOffset = -delta;
|
||||
}
|
||||
else if (delta > 0) {
|
||||
rangeInnerOffset = delta;
|
||||
}
|
||||
if (topDelta < 0) {
|
||||
usedBlockLength = block.length - blockInnerOffset;
|
||||
}
|
||||
else {
|
||||
usedBlockLength = top - block.offset - blockInnerOffset;
|
||||
}
|
||||
const blockView = new Uint8Array(block.data, blockInnerOffset, usedBlockLength);
|
||||
sliceView.set(blockView, rangeInnerOffset);
|
||||
}
|
||||
return sliceData;
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.BlockedSource = BlockedSource;
|
||||
//# sourceMappingURL=blockedsource.js.map
|
||||
1
node_modules/geotiff/dist-node/source/blockedsource.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/blockedsource.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
30
node_modules/geotiff/dist-node/source/client/base.d.ts
generated
vendored
Normal file
30
node_modules/geotiff/dist-node/source/client/base.d.ts
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
export class BaseResponse {
|
||||
/**
|
||||
* Returns whether the response has an ok'ish status code
|
||||
*/
|
||||
get ok(): boolean;
|
||||
/**
|
||||
* Returns the status code of the response
|
||||
*/
|
||||
get status(): void;
|
||||
/**
|
||||
* Returns the value of the specified header
|
||||
* @param {string} headerName the header name
|
||||
* @returns {string} the header value
|
||||
*/
|
||||
getHeader(headerName: string): string;
|
||||
/**
|
||||
* @returns {ArrayBuffer} the response data of the request
|
||||
*/
|
||||
getData(): ArrayBuffer;
|
||||
}
|
||||
export class BaseClient {
|
||||
constructor(url: any);
|
||||
url: any;
|
||||
/**
|
||||
* Send a request with the options
|
||||
* @param {object} [options]
|
||||
*/
|
||||
request({ headers, credentials, signal }?: object): Promise<void>;
|
||||
}
|
||||
//# sourceMappingURL=base.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/client/base.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/client/base.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"base.d.ts","sourceRoot":"","sources":["../../../dist-module/source/client/base.js"],"names":[],"mappings":"AAAA;IACE;;OAEG;IACH,kBAEC;IAED;;OAEG;IACH,mBAEC;IAED;;;;OAIG;IACH,sBAHW,MAAM,GACJ,MAAM,CAIlB;IAED;;OAEG;IACH,WAFa,WAAW,CAIvB;CACF;AAED;IACE,sBAEC;IADC,SAAc;IAGhB;;;OAGG;IACH,2CAFW,MAAM,iBAIhB;CACF"}
|
||||
46
node_modules/geotiff/dist-node/source/client/base.js
generated
vendored
Normal file
46
node_modules/geotiff/dist-node/source/client/base.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.BaseClient = exports.BaseResponse = void 0;
|
||||
class BaseResponse {
|
||||
/**
|
||||
* Returns whether the response has an ok'ish status code
|
||||
*/
|
||||
get ok() {
|
||||
return this.status >= 200 && this.status <= 299;
|
||||
}
|
||||
/**
|
||||
* Returns the status code of the response
|
||||
*/
|
||||
get status() {
|
||||
throw new Error('not implemented');
|
||||
}
|
||||
/**
|
||||
* Returns the value of the specified header
|
||||
* @param {string} headerName the header name
|
||||
* @returns {string} the header value
|
||||
*/
|
||||
getHeader(headerName) {
|
||||
throw new Error('not implemented');
|
||||
}
|
||||
/**
|
||||
* @returns {ArrayBuffer} the response data of the request
|
||||
*/
|
||||
async getData() {
|
||||
throw new Error('not implemented');
|
||||
}
|
||||
}
|
||||
exports.BaseResponse = BaseResponse;
|
||||
class BaseClient {
|
||||
constructor(url) {
|
||||
this.url = url;
|
||||
}
|
||||
/**
|
||||
* Send a request with the options
|
||||
* @param {object} [options]
|
||||
*/
|
||||
async request({ headers, credentials, signal } = {}) {
|
||||
throw new Error('request is not implemented');
|
||||
}
|
||||
}
|
||||
exports.BaseClient = BaseClient;
|
||||
//# sourceMappingURL=base.js.map
|
||||
1
node_modules/geotiff/dist-node/source/client/base.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/client/base.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"base.js","sourceRoot":"","sources":["../../../dist-module/source/client/base.js"],"names":[],"mappings":";;;AAAA,MAAa,YAAY;IACvB;;OAEG;IACH,IAAI,EAAE;QACJ,OAAO,IAAI,CAAC,MAAM,IAAI,GAAG,IAAI,IAAI,CAAC,MAAM,IAAI,GAAG,CAAC;IAClD,CAAC;IAED;;OAEG;IACH,IAAI,MAAM;QACR,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,SAAS,CAAC,UAAU;QAClB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;IACrC,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,OAAO;QACX,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;IACrC,CAAC;CACF;AA9BD,oCA8BC;AAED,MAAa,UAAU;IACrB,YAAY,GAAG;QACb,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;IACjB,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,OAAO,CAAC,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,EAAE;QACjD,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;IAChD,CAAC;CACF;AAZD,gCAYC"}
|
||||
6
node_modules/geotiff/dist-node/source/client/fetch.d.ts
generated
vendored
Normal file
6
node_modules/geotiff/dist-node/source/client/fetch.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
export class FetchClient extends BaseClient {
|
||||
constructor(url: any, credentials: any);
|
||||
credentials: any;
|
||||
}
|
||||
import { BaseClient } from "./base.js";
|
||||
//# sourceMappingURL=fetch.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/client/fetch.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/client/fetch.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"fetch.d.ts","sourceRoot":"","sources":["../../../dist-module/source/client/fetch.js"],"names":[],"mappings":"AA4BA;IACE,wCAGC;IADC,iBAA8B;CASjC"}
|
||||
40
node_modules/geotiff/dist-node/source/client/fetch.js
generated
vendored
Normal file
40
node_modules/geotiff/dist-node/source/client/fetch.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.FetchClient = void 0;
|
||||
const base_js_1 = require("./base.js");
|
||||
class FetchResponse extends base_js_1.BaseResponse {
|
||||
/**
|
||||
* BaseResponse facade for fetch API Response
|
||||
* @param {Response} response
|
||||
*/
|
||||
constructor(response) {
|
||||
super();
|
||||
this.response = response;
|
||||
}
|
||||
get status() {
|
||||
return this.response.status;
|
||||
}
|
||||
getHeader(name) {
|
||||
return this.response.headers.get(name);
|
||||
}
|
||||
async getData() {
|
||||
const data = this.response.arrayBuffer
|
||||
? await this.response.arrayBuffer()
|
||||
: (await this.response.buffer()).buffer;
|
||||
return data;
|
||||
}
|
||||
}
|
||||
class FetchClient extends base_js_1.BaseClient {
|
||||
constructor(url, credentials) {
|
||||
super(url);
|
||||
this.credentials = credentials;
|
||||
}
|
||||
async request({ headers, credentials, signal } = {}) {
|
||||
const response = await fetch(this.url, {
|
||||
headers, credentials, signal,
|
||||
});
|
||||
return new FetchResponse(response);
|
||||
}
|
||||
}
|
||||
exports.FetchClient = FetchClient;
|
||||
//# sourceMappingURL=fetch.js.map
|
||||
1
node_modules/geotiff/dist-node/source/client/fetch.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/client/fetch.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"fetch.js","sourceRoot":"","sources":["../../../dist-module/source/client/fetch.js"],"names":[],"mappings":";;;AAAA,uCAAqD;AAErD,MAAM,aAAc,SAAQ,sBAAY;IACtC;;;OAGG;IACH,YAAY,QAAQ;QAClB,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;IAC3B,CAAC;IAED,IAAI,MAAM;QACR,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC;IAC9B,CAAC;IAED,SAAS,CAAC,IAAI;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;IACzC,CAAC;IAED,KAAK,CAAC,OAAO;QACX,MAAM,IAAI,GAAG,IAAI,CAAC,QAAQ,CAAC,WAAW;YACpC,CAAC,CAAC,MAAM,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE;YACnC,CAAC,CAAC,CAAC,MAAM,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,CAAC;QAC1C,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AAED,MAAa,WAAY,SAAQ,oBAAU;IACzC,YAAY,GAAG,EAAE,WAAW;QAC1B,KAAK,CAAC,GAAG,CAAC,CAAC;QACX,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;IACjC,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,EAAE;QACjD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE;YACrC,OAAO,EAAE,WAAW,EAAE,MAAM;SAC7B,CAAC,CAAC;QACH,OAAO,IAAI,aAAa,CAAC,QAAQ,CAAC,CAAC;IACrC,CAAC;CACF;AAZD,kCAYC"}
|
||||
11
node_modules/geotiff/dist-node/source/client/http.d.ts
generated
vendored
Normal file
11
node_modules/geotiff/dist-node/source/client/http.d.ts
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
/// <reference types="node" />
|
||||
export class HttpClient extends BaseClient {
|
||||
parsedUrl: urlMod.UrlWithStringQuery;
|
||||
httpApi: typeof http | typeof https;
|
||||
constructRequest(headers: any, signal: any): Promise<any>;
|
||||
}
|
||||
import { BaseClient } from "./base.js";
|
||||
import urlMod from "url";
|
||||
import http from "http";
|
||||
import https from "https";
|
||||
//# sourceMappingURL=http.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/client/http.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/client/http.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"http.d.ts","sourceRoot":"","sources":["../../../dist-module/source/client/http.js"],"names":[],"mappings":";AAgCA;IAGI,qCAAuC;IACvC,oCAAmE;IAGrE,0DAmCC;CAMF"}
|
||||
75
node_modules/geotiff/dist-node/source/client/http.js
generated
vendored
Normal file
75
node_modules/geotiff/dist-node/source/client/http.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.HttpClient = void 0;
|
||||
const http_1 = __importDefault(require("http"));
|
||||
const https_1 = __importDefault(require("https"));
|
||||
const url_1 = __importDefault(require("url"));
|
||||
const base_js_1 = require("./base.js");
|
||||
const utils_js_1 = require("../../utils.js");
|
||||
class HttpResponse extends base_js_1.BaseResponse {
|
||||
/**
|
||||
* BaseResponse facade for node HTTP/HTTPS API Response
|
||||
* @param {http.ServerResponse} response
|
||||
*/
|
||||
constructor(response, dataPromise) {
|
||||
super();
|
||||
this.response = response;
|
||||
this.dataPromise = dataPromise;
|
||||
}
|
||||
get status() {
|
||||
return this.response.statusCode;
|
||||
}
|
||||
getHeader(name) {
|
||||
return this.response.headers[name];
|
||||
}
|
||||
async getData() {
|
||||
const data = await this.dataPromise;
|
||||
return data;
|
||||
}
|
||||
}
|
||||
class HttpClient extends base_js_1.BaseClient {
|
||||
constructor(url) {
|
||||
super(url);
|
||||
this.parsedUrl = url_1.default.parse(this.url);
|
||||
this.httpApi = (this.parsedUrl.protocol === 'http:' ? http_1.default : https_1.default);
|
||||
}
|
||||
constructRequest(headers, signal) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const request = this.httpApi.get({
|
||||
...this.parsedUrl,
|
||||
headers,
|
||||
}, (response) => {
|
||||
const dataPromise = new Promise((resolveData) => {
|
||||
const chunks = [];
|
||||
// collect chunks
|
||||
response.on('data', (chunk) => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
// concatenate all chunks and resolve the promise with the resulting buffer
|
||||
response.on('end', () => {
|
||||
const data = Buffer.concat(chunks).buffer;
|
||||
resolveData(data);
|
||||
});
|
||||
response.on('error', reject);
|
||||
});
|
||||
resolve(new HttpResponse(response, dataPromise));
|
||||
});
|
||||
request.on('error', reject);
|
||||
if (signal) {
|
||||
if (signal.aborted) {
|
||||
request.destroy(new utils_js_1.AbortError('Request aborted'));
|
||||
}
|
||||
signal.addEventListener('abort', () => request.destroy(new utils_js_1.AbortError('Request aborted')));
|
||||
}
|
||||
});
|
||||
}
|
||||
async request({ headers, signal } = {}) {
|
||||
const response = await this.constructRequest(headers, signal);
|
||||
return response;
|
||||
}
|
||||
}
|
||||
exports.HttpClient = HttpClient;
|
||||
//# sourceMappingURL=http.js.map
|
||||
1
node_modules/geotiff/dist-node/source/client/http.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/client/http.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"http.js","sourceRoot":"","sources":["../../../dist-module/source/client/http.js"],"names":[],"mappings":";;;;;;AAAA,gDAAwB;AACxB,kDAA0B;AAC1B,8CAAyB;AAEzB,uCAAqD;AACrD,6CAA4C;AAE5C,MAAM,YAAa,SAAQ,sBAAY;IACrC;;;OAGG;IACH,YAAY,QAAQ,EAAE,WAAW;QAC/B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;IACjC,CAAC;IAED,IAAI,MAAM;QACR,OAAO,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC;IAClC,CAAC;IAED,SAAS,CAAC,IAAI;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACrC,CAAC;IAED,KAAK,CAAC,OAAO;QACX,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC;QACpC,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AAED,MAAa,UAAW,SAAQ,oBAAU;IACxC,YAAY,GAAG;QACb,KAAK,CAAC,GAAG,CAAC,CAAC;QACX,IAAI,CAAC,SAAS,GAAG,aAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QACxC,IAAI,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,cAAI,CAAC,CAAC,CAAC,eAAK,CAAC,CAAC;IACtE,CAAC;IAED,gBAAgB,CAAC,OAAO,EAAE,MAAM;QAC9B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAC9B;gBACE,GAAG,IAAI,CAAC,SAAS;gBACjB,OAAO;aACR,EACD,CAAC,QAAQ,EAAE,EAAE;gBACX,MAAM,WAAW,GAAG,IAAI,OAAO,CAAC,CAAC,WAAW,EAAE,EAAE;oBAC9C,MAAM,MAAM,GAAG,EAAE,CAAC;oBAElB,iBAAiB;oBACjB,QAAQ,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAK,EAAE,EAAE;wBAC5B,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;oBACrB,CAAC,CAAC,CAAC;oBAEH,2EAA2E;oBAC3E,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;wBACtB,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC;wBAC1C,WAAW,CAAC,IAAI,CAAC,CAAC;oBACpB,CAAC,CAAC,CAAC;oBACH,QAAQ,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;gBAC/B,CAAC,CAAC,CAAC;gBACH,OAAO,CAAC,IAAI,YAAY,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC,CAAC;YACnD,CAAC,CACF,CAAC;YACF,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAE5B,IAAI,MAAM,EAAE;gBACV,IAAI,MAAM,CAAC,OAAO,EAAE;oBAClB,OAAO,CAAC,OAAO,CAAC,IAAI,qBAAU,CAAC,iBAAiB,CAAC,CAAC,CAAC;iBACpD;gBACD,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,qBAAU,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC;aAC5F;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,OAAO,C
AAC,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,EAAE;QACpC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC9D,OAAO,QAAQ,CAAC;IAClB,CAAC;CACF;AAhDD,gCAgDC"}
|
||||
5
node_modules/geotiff/dist-node/source/client/xhr.d.ts
generated
vendored
Normal file
5
node_modules/geotiff/dist-node/source/client/xhr.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
export class XHRClient extends BaseClient {
|
||||
constructRequest(headers: any, signal: any): Promise<any>;
|
||||
}
|
||||
import { BaseClient } from "./base.js";
|
||||
//# sourceMappingURL=xhr.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/client/xhr.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/client/xhr.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"xhr.d.ts","sourceRoot":"","sources":["../../../dist-module/source/client/xhr.js"],"names":[],"mappings":"AA4BA;IACE,0DAyBC;CAMF"}
|
||||
58
node_modules/geotiff/dist-node/source/client/xhr.js
generated
vendored
Normal file
58
node_modules/geotiff/dist-node/source/client/xhr.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.XHRClient = void 0;
|
||||
const base_js_1 = require("./base.js");
|
||||
const utils_js_1 = require("../../utils.js");
|
||||
class XHRResponse extends base_js_1.BaseResponse {
|
||||
/**
|
||||
* BaseResponse facade for XMLHttpRequest
|
||||
* @param {XMLHttpRequest} xhr
|
||||
* @param {ArrayBuffer} data
|
||||
*/
|
||||
constructor(xhr, data) {
|
||||
super();
|
||||
this.xhr = xhr;
|
||||
this.data = data;
|
||||
}
|
||||
get status() {
|
||||
return this.xhr.status;
|
||||
}
|
||||
getHeader(name) {
|
||||
return this.xhr.getResponseHeader(name);
|
||||
}
|
||||
async getData() {
|
||||
return this.data;
|
||||
}
|
||||
}
|
||||
class XHRClient extends base_js_1.BaseClient {
|
||||
constructRequest(headers, signal) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', this.url);
|
||||
xhr.responseType = 'arraybuffer';
|
||||
for (const [key, value] of Object.entries(headers)) {
|
||||
xhr.setRequestHeader(key, value);
|
||||
}
|
||||
// hook signals
|
||||
xhr.onload = () => {
|
||||
const data = xhr.response;
|
||||
resolve(new XHRResponse(xhr, data));
|
||||
};
|
||||
xhr.onerror = reject;
|
||||
xhr.onabort = () => reject(new utils_js_1.AbortError('Request aborted'));
|
||||
xhr.send();
|
||||
if (signal) {
|
||||
if (signal.aborted) {
|
||||
xhr.abort();
|
||||
}
|
||||
signal.addEventListener('abort', () => xhr.abort());
|
||||
}
|
||||
});
|
||||
}
|
||||
async request({ headers, signal } = {}) {
|
||||
const response = await this.constructRequest(headers, signal);
|
||||
return response;
|
||||
}
|
||||
}
|
||||
exports.XHRClient = XHRClient;
|
||||
//# sourceMappingURL=xhr.js.map
|
||||
1
node_modules/geotiff/dist-node/source/client/xhr.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/client/xhr.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"xhr.js","sourceRoot":"","sources":["../../../dist-module/source/client/xhr.js"],"names":[],"mappings":";;;AAAA,uCAAqD;AACrD,6CAA4C;AAE5C,MAAM,WAAY,SAAQ,sBAAY;IACpC;;;;OAIG;IACH,YAAY,GAAG,EAAE,IAAI;QACnB,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;IACnB,CAAC;IAED,IAAI,MAAM;QACR,OAAO,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC;IACzB,CAAC;IAED,SAAS,CAAC,IAAI;QACZ,OAAO,IAAI,CAAC,GAAG,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;IAC1C,CAAC;IAED,KAAK,CAAC,OAAO;QACX,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;CACF;AAED,MAAa,SAAU,SAAQ,oBAAU;IACvC,gBAAgB,CAAC,OAAO,EAAE,MAAM;QAC9B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,MAAM,GAAG,GAAG,IAAI,cAAc,EAAE,CAAC;YACjC,GAAG,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC;YAC1B,GAAG,CAAC,YAAY,GAAG,aAAa,CAAC;YACjC,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;gBAClD,GAAG,CAAC,gBAAgB,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;aAClC;YAED,eAAe;YACf,GAAG,CAAC,MAAM,GAAG,GAAG,EAAE;gBAChB,MAAM,IAAI,GAAG,GAAG,CAAC,QAAQ,CAAC;gBAC1B,OAAO,CAAC,IAAI,WAAW,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC;YACtC,CAAC,CAAC;YACF,GAAG,CAAC,OAAO,GAAG,MAAM,CAAC;YACrB,GAAG,CAAC,OAAO,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,IAAI,qBAAU,CAAC,iBAAiB,CAAC,CAAC,CAAC;YAC9D,GAAG,CAAC,IAAI,EAAE,CAAC;YAEX,IAAI,MAAM,EAAE;gBACV,IAAI,MAAM,CAAC,OAAO,EAAE;oBAClB,GAAG,CAAC,KAAK,EAAE,CAAC;iBACb;gBACD,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,CAAC;aACrD;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,EAAE;QACpC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC9D,OAAO,QAAQ,CAAC;IAClB,CAAC;CACF;AAhCD,8BAgCC"}
|
||||
9
node_modules/geotiff/dist-node/source/file.d.ts
generated
vendored
Normal file
9
node_modules/geotiff/dist-node/source/file.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
export function makeFileSource(path: any): FileSource;
|
||||
declare class FileSource extends BaseSource {
|
||||
constructor(path: any);
|
||||
path: any;
|
||||
openRequest: Promise<any>;
|
||||
}
|
||||
import { BaseSource } from "./basesource.js";
|
||||
export {};
|
||||
//# sourceMappingURL=file.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/file.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/file.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../dist-module/source/file.js"],"names":[],"mappings":"AAiEA,sDAEC;AA5BD;IACE,uBAIC;IAFC,UAAgB;IAChB,0BAAuC;CAoB1C"}
|
||||
66
node_modules/geotiff/dist-node/source/file.js
generated
vendored
Normal file
66
node_modules/geotiff/dist-node/source/file.js
generated
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.makeFileSource = void 0;
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const basesource_js_1 = require("./basesource.js");
|
||||
function closeAsync(fd) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs_1.default.close(fd, (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
function openAsync(path, flags, mode = undefined) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs_1.default.open(path, flags, mode, (err, fd) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
resolve(fd);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
function readAsync(...args) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs_1.default.read(...args, (err, bytesRead, buffer) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
resolve({ bytesRead, buffer });
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
class FileSource extends basesource_js_1.BaseSource {
|
||||
constructor(path) {
|
||||
super();
|
||||
this.path = path;
|
||||
this.openRequest = openAsync(path, 'r');
|
||||
}
|
||||
async fetchSlice(slice) {
|
||||
// TODO: use `signal`
|
||||
const fd = await this.openRequest;
|
||||
const { buffer } = await readAsync(fd, Buffer.alloc(slice.length), 0, slice.length, slice.offset);
|
||||
return buffer.buffer;
|
||||
}
|
||||
async close() {
|
||||
const fd = await this.openRequest;
|
||||
await closeAsync(fd);
|
||||
}
|
||||
}
|
||||
function makeFileSource(path) {
|
||||
return new FileSource(path);
|
||||
}
|
||||
exports.makeFileSource = makeFileSource;
|
||||
//# sourceMappingURL=file.js.map
|
||||
1
node_modules/geotiff/dist-node/source/file.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/file.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"file.js","sourceRoot":"","sources":["../../dist-module/source/file.js"],"names":[],"mappings":";;;;;;AAAA,4CAAoB;AACpB,mDAA6C;AAE7C,SAAS,UAAU,CAAC,EAAE;IACpB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,YAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,GAAG,EAAE,EAAE;YACnB,IAAI,GAAG,EAAE;gBACP,MAAM,CAAC,GAAG,CAAC,CAAC;aACb;iBAAM;gBACL,OAAO,EAAE,CAAC;aACX;QACH,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,SAAS,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,GAAG,SAAS;IAC9C,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,YAAE,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,GAAG,EAAE,EAAE,EAAE,EAAE;YACrC,IAAI,GAAG,EAAE;gBACP,MAAM,CAAC,GAAG,CAAC,CAAC;aACb;iBAAM;gBACL,OAAO,CAAC,EAAE,CAAC,CAAC;aACb;QACH,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,SAAS,CAAC,GAAG,IAAI;IACxB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,YAAE,CAAC,IAAI,CAAC,GAAG,IAAI,EAAE,CAAC,GAAG,EAAE,SAAS,EAAE,MAAM,EAAE,EAAE;YAC1C,IAAI,GAAG,EAAE;gBACP,MAAM,CAAC,GAAG,CAAC,CAAC;aACb;iBAAM;gBACL,OAAO,CAAC,EAAE,SAAS,EAAE,MAAM,EAAE,CAAC,CAAC;aAChC;QACH,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,UAAW,SAAQ,0BAAU;IACjC,YAAY,IAAI;QACd,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;IAC1C,CAAC;IAED,KAAK,CAAC,UAAU,CAAC,KAAK;QACpB,qBAAqB;QACrB,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC;QAClC,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,SAAS,CAChC,EAAE,EACF,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,EAC1B,CAAC,EACD,KAAK,CAAC,MAAM,EACZ,KAAK,CAAC,MAAM,CACb,CAAC;QACF,OAAO,MAAM,CAAC,MAAM,CAAC;IACvB,CAAC;IAED,KAAK,CAAC,KAAK;QACT,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC;QAClC,MAAM,UAAU,CAAC,EAAE,CAAC,CAAC;IACvB,CAAC;CACF;AAED,SAAgB,cAAc,CAAC,IAAI;IACjC,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC;AAFD,wCAEC"}
|
||||
13
node_modules/geotiff/dist-node/source/filereader.d.ts
generated
vendored
Normal file
13
node_modules/geotiff/dist-node/source/filereader.d.ts
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
/**
|
||||
* Create a new source from a given file/blob.
|
||||
* @param {Blob} file The file or blob to read from.
|
||||
* @returns The constructed source
|
||||
*/
|
||||
export function makeFileReaderSource(file: Blob): FileReaderSource;
|
||||
declare class FileReaderSource extends BaseSource {
|
||||
constructor(file: any);
|
||||
file: any;
|
||||
}
|
||||
import { BaseSource } from "./basesource.js";
|
||||
export {};
|
||||
//# sourceMappingURL=filereader.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/filereader.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/filereader.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"filereader.d.ts","sourceRoot":"","sources":["../../dist-module/source/filereader.js"],"names":[],"mappings":"AAwBA;;;;GAIG;AACH,2CAHW,IAAI,oBAKd;AA7BD;IACE,uBAGC;IADC,UAAgB;CAiBnB"}
|
||||
33
node_modules/geotiff/dist-node/source/filereader.js
generated
vendored
Normal file
33
node_modules/geotiff/dist-node/source/filereader.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.makeFileReaderSource = void 0;
|
||||
const basesource_js_1 = require("./basesource.js");
|
||||
class FileReaderSource extends basesource_js_1.BaseSource {
|
||||
constructor(file) {
|
||||
super();
|
||||
this.file = file;
|
||||
}
|
||||
async fetchSlice(slice, signal) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const blob = this.file.slice(slice.offset, slice.offset + slice.length);
|
||||
const reader = new FileReader();
|
||||
reader.onload = (event) => resolve(event.target.result);
|
||||
reader.onerror = reject;
|
||||
reader.onabort = reject;
|
||||
reader.readAsArrayBuffer(blob);
|
||||
if (signal) {
|
||||
signal.addEventListener('abort', () => reader.abort());
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Create a new source from a given file/blob.
|
||||
* @param {Blob} file The file or blob to read from.
|
||||
* @returns The constructed source
|
||||
*/
|
||||
function makeFileReaderSource(file) {
|
||||
return new FileReaderSource(file);
|
||||
}
|
||||
exports.makeFileReaderSource = makeFileReaderSource;
|
||||
//# sourceMappingURL=filereader.js.map
|
||||
1
node_modules/geotiff/dist-node/source/filereader.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/filereader.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"filereader.js","sourceRoot":"","sources":["../../dist-module/source/filereader.js"],"names":[],"mappings":";;;AAAA,mDAA6C;AAE7C,MAAM,gBAAiB,SAAQ,0BAAU;IACvC,YAAY,IAAI;QACd,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;IACnB,CAAC;IAED,KAAK,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM;QAC5B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;YACxE,MAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;YAChC,MAAM,CAAC,MAAM,GAAG,CAAC,KAAK,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YACxD,MAAM,CAAC,OAAO,GAAG,MAAM,CAAC;YACxB,MAAM,CAAC,OAAO,GAAG,MAAM,CAAC;YACxB,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;YAE/B,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC;aACxD;QACH,CAAC,CAAC,CAAC;IACL,CAAC;CACF;AAED;;;;GAIG;AACH,SAAgB,oBAAoB,CAAC,IAAI;IACvC,OAAO,IAAI,gBAAgB,CAAC,IAAI,CAAC,CAAC;AACpC,CAAC;AAFD,oDAEC"}
|
||||
25
node_modules/geotiff/dist-node/source/httputils.d.ts
generated
vendored
Normal file
25
node_modules/geotiff/dist-node/source/httputils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
|
||||
* Parse a 'Content-Type' header value to the content-type and parameters
|
||||
* @param {String} rawContentType the raw string to parse from
|
||||
* @returns {Object} the parsed content type with the fields: type and params
|
||||
*/
|
||||
export function parseContentType(rawContentType: string): any;
|
||||
/**
|
||||
* Parse a 'Content-Range' header value to its start, end, and total parts
|
||||
* @param {String} rawContentRange the raw string to parse from
|
||||
* @returns {Object} the parsed parts
|
||||
*/
|
||||
export function parseContentRange(rawContentRange: string): any;
|
||||
/**
|
||||
* Parses a list of byteranges from the given 'multipart/byteranges' HTTP response.
|
||||
* Each item in the list has the following properties:
|
||||
* - headers: the HTTP headers
|
||||
* - data: the sliced ArrayBuffer for that specific part
|
||||
* - offset: the offset of the byterange within its originating file
|
||||
* - length: the length of the byterange
|
||||
* @param {ArrayBuffer} responseArrayBuffer the response to be parsed and split
|
||||
* @param {String} boundary the boundary string used to split the sections
|
||||
* @returns {Object[]} the parsed byteranges
|
||||
*/
|
||||
export function parseByteRanges(responseArrayBuffer: ArrayBuffer, boundary: string): any[];
|
||||
//# sourceMappingURL=httputils.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/httputils.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/httputils.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"httputils.d.ts","sourceRoot":"","sources":["../../dist-module/source/httputils.js"],"names":[],"mappings":"AAiCA;;;;GAIG;AACH,8DAIC;AAED;;;;GAIG;AACH,gEAaC;AAED;;;;;;;;;;GAUG;AACH,qDAJW,WAAW,qBAET,KAAQ,CAuEpB"}
|
||||
125
node_modules/geotiff/dist-node/source/httputils.js
generated
vendored
Normal file
125
node_modules/geotiff/dist-node/source/httputils.js
generated
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.parseByteRanges = exports.parseContentRange = exports.parseContentType = void 0;
|
||||
const CRLFCRLF = '\r\n\r\n';
|
||||
/*
|
||||
* Shim for 'Object.fromEntries'
|
||||
*/
|
||||
function itemsToObject(items) {
|
||||
if (typeof Object.fromEntries !== 'undefined') {
|
||||
return Object.fromEntries(items);
|
||||
}
|
||||
const obj = {};
|
||||
for (const [key, value] of items) {
|
||||
obj[key.toLowerCase()] = value;
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
/**
|
||||
* Parse HTTP headers from a given string.
|
||||
* @param {String} text the text to parse the headers from
|
||||
* @returns {Object} the parsed headers with lowercase keys
|
||||
*/
|
||||
function parseHeaders(text) {
|
||||
const items = text
|
||||
.split('\r\n')
|
||||
.map((line) => {
|
||||
const kv = line.split(':').map((str) => str.trim());
|
||||
kv[0] = kv[0].toLowerCase();
|
||||
return kv;
|
||||
});
|
||||
return itemsToObject(items);
|
||||
}
|
||||
/**
|
||||
* Parse a 'Content-Type' header value to the content-type and parameters
|
||||
* @param {String} rawContentType the raw string to parse from
|
||||
* @returns {Object} the parsed content type with the fields: type and params
|
||||
*/
|
||||
function parseContentType(rawContentType) {
|
||||
const [type, ...rawParams] = rawContentType.split(';').map((s) => s.trim());
|
||||
const paramsItems = rawParams.map((param) => param.split('='));
|
||||
return { type, params: itemsToObject(paramsItems) };
|
||||
}
|
||||
exports.parseContentType = parseContentType;
|
||||
/**
|
||||
* Parse a 'Content-Range' header value to its start, end, and total parts
|
||||
* @param {String} rawContentRange the raw string to parse from
|
||||
* @returns {Object} the parsed parts
|
||||
*/
|
||||
function parseContentRange(rawContentRange) {
|
||||
let start;
|
||||
let end;
|
||||
let total;
|
||||
if (rawContentRange) {
|
||||
[, start, end, total] = rawContentRange.match(/bytes (\d+)-(\d+)\/(\d+)/);
|
||||
start = parseInt(start, 10);
|
||||
end = parseInt(end, 10);
|
||||
total = parseInt(total, 10);
|
||||
}
|
||||
return { start, end, total };
|
||||
}
|
||||
exports.parseContentRange = parseContentRange;
|
||||
/**
|
||||
* Parses a list of byteranges from the given 'multipart/byteranges' HTTP response.
|
||||
* Each item in the list has the following properties:
|
||||
* - headers: the HTTP headers
|
||||
* - data: the sliced ArrayBuffer for that specific part
|
||||
* - offset: the offset of the byterange within its originating file
|
||||
* - length: the length of the byterange
|
||||
* @param {ArrayBuffer} responseArrayBuffer the response to be parsed and split
|
||||
* @param {String} boundary the boundary string used to split the sections
|
||||
* @returns {Object[]} the parsed byteranges
|
||||
*/
|
||||
function parseByteRanges(responseArrayBuffer, boundary) {
|
||||
let offset = null;
|
||||
const decoder = new TextDecoder('ascii');
|
||||
const out = [];
|
||||
const startBoundary = `--${boundary}`;
|
||||
const endBoundary = `${startBoundary}--`;
|
||||
// search for the initial boundary, may be offset by some bytes
|
||||
// TODO: more efficient to check for `--` in bytes directly
|
||||
for (let i = 0; i < 10; ++i) {
|
||||
const text = decoder.decode(new Uint8Array(responseArrayBuffer, i, startBoundary.length));
|
||||
if (text === startBoundary) {
|
||||
offset = i;
|
||||
}
|
||||
}
|
||||
if (offset === null) {
|
||||
throw new Error('Could not find initial boundary');
|
||||
}
|
||||
while (offset < responseArrayBuffer.byteLength) {
|
||||
const text = decoder.decode(new Uint8Array(responseArrayBuffer, offset, Math.min(startBoundary.length + 1024, responseArrayBuffer.byteLength - offset)));
|
||||
// break if we arrived at the end
|
||||
if (text.length === 0 || text.startsWith(endBoundary)) {
|
||||
break;
|
||||
}
|
||||
// assert that we are actually dealing with a byterange and are at the correct offset
|
||||
if (!text.startsWith(startBoundary)) {
|
||||
throw new Error('Part does not start with boundary');
|
||||
}
|
||||
// get a substring from where we read the headers
|
||||
const innerText = text.substr(startBoundary.length + 2);
|
||||
if (innerText.length === 0) {
|
||||
break;
|
||||
}
|
||||
// find the double linebreak that denotes the end of the headers
|
||||
const endOfHeaders = innerText.indexOf(CRLFCRLF);
|
||||
// parse the headers to get the content range size
|
||||
const headers = parseHeaders(innerText.substr(0, endOfHeaders));
|
||||
const { start, end, total } = parseContentRange(headers['content-range']);
|
||||
// calculate the length of the slice and the next offset
|
||||
const startOfData = offset + startBoundary.length + endOfHeaders + CRLFCRLF.length;
|
||||
const length = parseInt(end, 10) + 1 - parseInt(start, 10);
|
||||
out.push({
|
||||
headers,
|
||||
data: responseArrayBuffer.slice(startOfData, startOfData + length),
|
||||
offset: start,
|
||||
length,
|
||||
fileSize: total,
|
||||
});
|
||||
offset = startOfData + length + 4;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
exports.parseByteRanges = parseByteRanges;
|
||||
//# sourceMappingURL=httputils.js.map
|
||||
1
node_modules/geotiff/dist-node/source/httputils.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/httputils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"httputils.js","sourceRoot":"","sources":["../../dist-module/source/httputils.js"],"names":[],"mappings":";;;AAAA,MAAM,QAAQ,GAAG,UAAU,CAAC;AAE5B;;GAEG;AACH,SAAS,aAAa,CAAC,KAAK;IAC1B,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;QAC7C,OAAO,MAAM,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;KAClC;IACD,MAAM,GAAG,GAAG,EAAE,CAAC;IACf,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,KAAK,EAAE;QAChC,GAAG,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,GAAG,KAAK,CAAC;KAChC;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;GAIG;AACH,SAAS,YAAY,CAAC,IAAI;IACxB,MAAM,KAAK,GAAG,IAAI;SACf,KAAK,CAAC,MAAM,CAAC;SACb,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;QACZ,MAAM,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC;QACpD,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC;QAC5B,OAAO,EAAE,CAAC;IACZ,CAAC,CAAC,CAAC;IAEL,OAAO,aAAa,CAAC,KAAK,CAAC,CAAC;AAC9B,CAAC;AAED;;;;GAIG;AACH,SAAgB,gBAAgB,CAAC,cAAc;IAC7C,MAAM,CAAC,IAAI,EAAE,GAAG,SAAS,CAAC,GAAG,cAAc,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;IAC5E,MAAM,WAAW,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;IAC/D,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,aAAa,CAAC,WAAW,CAAC,EAAE,CAAC;AACtD,CAAC;AAJD,4CAIC;AAED;;;;GAIG;AACH,SAAgB,iBAAiB,CAAC,eAAe;IAC/C,IAAI,KAAK,CAAC;IACV,IAAI,GAAG,CAAC;IACR,IAAI,KAAK,CAAC;IAEV,IAAI,eAAe,EAAE;QACnB,CAAC,EAAE,KAAK,EAAE,GAAG,EAAE,KAAK,CAAC,GAAG,eAAe,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;QAC1E,KAAK,GAAG,QAAQ,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;QAC5B,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QACxB,KAAK,GAAG,QAAQ,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;KAC7B;IAED,OAAO,EAAE,KAAK,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC;AAC/B,CAAC;AAbD,8CAaC;AAED;;;;;;;;;;GAUG;AACH,SAAgB,eAAe,CAAC,mBAAmB,EAAE,QAAQ;IAC3D,IAAI,MAAM,GAAG,IAAI,CAAC;IAClB,MAAM,OAAO,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;IACzC,MAAM,GAAG,GAAG,EAAE,CAAC;IAEf,MAAM,aAAa,GAAG,KAAK,QAAQ,EAAE,CAAC;IACtC,MAAM,WAAW,GAAG,GAAG,aAAa,IAAI,CAAC;IAEzC,+DAA+D;IAC/D,2DAA2D;IAC3D,KAAK,IAAI,CAAC,GAAG,CA
AC,EAAE,CAAC,GAAG,EAAE,EAAE,EAAE,CAAC,EAAE;QAC3B,MAAM,IAAI,GAAG,OAAO,CAAC,MAAM,CACzB,IAAI,UAAU,CAAC,mBAAmB,EAAE,CAAC,EAAE,aAAa,CAAC,MAAM,CAAC,CAC7D,CAAC;QACF,IAAI,IAAI,KAAK,aAAa,EAAE;YAC1B,MAAM,GAAG,CAAC,CAAC;SACZ;KACF;IAED,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;KACpD;IAED,OAAO,MAAM,GAAG,mBAAmB,CAAC,UAAU,EAAE;QAC9C,MAAM,IAAI,GAAG,OAAO,CAAC,MAAM,CACzB,IAAI,UAAU,CAAC,mBAAmB,EAAE,MAAM,EACxC,IAAI,CAAC,GAAG,CAAC,aAAa,CAAC,MAAM,GAAG,IAAI,EAAE,mBAAmB,CAAC,UAAU,GAAG,MAAM,CAAC,CAC/E,CACF,CAAC;QAEF,iCAAiC;QACjC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC,EAAE;YACrD,MAAM;SACP;QAED,qFAAqF;QACrF,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;YACnC,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAC;SACtD;QAED,iDAAiD;QACjD,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAExD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAC1B,MAAM;SACP;QAED,gEAAgE;QAChE,MAAM,YAAY,GAAG,SAAS,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAEjD,kDAAkD;QAClD,MAAM,OAAO,GAAG,YAAY,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;QAChE,MAAM,EAAE,KAAK,EAAE,GAAG,EAAE,KAAK,EAAE,GAAG,iBAAiB,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC,CAAC;QAE1E,wDAAwD;QACxD,MAAM,WAAW,GAAG,MAAM,GAAG,aAAa,CAAC,MAAM,GAAG,YAAY,GAAG,QAAQ,CAAC,MAAM,CAAC;QACnF,MAAM,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;QAC3D,GAAG,CAAC,IAAI,CAAC;YACP,OAAO;YACP,IAAI,EAAE,mBAAmB,CAAC,KAAK,CAAC,WAAW,EAAE,WAAW,GAAG,MAAM,CAAC;YAClE,MAAM,EAAE,KAAK;YACb,MAAM;YACN,QAAQ,EAAE,KAAK;SAChB,CAAC,CAAC;QAEH,MAAM,GAAG,WAAW,GAAG,MAAM,GAAG,CAAC,CAAC;KACnC;IAED,OAAO,GAAG,CAAC;AACb,CAAC;AArED,0CAqEC"}
|
||||
23
node_modules/geotiff/dist-node/source/remote.d.ts
generated
vendored
Normal file
23
node_modules/geotiff/dist-node/source/remote.d.ts
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
export function makeFetchSource(url: any, { headers, credentials, maxRanges, allowFullFile, ...blockOptions }?: {
|
||||
headers?: {} | undefined;
|
||||
credentials: any;
|
||||
maxRanges?: number | undefined;
|
||||
allowFullFile?: boolean | undefined;
|
||||
}): any;
|
||||
export function makeXHRSource(url: any, { headers, maxRanges, allowFullFile, ...blockOptions }?: {
|
||||
headers?: {} | undefined;
|
||||
maxRanges?: number | undefined;
|
||||
allowFullFile?: boolean | undefined;
|
||||
}): any;
|
||||
export function makeHttpSource(url: any, { headers, maxRanges, allowFullFile, ...blockOptions }?: {
|
||||
headers?: {} | undefined;
|
||||
maxRanges?: number | undefined;
|
||||
allowFullFile?: boolean | undefined;
|
||||
}): any;
|
||||
/**
|
||||
*
|
||||
* @param {string} url
|
||||
* @param {object} options
|
||||
*/
|
||||
export function makeRemoteSource(url: string, { forceXHR, ...clientOptions }?: object): any;
|
||||
//# sourceMappingURL=remote.d.ts.map
|
||||
1
node_modules/geotiff/dist-node/source/remote.d.ts.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/remote.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"remote.d.ts","sourceRoot":"","sources":["../../dist-module/source/remote.js"],"names":[],"mappings":"AA+JA;;;;;QAIC;AAED;;;;QAIC;AAED;;;;QAIC;AAED;;;;GAIG;AACH,sCAHW,MAAM,mCACN,MAAM,OAUhB"}
|
||||
179
node_modules/geotiff/dist-node/source/remote.js
generated
vendored
Normal file
179
node_modules/geotiff/dist-node/source/remote.js
generated
vendored
Normal file
@@ -0,0 +1,179 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.makeRemoteSource = exports.makeHttpSource = exports.makeXHRSource = exports.makeFetchSource = void 0;
|
||||
const httputils_js_1 = require("./httputils.js");
|
||||
const basesource_js_1 = require("./basesource.js");
|
||||
const blockedsource_js_1 = require("./blockedsource.js");
|
||||
const fetch_js_1 = require("./client/fetch.js");
|
||||
const xhr_js_1 = require("./client/xhr.js");
|
||||
const http_js_1 = require("./client/http.js");
|
||||
class RemoteSource extends basesource_js_1.BaseSource {
|
||||
/**
|
||||
*
|
||||
* @param {BaseClient} client
|
||||
* @param {object} headers
|
||||
* @param {numbers} maxRanges
|
||||
* @param {boolean} allowFullFile
|
||||
*/
|
||||
constructor(client, headers, maxRanges, allowFullFile) {
|
||||
super();
|
||||
this.client = client;
|
||||
this.headers = headers;
|
||||
this.maxRanges = maxRanges;
|
||||
this.allowFullFile = allowFullFile;
|
||||
this._fileSize = null;
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @param {Slice[]} slices
|
||||
*/
|
||||
async fetch(slices, signal) {
|
||||
// if we allow multi-ranges, split the incoming request into that many sub-requests
|
||||
// and join them afterwards
|
||||
if (this.maxRanges >= slices.length) {
|
||||
return this.fetchSlices(slices, signal);
|
||||
}
|
||||
else if (this.maxRanges > 0 && slices.length > 1) {
|
||||
// TODO: split into multiple multi-range requests
|
||||
// const subSlicesRequests = [];
|
||||
// for (let i = 0; i < slices.length; i += this.maxRanges) {
|
||||
// subSlicesRequests.push(
|
||||
// this.fetchSlices(slices.slice(i, i + this.maxRanges), signal),
|
||||
// );
|
||||
// }
|
||||
// return (await Promise.all(subSlicesRequests)).flat();
|
||||
}
|
||||
// otherwise make a single request for each slice
|
||||
return Promise.all(slices.map((slice) => this.fetchSlice(slice, signal)));
|
||||
}
|
||||
async fetchSlices(slices, signal) {
|
||||
const response = await this.client.request({
|
||||
headers: {
|
||||
...this.headers,
|
||||
Range: `bytes=${slices
|
||||
.map(({ offset, length }) => `${offset}-${offset + length}`)
|
||||
.join(',')}`,
|
||||
},
|
||||
signal,
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error('Error fetching data.');
|
||||
}
|
||||
else if (response.status === 206) {
|
||||
const { type, params } = (0, httputils_js_1.parseContentType)(response.getHeader('content-type'));
|
||||
if (type === 'multipart/byteranges') {
|
||||
const byteRanges = (0, httputils_js_1.parseByteRanges)(await response.getData(), params.boundary);
|
||||
this._fileSize = byteRanges[0].fileSize || null;
|
||||
return byteRanges;
|
||||
}
|
||||
const data = await response.getData();
|
||||
const { start, end, total } = (0, httputils_js_1.parseContentRange)(response.getHeader('content-range'));
|
||||
this._fileSize = total || null;
|
||||
const first = [{
|
||||
data,
|
||||
offset: start,
|
||||
length: end - start,
|
||||
}];
|
||||
if (slices.length > 1) {
|
||||
// we requested more than one slice, but got only the first
|
||||
// unfortunately, some HTTP Servers don't support multi-ranges
|
||||
// and return onyl the first
|
||||
// get the rest of the slices and fetch them iteratetively
|
||||
const others = await Promise.all(slices.slice(1).map((slice) => this.fetchSlice(slice, signal)));
|
||||
return first.concat(others);
|
||||
}
|
||||
return first;
|
||||
}
|
||||
else {
|
||||
if (!this.allowFullFile) {
|
||||
throw new Error('Server responded with full file');
|
||||
}
|
||||
const data = await response.getData();
|
||||
this._fileSize = data.byteLength;
|
||||
return [{
|
||||
data,
|
||||
offset: 0,
|
||||
length: data.byteLength,
|
||||
}];
|
||||
}
|
||||
}
|
||||
async fetchSlice(slice, signal) {
|
||||
const { offset, length } = slice;
|
||||
const response = await this.client.request({
|
||||
headers: {
|
||||
...this.headers,
|
||||
Range: `bytes=${offset}-${offset + length}`,
|
||||
},
|
||||
signal,
|
||||
});
|
||||
// check the response was okay and if the server actually understands range requests
|
||||
if (!response.ok) {
|
||||
throw new Error('Error fetching data.');
|
||||
}
|
||||
else if (response.status === 206) {
|
||||
const data = await response.getData();
|
||||
const { total } = (0, httputils_js_1.parseContentRange)(response.getHeader('content-range'));
|
||||
this._fileSize = total || null;
|
||||
return {
|
||||
data,
|
||||
offset,
|
||||
length,
|
||||
};
|
||||
}
|
||||
else {
|
||||
if (!this.allowFullFile) {
|
||||
throw new Error('Server responded with full file');
|
||||
}
|
||||
const data = await response.getData();
|
||||
this._fileSize = data.byteLength;
|
||||
return {
|
||||
data,
|
||||
offset: 0,
|
||||
length: data.byteLength,
|
||||
};
|
||||
}
|
||||
}
|
||||
    /**
     * Total size of the remote file in bytes, as learned from earlier
     * responses (the `content-range` total, or the byteLength of a
     * full-file response), or `null` if no response has revealed it yet.
     * @returns {number|null}
     */
    get fileSize() {
        return this._fileSize;
    }
|
||||
}
|
||||
/**
 * Optionally wrap a source in a caching BlockedSource.
 * An explicit `blockSize` of `null` disables wrapping and returns the
 * source unchanged; any other value constructs a BlockedSource around it.
 * @param {object} source the underlying source to (maybe) wrap
 * @param {{blockSize?: number|null, cacheSize?: number}} options block options
 * @returns {object} the wrapped or original source
 */
function maybeWrapInBlockedSource(source, { blockSize, cacheSize }) {
    return blockSize === null
        ? source
        : new blockedsource_js_1.BlockedSource(source, blockSize, cacheSize);
}
|
||||
/**
 * Create a remote source backed by the Fetch API.
 * @param {string} url the URL to fetch from
 * @param {object} [options] client options; `headers`, `credentials`,
 *   `maxRanges`, and `allowFullFile` configure the RemoteSource, any
 *   remaining keys are forwarded as block/cache options
 * @returns {object} the (possibly block-cached) source
 */
function makeFetchSource(url, { headers = {}, credentials, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
    const remote = new RemoteSource(new fetch_js_1.FetchClient(url, credentials), headers, maxRanges, allowFullFile);
    return maybeWrapInBlockedSource(remote, blockOptions);
}
exports.makeFetchSource = makeFetchSource;
|
||||
/**
 * Create a remote source backed by XMLHttpRequest.
 * @param {string} url the URL to fetch from
 * @param {object} [options] client options; `headers`, `maxRanges`, and
 *   `allowFullFile` configure the RemoteSource, any remaining keys are
 *   forwarded as block/cache options
 * @returns {object} the (possibly block-cached) source
 */
function makeXHRSource(url, { headers = {}, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
    const remote = new RemoteSource(new xhr_js_1.XHRClient(url), headers, maxRanges, allowFullFile);
    return maybeWrapInBlockedSource(remote, blockOptions);
}
exports.makeXHRSource = makeXHRSource;
|
||||
/**
 * Create a remote source backed by node's http module.
 * @param {string} url the URL to fetch from
 * @param {object} [options] client options; `headers`, `maxRanges`, and
 *   `allowFullFile` configure the RemoteSource, any remaining keys are
 *   forwarded as block/cache options
 * @returns {object} the (possibly block-cached) source
 */
function makeHttpSource(url, { headers = {}, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
    const remote = new RemoteSource(new http_js_1.HttpClient(url), headers, maxRanges, allowFullFile);
    return maybeWrapInBlockedSource(remote, blockOptions);
}
exports.makeHttpSource = makeHttpSource;
|
||||
/**
 * Create a remote source using the best available HTTP client:
 * the Fetch API when present (unless `forceXHR` is set), otherwise
 * XMLHttpRequest, otherwise node's http module.
 * @param {string} url the URL to fetch from
 * @param {object} [options] `forceXHR` skips the Fetch API; all other keys
 *   are forwarded to the selected factory function
 * @returns {object} the (possibly block-cached) source
 */
function makeRemoteSource(url, { forceXHR = false, ...clientOptions } = {}) {
    const fetchUsable = typeof fetch === 'function' && !forceXHR;
    if (fetchUsable) {
        return makeFetchSource(url, clientOptions);
    }
    const xhrUsable = typeof XMLHttpRequest !== 'undefined';
    return xhrUsable
        ? makeXHRSource(url, clientOptions)
        : makeHttpSource(url, clientOptions);
}
exports.makeRemoteSource = makeRemoteSource;
|
||||
//# sourceMappingURL=remote.js.map
|
||||
1
node_modules/geotiff/dist-node/source/remote.js.map
generated
vendored
Normal file
1
node_modules/geotiff/dist-node/source/remote.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user