planning
All checks were successful
Publish To Prod / deploy_and_publish (push) Successful in 35s

This commit is contained in:
2024-10-14 09:15:30 +02:00
parent bcba00a730
commit 6e64e138e2
21059 changed files with 2317811 additions and 1 deletions

View File

@@ -0,0 +1,8 @@
/** Creates a source that reads slices out of an in-memory ArrayBuffer. */
export function makeBufferSource(arrayBuffer: any): ArrayBufferSource;
/** Source implementation backed by an ArrayBuffer held in memory. */
declare class ArrayBufferSource extends BaseSource {
    constructor(arrayBuffer: any);
    arrayBuffer: any;
}
import { BaseSource } from "./basesource.js";
export {};
//# sourceMappingURL=arraybuffer.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"arraybuffer.d.ts","sourceRoot":"","sources":["../../src/source/arraybuffer.js"],"names":[],"mappings":"AAiBA,sEAEC;AAhBD;IACE,8BAGC;IADC,iBAA8B;CASjC"}

20
node_modules/geotiff/dist-module/source/arraybuffer.js generated vendored Normal file
View File

@@ -0,0 +1,20 @@
import { BaseSource } from './basesource.js';
import { AbortError } from '../utils.js';
/**
 * A source backed by an in-memory ArrayBuffer; slices are served by
 * copying the requested range out of the buffer.
 */
class ArrayBufferSource extends BaseSource {
  /**
   * @param {ArrayBuffer} arrayBuffer the buffer to serve slices from
   */
  constructor(arrayBuffer) {
    super();
    this.arrayBuffer = arrayBuffer;
  }

  /**
   * Copy out the requested byte range.
   * @param {{offset: number, length: number}} slice the range to read
   * @param {AbortSignal} [signal] throws AbortError when already aborted
   * @returns {ArrayBuffer} a copy of the requested bytes
   */
  fetchSlice(slice, signal) {
    if (signal && signal.aborted) {
      throw new AbortError('Request aborted');
    }
    const start = slice.offset;
    const end = start + slice.length;
    return this.arrayBuffer.slice(start, end);
  }
}
/**
 * Create a source backed by an in-memory ArrayBuffer.
 * @param {ArrayBuffer} arrayBuffer the buffer to read slices from
 * @returns {ArrayBufferSource} the constructed source
 */
export function makeBufferSource(arrayBuffer) {
  return new ArrayBufferSource(arrayBuffer);
}

View File

@@ -0,0 +1,29 @@
/**
 * @typedef Slice
 * @property {number} offset
 * @property {number} length
 */
/** Abstract base class of all data sources. */
export class BaseSource {
    /**
     * Fetch several slices (delegates to fetchSlice per slice).
     * @param {Slice[]} slices
     * @returns {ArrayBuffer[]}
     */
    fetch(slices: Slice[], signal?: undefined): ArrayBuffer[];
    /**
     * Fetch a single slice; overridden by concrete sources.
     * @param {Slice} slice
     * @returns {ArrayBuffer}
     */
    fetchSlice(slice: Slice): ArrayBuffer;
    /**
     * Returns the filesize if already determined and null otherwise
     */
    get fileSize(): null;
    /** Releases held resources; no-op by default. */
    close(): Promise<void>;
}
/** A byte range within the source file. */
export type Slice = {
    offset: number;
    length: number;
};
//# sourceMappingURL=basesource.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"basesource.d.ts","sourceRoot":"","sources":["../../src/source/basesource.js"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;IACE;;;;OAIG;IACH,cAHW,KAAK,EAAE,uBACL,WAAW,EAAE,CAMzB;IAED;;;;OAIG;IACH,kBAHW,KAAK,GACH,WAAW,CAIvB;IAED;;OAEG;IACH,qBAEC;IAED,uBAEC;CACF;;YAnCa,MAAM;YACN,MAAM"}

38
node_modules/geotiff/dist-module/source/basesource.js generated vendored Normal file
View File

@@ -0,0 +1,38 @@
/**
* @typedef Slice
* @property {number} offset
* @property {number} length
*/
export class BaseSource {
/**
*
* @param {Slice[]} slices
* @returns {ArrayBuffer[]}
*/
async fetch(slices, signal = undefined) {
return Promise.all(
slices.map((slice) => this.fetchSlice(slice, signal)),
);
}
/**
*
* @param {Slice} slice
* @returns {ArrayBuffer}
*/
async fetchSlice(slice) {
throw new Error(`fetching of slice ${slice} not possible, not implemented`);
}
/**
* Returns the filesize if already determined and null otherwise
*/
get fileSize() {
return null;
}
async close() {
// no-op by default
}
}

View File

@@ -0,0 +1,45 @@
/** A caching/blocking decorator around another source. */
export class BlockedSource extends BaseSource {
    /**
     *
     * @param {Source} source The underlying source that shall be blocked and cached
     * @param {object} options
     */
    constructor(source: Source, { blockSize, cacheSize }?: object);
    source: Source;
    blockSize: any;
    blockCache: any;
    blockRequests: Map<any, any>;
    blockIdsToFetch: Set<any>;
    /**
     * Start fetching all currently pending block ids.
     * @param {AbortSignal} signal
     */
    fetchBlocks(signal: AbortSignal): void;
    /**
     * Group block ids into contiguous runs.
     * @param {Set} blockIds
     * @returns {BlockGroup[]}
     */
    groupBlocks(blockIds: Set<any>): BlockGroup[];
    /**
     * Assemble per-slice buffers out of fetched/cached blocks.
     * @param {Slice[]} slices
     * @param {Map} blocks
     */
    readSliceData(slices: Slice[], blocks: Map<any, any>): ArrayBuffer[];
}
import { BaseSource } from "./basesource.js";
/** A contiguous run of block ids with its absolute byte offset and length. */
declare class BlockGroup {
    /**
     *
     * @param {number} offset
     * @param {number} length
     * @param {number[]} blockIds
     */
    constructor(offset: number, length: number, blockIds: number[]);
    offset: number;
    length: number;
    blockIds: number[];
}
export {};
//# sourceMappingURL=blockedsource.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"blockedsource.d.ts","sourceRoot":"","sources":["../../src/source/blockedsource.js"],"names":[],"mappings":"AAuCA;IACE;;;;OAIG;IACH,uDAFW,MAAM,EAchB;IAVC,eAAoB;IACpB,eAA0B;IAE1B,gBAAkD;IAGlD,6BAA8B;IAG9B,0BAAgC;IAqHlC;;;OAGG;IACH,oBAFW,WAAW,QA6CrB;IAED;;;;OAIG;IACH,iCAFa,UAAU,EAAE,CAiCxB;IAED;;;;OAIG;IACH,sBAHW,OAAO,wCAqCjB;CACF;;AAnRD;IACE;;;;;OAKG;IACH,oBAJW,MAAM,UACN,MAAM,YACN,MAAM,EAAE,EAMlB;IAHC,eAAoB;IACpB,eAAoB;IACpB,mBAAwB;CAE3B"}

View File

@@ -0,0 +1,301 @@
import LRUCache from 'lru-cache';
import { BaseSource } from './basesource.js';
import { AbortError, AggregateError, wait, zip } from '../utils.js';
/**
 * A single cached chunk of the underlying file.
 */
class Block {
  /**
   * @param {number} offset byte offset of the block within the file
   * @param {number} length size of the block in bytes
   * @param {ArrayBuffer} [data] the payload, when already fetched
   */
  constructor(offset, length, data = null) {
    this.offset = offset;
    this.length = length;
    this.data = data;
  }

  /**
   * @returns {number} the top byte border
   */
  get top() {
    return this.length + this.offset;
  }
}
/**
 * A contiguous run of block ids, expressed as an absolute byte range.
 */
class BlockGroup {
  /**
   * @param {number} offset absolute byte offset of the first block
   * @param {number} length combined byte length of the run
   * @param {number[]} blockIds the ids of the contained blocks
   */
  constructor(offset, length, blockIds) {
    this.offset = offset;
    this.length = length;
    this.blockIds = blockIds;
  }
}
/**
 * A source decorator that chunks reads into fixed-size blocks, caches
 * fetched blocks in an LRU cache and de-duplicates concurrent requests
 * for the same block.
 */
export class BlockedSource extends BaseSource {
  /**
   * @param {Source} source The underlying source that shall be blocked and cached
   * @param {object} options
   * @param {number} [options.blockSize=65536] size of a single block in bytes
   * @param {number} [options.cacheSize=100] maximum number of cached blocks
   */
  constructor(source, { blockSize = 65536, cacheSize = 100 } = {}) {
    super();
    this.source = source;
    this.blockSize = blockSize;
    this.blockCache = new LRUCache({ max: cacheSize });
    // mapping blockId -> Block instance
    this.blockRequests = new Map();
    // set of blockIds missing for the current requests
    this.blockIdsToFetch = new Set();
  }

  get fileSize() {
    return this.source.fileSize;
  }

  /**
   * Fetch the given slices, serving them from cached blocks where possible.
   * @param {basesource/Slice[]} slices
   * @param {AbortSignal} [signal]
   * @returns {Promise<ArrayBuffer[]>} one ArrayBuffer per requested slice
   */
  async fetch(slices, signal) {
    const cachedBlocks = new Map();
    const blockRequests = new Map();
    const missingBlockIds = new Set();
    for (const { offset, length } of slices) {
      let top = offset + length;
      const { fileSize } = this;
      if (fileSize !== null) {
        top = Math.min(top, fileSize);
      }
      const firstBlockOffset = Math.floor(offset / this.blockSize) * this.blockSize;
      // chunk the current slice into blocks
      for (let current = firstBlockOffset; current < top; current += this.blockSize) {
        // check if the block is cached, being requested or still missing
        const blockId = Math.floor(current / this.blockSize);
        if (this.blockCache.has(blockId)) {
          cachedBlocks.set(blockId, this.blockCache.get(blockId));
        } else if (this.blockRequests.has(blockId)) {
          blockRequests.set(blockId, this.blockRequests.get(blockId));
        } else if (this.blockIdsToFetch.has(blockId)) {
          missingBlockIds.add(blockId);
        } else {
          this.blockIdsToFetch.add(blockId);
          missingBlockIds.add(blockId);
        }
      }
    }
    // allow additional block requests to accumulate
    await wait();
    this.fetchBlocks(signal);
    for (const blockId of missingBlockIds) {
      const block = this.blockRequests.get(blockId);
      const cachedBlock = this.blockCache.get(blockId);
      if (block) {
        blockRequests.set(blockId, block);
      } else if (cachedBlock) {
        cachedBlocks.set(blockId, cachedBlock);
      } else {
        throw new Error(`Block ${blockId} is not in the block requests`);
      }
    }
    // actually await all pending requests
    let results = await Promise.allSettled(Array.from(blockRequests.values()));
    // perform retries if a block was interrupted by a previous signal
    if (results.some((result) => result.status === 'rejected')) {
      const retriedBlockRequests = new Set();
      for (const [blockId, result] of zip(blockRequests.keys(), results)) {
        // Promise.allSettled results expose `status`/`reason`, not a
        // `rejected` property — the previous destructuring of `rejected`
        // was always undefined, so retries never ran
        if (result.status === 'rejected') {
          const { reason } = result;
          // push some blocks back to the to-fetch list if they were
          // aborted, but only when a different signal was used
          if (reason.name === 'AbortError' && reason.signal !== signal) {
            this.blockIdsToFetch.add(blockId);
            retriedBlockRequests.add(blockId);
          }
        }
      }
      // start the retry of some blocks if required
      // (Set exposes `size`, not `length` — `.length > 0` was always false)
      if (this.blockIdsToFetch.size > 0) {
        this.fetchBlocks(signal);
        for (const blockId of retriedBlockRequests) {
          const block = this.blockRequests.get(blockId);
          if (!block) {
            throw new Error(`Block ${blockId} is not in the block requests`);
          }
          blockRequests.set(blockId, block);
        }
        results = await Promise.allSettled(Array.from(blockRequests.values()));
      }
    }
    // throw an error (either abort error or AggregateError if no abort was done)
    if (results.some((result) => result.status === 'rejected')) {
      if (signal && signal.aborted) {
        throw new AbortError('Request was aborted');
      }
      throw new AggregateError(
        results.filter((result) => result.status === 'rejected').map((result) => result.reason),
        'Request failed',
      );
    }
    // extract the actual block responses
    const values = results.map((result) => result.value);
    // create a final Map, with all required blocks for this request to satisfy
    const requiredBlocks = new Map(zip(Array.from(blockRequests.keys()), values));
    for (const [blockId, block] of cachedBlocks) {
      requiredBlocks.set(blockId, block);
    }
    return this.readSliceData(slices, requiredBlocks);
  }

  /**
   * Start fetching all blocks currently marked as to-fetch, registering
   * one pending request per block id.
   * @param {AbortSignal} signal
   */
  fetchBlocks(signal) {
    // check if we still need to
    if (this.blockIdsToFetch.size > 0) {
      const groups = this.groupBlocks(this.blockIdsToFetch);
      // start requesting slices of data
      const groupRequests = this.source.fetch(groups, signal);
      for (let groupIndex = 0; groupIndex < groups.length; ++groupIndex) {
        const group = groups[groupIndex];
        for (const blockId of group.blockIds) {
          // make an async IIFE for each block
          const blockRequest = (async () => {
            try {
              const response = (await groupRequests)[groupIndex];
              const blockOffset = blockId * this.blockSize;
              const o = blockOffset - response.offset;
              const t = Math.min(o + this.blockSize, response.data.byteLength);
              const data = response.data.slice(o, t);
              const block = new Block(
                blockOffset,
                data.byteLength,
                data,
              );
              this.blockCache.set(blockId, block);
              return block;
            } catch (err) {
              if (err.name === 'AbortError') {
                // store the signal here, we need it to determine later if an
                // error was caused by this signal
                err.signal = signal;
              }
              throw err;
            } finally {
              this.blockRequests.delete(blockId);
            }
          })();
          this.blockRequests.set(blockId, blockRequest);
        }
      }
      this.blockIdsToFetch.clear();
    }
  }

  /**
   * Group a set of block ids into contiguous runs.
   * @param {Set} blockIds
   * @returns {BlockGroup[]}
   */
  groupBlocks(blockIds) {
    const sortedBlockIds = Array.from(blockIds).sort((a, b) => a - b);
    if (sortedBlockIds.length === 0) {
      return [];
    }
    let current = [];
    let lastBlockId = null;
    const groups = [];
    for (const blockId of sortedBlockIds) {
      if (lastBlockId === null || lastBlockId + 1 === blockId) {
        current.push(blockId);
        lastBlockId = blockId;
      } else {
        groups.push(new BlockGroup(
          current[0] * this.blockSize,
          current.length * this.blockSize,
          current,
        ));
        current = [blockId];
        lastBlockId = blockId;
      }
    }
    groups.push(new BlockGroup(
      current[0] * this.blockSize,
      current.length * this.blockSize,
      current,
    ));
    return groups;
  }

  /**
   * Assemble the response buffer for each slice from the fetched/cached
   * blocks.
   * @param {Slice[]} slices
   * @param {Map} blocks
   * @returns {ArrayBuffer[]}
   */
  readSliceData(slices, blocks) {
    return slices.map((slice) => {
      const top = slice.offset + slice.length;
      const blockIdLow = Math.floor(slice.offset / this.blockSize);
      // id of the last block that actually overlaps the slice;
      // Math.floor(top / blockSize) referenced one block too many when
      // `top` was an exact multiple of the block size — a block the fetch
      // loop (`current < top`) never requested
      const blockIdHigh = Math.ceil(top / this.blockSize) - 1;
      const sliceData = new ArrayBuffer(slice.length);
      const sliceView = new Uint8Array(sliceData);
      for (let blockId = blockIdLow; blockId <= blockIdHigh; ++blockId) {
        const block = blocks.get(blockId);
        if (!block) {
          // blocks past the end of the file are clamped away in `fetch`
          // and thus never requested — skip them
          continue;
        }
        const delta = block.offset - slice.offset;
        const topDelta = block.top - top;
        let blockInnerOffset = 0;
        let rangeInnerOffset = 0;
        let usedBlockLength;
        if (delta < 0) {
          blockInnerOffset = -delta;
        } else if (delta > 0) {
          rangeInnerOffset = delta;
        }
        if (topDelta < 0) {
          usedBlockLength = block.length - blockInnerOffset;
        } else {
          usedBlockLength = top - block.offset - blockInnerOffset;
        }
        const blockView = new Uint8Array(block.data, blockInnerOffset, usedBlockLength);
        sliceView.set(blockView, rangeInnerOffset);
      }
      return sliceData;
    });
  }
}

View File

@@ -0,0 +1,30 @@
/** Abstract response wrapper implemented by each client transport. */
export class BaseResponse {
    /**
     * Returns whether the response has an ok'ish status code
     */
    get ok(): boolean;
    /**
     * Returns the status code of the response
     */
    get status(): void;
    /**
     * Returns the value of the specified header
     * @param {string} headerName the header name
     * @returns {string} the header value
     */
    getHeader(headerName: string): string;
    /**
     * @returns {ArrayBuffer} the response data of the request
     */
    getData(): ArrayBuffer;
}
/** Abstract client bound to a single URL. */
export class BaseClient {
    constructor(url: any);
    url: any;
    /**
     * Send a request with the options
     * @param {object} [options]
     */
    request({ headers, credentials, signal }?: object): Promise<void>;
}
//# sourceMappingURL=base.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"base.d.ts","sourceRoot":"","sources":["../../../src/source/client/base.js"],"names":[],"mappings":"AAAA;IACE;;OAEG;IACH,kBAEC;IAED;;OAEG;IACH,mBAEC;IAED;;;;OAIG;IACH,sBAHW,MAAM,GACJ,MAAM,CAIlB;IAED;;OAEG;IACH,WAFa,WAAW,CAIvB;CACF;AAED;IACE,sBAEC;IADC,SAAc;IAGhB;;;OAGG;IACH,2CAFW,MAAM,iBAIhB;CACF"}

45
node_modules/geotiff/dist-module/source/client/base.js generated vendored Normal file
View File

@@ -0,0 +1,45 @@
/**
 * Abstract response wrapper. Concrete transports (fetch/XHR/node http)
 * provide `status`, `getHeader` and `getData`.
 */
export class BaseResponse {
  /**
   * Returns whether the response has an ok'ish status code
   */
  get ok() {
    return this.status >= 200 && this.status <= 299;
  }

  /**
   * Returns the status code of the response
   */
  get status() {
    // must be provided by the concrete subclass
    throw new Error('not implemented');
  }

  /**
   * Returns the value of the specified header
   * @param {string} headerName the header name
   * @returns {string} the header value
   */
  getHeader(headerName) { // eslint-disable-line no-unused-vars
    // must be provided by the concrete subclass
    throw new Error('not implemented');
  }

  /**
   * @returns {ArrayBuffer} the response data of the request
   */
  async getData() {
    // must be provided by the concrete subclass
    throw new Error('not implemented');
  }
}
/**
 * Abstract client bound to a single URL; subclasses implement `request`.
 */
export class BaseClient {
  // url: the resource all requests of this client are made against
  constructor(url) {
    this.url = url;
  }

  /**
   * Send a request with the options
   * @param {object} [options]
   */
  async request({ headers, credentials, signal } = {}) { // eslint-disable-line no-unused-vars
    // must be provided by the concrete subclass
    throw new Error('request is not implemented');
  }
}

View File

@@ -0,0 +1,6 @@
/** Client implementation based on the global `fetch` API. */
export class FetchClient extends BaseClient {
    constructor(url: any, credentials: any);
    credentials: any;
}
import { BaseClient } from "./base.js";
//# sourceMappingURL=fetch.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"fetch.d.ts","sourceRoot":"","sources":["../../../src/source/client/fetch.js"],"names":[],"mappings":"AA4BA;IACE,wCAGC;IADC,iBAA8B;CASjC"}

View File

@@ -0,0 +1,41 @@
import { BaseClient, BaseResponse } from './base.js';
/**
 * BaseResponse facade for a fetch API Response.
 */
class FetchResponse extends BaseResponse {
  /**
   * @param {Response} response the wrapped fetch response
   */
  constructor(response) {
    super();
    this.response = response;
  }

  /** @returns {number} the HTTP status code */
  get status() {
    return this.response.status;
  }

  /** Look up a response header by name. */
  getHeader(name) {
    return this.response.headers.get(name);
  }

  /** Read the full response body as an ArrayBuffer. */
  async getData() {
    if (this.response.arrayBuffer) {
      return this.response.arrayBuffer();
    }
    // fallback for node-fetch style responses exposing `.buffer()`
    const buffered = await this.response.buffer();
    return buffered.buffer;
  }
}
/**
 * Client performing GET requests through the global `fetch` function.
 */
export class FetchClient extends BaseClient {
  /**
   * @param {string} url the URL to request
   * @param {string} [credentials] credentials mode forwarded to fetch
   */
  constructor(url, credentials) {
    super(url);
    this.credentials = credentials;
  }

  /** Perform the request and wrap the result in a FetchResponse. */
  async request({ headers, credentials, signal } = {}) {
    const options = { headers, credentials, signal };
    const response = await fetch(this.url, options);
    return new FetchResponse(response);
  }
}

View File

@@ -0,0 +1,7 @@
/** Client implementation using node's http/https modules. */
export class HttpClient extends BaseClient {
    parsedUrl: any;
    httpApi: any;
    constructRequest(headers: any, signal: any): Promise<any>;
}
import { BaseClient } from "./base.js";
//# sourceMappingURL=http.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"http.d.ts","sourceRoot":"","sources":["../../../src/source/client/http.js"],"names":[],"mappings":"AAgCA;IAGI,eAAuC;IACvC,aAAmE;IAGrE,0DAmCC;CAMF"}

81
node_modules/geotiff/dist-module/source/client/http.js generated vendored Normal file
View File

@@ -0,0 +1,81 @@
import http from 'http';
import https from 'https';
import urlMod from 'url';
import { BaseClient, BaseResponse } from './base.js';
import { AbortError } from '../../utils.js';
/**
 * BaseResponse facade for node HTTP/HTTPS API responses.
 */
class HttpResponse extends BaseResponse {
  /**
   * @param {http.ServerResponse} response the wrapped node response
   * @param {Promise<ArrayBuffer>} dataPromise resolves to the buffered body
   */
  constructor(response, dataPromise) {
    super();
    this.response = response;
    this.dataPromise = dataPromise;
  }

  /** @returns {number} the HTTP status code */
  get status() {
    return this.response.statusCode;
  }

  /** Look up a (lowercase) header name on the node response. */
  getHeader(name) {
    return this.response.headers[name];
  }

  /** @returns {Promise<ArrayBuffer>} the buffered response body */
  async getData() {
    return this.dataPromise;
  }
}
/**
 * Client using node's http/https modules, chosen by the URL's protocol.
 */
export class HttpClient extends BaseClient {
  constructor(url) {
    super(url);
    this.parsedUrl = urlMod.parse(this.url);
    this.httpApi = (this.parsedUrl.protocol === 'http:' ? http : https);
  }

  /**
   * Issue a GET request; resolves to an HttpResponse as soon as response
   * headers are available, while the body is buffered in a separate promise.
   * @param {object} headers the request headers
   * @param {AbortSignal} [signal] optional abort signal
   * @returns {Promise<HttpResponse>}
   */
  constructRequest(headers, signal) {
    return new Promise((resolve, reject) => {
      const request = this.httpApi.get(
        {
          ...this.parsedUrl,
          headers,
        },
        (response) => {
          const dataPromise = new Promise((resolveData) => {
            const chunks = [];
            // collect chunks
            response.on('data', (chunk) => {
              chunks.push(chunk);
            });
            // concatenate all chunks and resolve the promise with the resulting buffer
            response.on('end', () => {
              const data = Buffer.concat(chunks).buffer;
              resolveData(data);
            });
            // NOTE(review): this rejects the OUTER promise, not dataPromise —
            // stream errors after headers arrive surface via constructRequest
            response.on('error', reject);
          });
          resolve(new HttpResponse(response, dataPromise));
        },
      );
      request.on('error', reject);
      if (signal) {
        if (signal.aborted) {
          // already aborted: destroy the request immediately with an AbortError
          request.destroy(new AbortError('Request aborted'));
        }
        signal.addEventListener('abort', () => request.destroy(new AbortError('Request aborted')));
      }
    });
  }

  /** Send a GET request with the given headers/signal. */
  async request({ headers, signal } = {}) {
    const response = await this.constructRequest(headers, signal);
    return response;
  }
}

View File

@@ -0,0 +1,5 @@
/** Client implementation based on XMLHttpRequest. */
export class XHRClient extends BaseClient {
    constructRequest(headers: any, signal: any): Promise<any>;
}
import { BaseClient } from "./base.js";
//# sourceMappingURL=xhr.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"xhr.d.ts","sourceRoot":"","sources":["../../../src/source/client/xhr.js"],"names":[],"mappings":"AA4BA;IACE,0DAyBC;CAMF"}

61
node_modules/geotiff/dist-module/source/client/xhr.js generated vendored Normal file
View File

@@ -0,0 +1,61 @@
import { BaseClient, BaseResponse } from './base.js';
import { AbortError } from '../../utils.js';
/**
 * BaseResponse facade for XMLHttpRequest.
 */
class XHRResponse extends BaseResponse {
  /**
   * @param {XMLHttpRequest} xhr the completed request
   * @param {ArrayBuffer} data the response payload
   */
  constructor(xhr, data) {
    super();
    this.xhr = xhr;
    this.data = data;
  }

  /** @returns {number} the HTTP status code */
  get status() {
    return this.xhr.status;
  }

  /** Read a response header from the underlying XHR object. */
  getHeader(name) {
    return this.xhr.getResponseHeader(name);
  }

  /** @returns {Promise<ArrayBuffer>} the already-buffered payload */
  async getData() {
    return this.data;
  }
}
/**
 * Client performing GET requests through XMLHttpRequest.
 */
export class XHRClient extends BaseClient {
  /**
   * Issue a GET request and resolve once it loads.
   * @param {object} headers the request headers to set
   * @param {AbortSignal} [signal] optional abort signal
   * @returns {Promise<XHRResponse>}
   */
  constructRequest(headers, signal) {
    return new Promise((resolve, reject) => {
      const xhr = new XMLHttpRequest();
      xhr.open('GET', this.url);
      xhr.responseType = 'arraybuffer';
      for (const [key, value] of Object.entries(headers)) {
        xhr.setRequestHeader(key, value);
      }
      // hook signals
      xhr.onload = () => {
        const data = xhr.response;
        resolve(new XHRResponse(xhr, data));
      };
      xhr.onerror = reject;
      xhr.onabort = () => reject(new AbortError('Request aborted'));
      xhr.send();
      if (signal) {
        if (signal.aborted) {
          // already aborted: cancel right away (rejects via onabort)
          xhr.abort();
        }
        signal.addEventListener('abort', () => xhr.abort());
      }
    });
  }

  /** Send a GET request with the given headers/signal. */
  async request({ headers, signal } = {}) {
    const response = await this.constructRequest(headers, signal);
    return response;
  }
}

9
node_modules/geotiff/dist-module/source/file.d.ts generated vendored Normal file
View File

@@ -0,0 +1,9 @@
/** Creates a source reading from a local file via node's fs module. */
export function makeFileSource(path: any): FileSource;
/** Source implementation reading slices from an opened file descriptor. */
declare class FileSource extends BaseSource {
    constructor(path: any);
    path: any;
    openRequest: Promise<any>;
}
import { BaseSource } from "./basesource.js";
export {};
//# sourceMappingURL=file.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../src/source/file.js"],"names":[],"mappings":"AAiEA,sDAEC;AA5BD;IACE,uBAIC;IAFC,UAAgB;IAChB,0BAAuC;CAoB1C"}

68
node_modules/geotiff/dist-module/source/file.js generated vendored Normal file
View File

@@ -0,0 +1,68 @@
import fs from 'fs';
import { BaseSource } from './basesource.js';
function closeAsync(fd) {
return new Promise((resolve, reject) => {
fs.close(fd, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
function openAsync(path, flags, mode = undefined) {
return new Promise((resolve, reject) => {
fs.open(path, flags, mode, (err, fd) => {
if (err) {
reject(err);
} else {
resolve(fd);
}
});
});
}
function readAsync(...args) {
return new Promise((resolve, reject) => {
fs.read(...args, (err, bytesRead, buffer) => {
if (err) {
reject(err);
} else {
resolve({ bytesRead, buffer });
}
});
});
}
/**
 * A source reading slices out of a local file via node's fs module.
 * The file is opened once on construction and closed via `close()`.
 */
class FileSource extends BaseSource {
  constructor(path) {
    super();
    this.path = path;
    // opening starts immediately; fetchSlice awaits the descriptor
    this.openRequest = openAsync(path, 'r');
  }

  /**
   * Read a single byte range from the opened file.
   * @param {{offset: number, length: number}} slice
   * @returns {Promise<ArrayBuffer>}
   */
  async fetchSlice(slice) {
    // TODO: use `signal`
    const fd = await this.openRequest;
    const target = Buffer.alloc(slice.length);
    const { buffer } = await readAsync(fd, target, 0, slice.length, slice.offset);
    return buffer.buffer;
  }

  /** Close the underlying file descriptor. */
  async close() {
    await closeAsync(await this.openRequest);
  }
}
/**
 * Create a new source reading from a local file via the node fs API.
 * @param {string} path the path of the file to read from
 * @returns {FileSource} the constructed source
 */
export function makeFileSource(path) {
  return new FileSource(path);
}

View File

@@ -0,0 +1,13 @@
/**
 * Create a new source from a given file/blob.
 * @param {Blob} file The file or blob to read from.
 * @returns The constructed source
 */
export function makeFileReaderSource(file: Blob): FileReaderSource;
/** Source implementation reading ranges from a Blob via FileReader. */
declare class FileReaderSource extends BaseSource {
    constructor(file: any);
    file: any;
}
import { BaseSource } from "./basesource.js";
export {};
//# sourceMappingURL=filereader.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"filereader.d.ts","sourceRoot":"","sources":["../../src/source/filereader.js"],"names":[],"mappings":"AAwBA;;;;GAIG;AACH,2CAHW,IAAI,oBAKd;AA7BD;IACE,uBAGC;IADC,UAAgB;CAiBnB"}

32
node_modules/geotiff/dist-module/source/filereader.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
import { BaseSource } from './basesource.js';
/**
 * A source that reads byte ranges out of a Blob/File using the
 * FileReader API.
 */
class FileReaderSource extends BaseSource {
  constructor(file) {
    super();
    this.file = file;
  }

  /**
   * Read a single byte range from the underlying blob.
   * @param {{offset: number, length: number}} slice
   * @param {AbortSignal} [signal] aborts the FileReader when fired
   * @returns {Promise<ArrayBuffer>}
   */
  async fetchSlice(slice, signal) {
    const start = slice.offset;
    const blob = this.file.slice(start, start + slice.length);
    return new Promise((resolve, reject) => {
      const reader = new FileReader();
      reader.onload = (event) => resolve(event.target.result);
      reader.onerror = reject;
      reader.onabort = reject;
      if (signal) {
        signal.addEventListener('abort', () => reader.abort());
      }
      reader.readAsArrayBuffer(blob);
    });
  }
}
/**
 * Create a new source from a given file/blob.
 * @param {Blob} file The file or blob to read from.
 * @returns {FileReaderSource} The constructed source
 */
export function makeFileReaderSource(file) {
  return new FileReaderSource(file);
}

25
node_modules/geotiff/dist-module/source/httputils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,25 @@
/**
 * Parse a 'Content-Type' header value to the content-type and parameters
 * @param {String} rawContentType the raw string to parse from
 * @returns {Object} the parsed content type with the fields: type and params
 */
export function parseContentType(rawContentType: string): any;
/**
 * Parse a 'Content-Range' header value to its start, end, and total parts
 * (fields are undefined when no header value is given)
 * @param {String} rawContentRange the raw string to parse from
 * @returns {Object} the parsed parts
 */
export function parseContentRange(rawContentRange: string): any;
/**
 * Parses a list of byteranges from the given 'multipart/byteranges' HTTP response.
 * Each item in the list has the following properties:
 * - headers: the HTTP headers
 * - data: the sliced ArrayBuffer for that specific part
 * - offset: the offset of the byterange within its originating file
 * - length: the length of the byterange
 * @param {ArrayBuffer} responseArrayBuffer the response to be parsed and split
 * @param {String} boundary the boundary string used to split the sections
 * @returns {Object[]} the parsed byteranges
 */
export function parseByteRanges(responseArrayBuffer: ArrayBuffer, boundary: string): any[];
//# sourceMappingURL=httputils.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"httputils.d.ts","sourceRoot":"","sources":["../../src/source/httputils.js"],"names":[],"mappings":"AAiCA;;;;GAIG;AACH,8DAIC;AAED;;;;GAIG;AACH,gEAaC;AAED;;;;;;;;;;GAUG;AACH,qDAJW,WAAW,qBAET,KAAQ,CAuEpB"}

145
node_modules/geotiff/dist-module/source/httputils.js generated vendored Normal file
View File

@@ -0,0 +1,145 @@
// the byte sequence that separates header block from body in multipart parts
const CRLFCRLF = '\r\n\r\n';

/*
 * Shim for 'Object.fromEntries': builds an object from [key, value] pairs.
 */
function itemsToObject(items) {
  if (typeof Object.fromEntries !== 'undefined') {
    return Object.fromEntries(items);
  }
  const obj = {};
  for (const [key, value] of items) {
    // keep keys as-is so the fallback behaves exactly like the native
    // Object.fromEntries path (the previous fallback lowercased keys,
    // making behavior depend on the runtime)
    obj[key] = value;
  }
  return obj;
}
/**
 * Parse HTTP headers from a given string.
 * @param {String} text the text to parse the headers from
 * @returns {Object} the parsed headers with lowercase keys
 */
function parseHeaders(text) {
  const items = text
    .split('\r\n')
    .map((line) => {
      // split on the FIRST ':' only — header values (e.g. dates such as
      // 'Mon, 01 Jan 2024 10:00:00 GMT') may themselves contain ':'
      // characters, which a plain split(':') would truncate
      const sep = line.indexOf(':');
      if (sep === -1) {
        return [line.trim().toLowerCase(), undefined];
      }
      const key = line.slice(0, sep).trim().toLowerCase();
      const value = line.slice(sep + 1).trim();
      return [key, value];
    });
  return itemsToObject(items);
}
/**
 * Parse a 'Content-Type' header value to the content-type and parameters
 * @param {String} rawContentType the raw string to parse from
 * @returns {Object} the parsed content type with the fields: type and params
 */
export function parseContentType(rawContentType) {
  const [type, ...rawParams] = rawContentType.split(';').map((s) => s.trim());
  // split each parameter on the FIRST '=' only — parameter values (e.g.
  // boundary strings) may themselves contain '=' characters, which a plain
  // split('=') would truncate
  const paramsItems = rawParams.map((param) => {
    const sep = param.indexOf('=');
    if (sep === -1) {
      return [param];
    }
    return [param.slice(0, sep), param.slice(sep + 1)];
  });
  return { type, params: itemsToObject(paramsItems) };
}
/**
 * Parse a 'Content-Range' header value to its start, end, and total parts
 * @param {String} rawContentRange the raw string to parse from
 * @returns {Object} the parsed parts (all undefined for a falsy input)
 */
export function parseContentRange(rawContentRange) {
  if (!rawContentRange) {
    return { start: undefined, end: undefined, total: undefined };
  }
  const match = rawContentRange.match(/bytes (\d+)-(\d+)\/(\d+)/);
  const [, start, end, total] = match;
  return {
    start: parseInt(start, 10),
    end: parseInt(end, 10),
    total: parseInt(total, 10),
  };
}
/**
 * Parses a list of byteranges from the given 'multipart/byteranges' HTTP response.
 * Each item in the list has the following properties:
 * - headers: the HTTP headers
 * - data: the sliced ArrayBuffer for that specific part
 * - offset: the offset of the byterange within its originating file
 * - length: the length of the byterange
 * @param {ArrayBuffer} responseArrayBuffer the response to be parsed and split
 * @param {String} boundary the boundary string used to split the sections
 * @returns {Object[]} the parsed byteranges
 */
export function parseByteRanges(responseArrayBuffer, boundary) {
  let offset = null;
  const decoder = new TextDecoder('ascii');
  const out = [];
  const startBoundary = `--${boundary}`;
  const endBoundary = `${startBoundary}--`;
  // search for the initial boundary, may be offset by some bytes
  // TODO: more efficient to check for `--` in bytes directly
  for (let i = 0; i < 10; ++i) {
    const text = decoder.decode(
      new Uint8Array(responseArrayBuffer, i, startBoundary.length),
    );
    if (text === startBoundary) {
      offset = i;
    }
  }
  if (offset === null) {
    throw new Error('Could not find initial boundary');
  }
  // walk part by part until the terminating boundary or the buffer end
  while (offset < responseArrayBuffer.byteLength) {
    // decode just enough text to see the boundary plus the part's headers
    const text = decoder.decode(
      new Uint8Array(responseArrayBuffer, offset,
        Math.min(startBoundary.length + 1024, responseArrayBuffer.byteLength - offset),
      ),
    );
    // break if we arrived at the end
    if (text.length === 0 || text.startsWith(endBoundary)) {
      break;
    }
    // assert that we are actually dealing with a byterange and are at the correct offset
    if (!text.startsWith(startBoundary)) {
      throw new Error('Part does not start with boundary');
    }
    // get a substring from where we read the headers
    // (skips the boundary and the following CRLF)
    const innerText = text.substr(startBoundary.length + 2);
    if (innerText.length === 0) {
      break;
    }
    // find the double linebreak that denotes the end of the headers
    const endOfHeaders = innerText.indexOf(CRLFCRLF);
    // parse the headers to get the content range size
    const headers = parseHeaders(innerText.substr(0, endOfHeaders));
    const { start, end, total } = parseContentRange(headers['content-range']);
    // calculate the length of the slice and the next offset
    // NOTE(review): endOfHeaders is relative to innerText, which starts 2
    // bytes after the boundary — verify the data offset accounts for this
    const startOfData = offset + startBoundary.length + endOfHeaders + CRLFCRLF.length;
    // Content-Range end is inclusive, hence the +1
    const length = parseInt(end, 10) + 1 - parseInt(start, 10);
    out.push({
      headers,
      data: responseArrayBuffer.slice(startOfData, startOfData + length),
      offset: start,
      length,
      fileSize: total,
    });
    // advance past the data; the extra 4 bytes presumably skip the trailing
    // CRLF and the next boundary's leading CRLF — confirm against real payloads
    offset = startOfData + length + 4;
  }
  return out;
}

23
node_modules/geotiff/dist-module/source/remote.d.ts generated vendored Normal file
View File

@@ -0,0 +1,23 @@
/** Creates a (blocked/cached) remote source using the fetch API. */
export function makeFetchSource(url: any, { headers, credentials, maxRanges, allowFullFile, ...blockOptions }?: {
    headers?: {} | undefined;
    credentials: any;
    maxRanges?: number | undefined;
    allowFullFile?: boolean | undefined;
}): any;
/** Creates a (blocked/cached) remote source using XMLHttpRequest. */
export function makeXHRSource(url: any, { headers, maxRanges, allowFullFile, ...blockOptions }?: {
    headers?: {} | undefined;
    maxRanges?: number | undefined;
    allowFullFile?: boolean | undefined;
}): any;
/** Creates a (blocked/cached) remote source using node's http/https modules. */
export function makeHttpSource(url: any, { headers, maxRanges, allowFullFile, ...blockOptions }?: {
    headers?: {} | undefined;
    maxRanges?: number | undefined;
    allowFullFile?: boolean | undefined;
}): any;
/**
 * Chooses fetch, XHR or node http transport depending on the environment.
 * @param {string} url
 * @param {object} options
 */
export function makeRemoteSource(url: string, { forceXHR, ...clientOptions }?: object): any;
//# sourceMappingURL=remote.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"remote.d.ts","sourceRoot":"","sources":["../../src/source/remote.js"],"names":[],"mappings":"AA+JA;;;;;QAIC;AAED;;;;QAIC;AAED;;;;QAIC;AAED;;;;GAIG;AACH,sCAHW,MAAM,mCACN,MAAM,OAUhB"}

191
node_modules/geotiff/dist-module/source/remote.js generated vendored Normal file
View File

@@ -0,0 +1,191 @@
import { parseByteRanges, parseContentRange, parseContentType } from './httputils.js';
import { BaseSource } from './basesource.js';
import { BlockedSource } from './blockedsource.js';
import { FetchClient } from './client/fetch.js';
import { XHRClient } from './client/xhr.js';
import { HttpClient } from './client/http.js';
/**
 * A source fetching byte ranges from a remote server through the given
 * client implementation.
 */
class RemoteSource extends BaseSource {
  /**
   * @param {BaseClient} client the transport used for requests
   * @param {object} headers additional headers sent with each request
   * @param {number} maxRanges how many ranges may be combined per request
   * @param {boolean} allowFullFile whether a full-file response is acceptable
   */
  constructor(client, headers, maxRanges, allowFullFile) {
    super();
    this.client = client;
    this.headers = headers;
    this.maxRanges = maxRanges;
    this.allowFullFile = allowFullFile;
    this._fileSize = null;
  }

  /**
   * Fetch all slices: either as one multi-range request or one request
   * per slice.
   * @param {Slice[]} slices
   * @param {AbortSignal} [signal]
   */
  async fetch(slices, signal) {
    // if we allow multi-ranges, split the incoming request into that many sub-requests
    // and join them afterwards
    if (this.maxRanges >= slices.length) {
      return this.fetchSlices(slices, signal);
    } else if (this.maxRanges > 0 && slices.length > 1) {
      // TODO: split into multiple multi-range requests
      // (currently falls through to one request per slice)
      // const subSlicesRequests = [];
      // for (let i = 0; i < slices.length; i += this.maxRanges) {
      //   subSlicesRequests.push(
      //     this.fetchSlices(slices.slice(i, i + this.maxRanges), signal),
      //   );
      // }
      // return (await Promise.all(subSlicesRequests)).flat();
    }
    // otherwise make a single request for each slice
    return Promise.all(
      slices.map((slice) => this.fetchSlice(slice, signal)),
    );
  }

  /**
   * Fetch several slices via a single multi-range request.
   * NOTE(review): the Range end is `offset + length`, i.e. one past the
   * last required byte (HTTP ranges are end-inclusive) — presumably a
   * deliberate one-byte over-fetch; confirm before changing.
   */
  async fetchSlices(slices, signal) {
    const response = await this.client.request({
      headers: {
        ...this.headers,
        Range: `bytes=${slices
          .map(({ offset, length }) => `${offset}-${offset + length}`)
          .join(',')
        }`,
      },
      signal,
    });
    if (!response.ok) {
      throw new Error('Error fetching data.');
    } else if (response.status === 206) {
      const { type, params } = parseContentType(response.getHeader('content-type'));
      if (type === 'multipart/byteranges') {
        const byteRanges = parseByteRanges(await response.getData(), params.boundary);
        this._fileSize = byteRanges[0].fileSize || null;
        return byteRanges;
      }
      const data = await response.getData();
      const { start, end, total } = parseContentRange(response.getHeader('content-range'));
      this._fileSize = total || null;
      const first = [{
        data,
        offset: start,
        length: end - start,
      }];
      if (slices.length > 1) {
        // we requested more than one slice, but got only the first
        // unfortunately, some HTTP Servers don't support multi-ranges
        // and return only the first
        // get the rest of the slices and fetch them iteratively
        const others = await Promise.all(slices.slice(1).map((slice) => this.fetchSlice(slice, signal)));
        return first.concat(others);
      }
      return first;
    } else {
      if (!this.allowFullFile) {
        throw new Error('Server responded with full file');
      }
      const data = await response.getData();
      this._fileSize = data.byteLength;
      return [{
        data,
        offset: 0,
        length: data.byteLength,
      }];
    }
  }

  /** Fetch a single slice with a plain range request. */
  async fetchSlice(slice, signal) {
    const { offset, length } = slice;
    const response = await this.client.request({
      headers: {
        ...this.headers,
        Range: `bytes=${offset}-${offset + length}`,
      },
      signal,
    });
    // check the response was okay and if the server actually understands range requests
    if (!response.ok) {
      throw new Error('Error fetching data.');
    } else if (response.status === 206) {
      const data = await response.getData();
      const { total } = parseContentRange(response.getHeader('content-range'));
      this._fileSize = total || null;
      return {
        data,
        offset,
        length,
      };
    } else {
      if (!this.allowFullFile) {
        throw new Error('Server responded with full file');
      }
      const data = await response.getData();
      this._fileSize = data.byteLength;
      return {
        data,
        offset: 0,
        length: data.byteLength,
      };
    }
  }

  /** The file size reported via Content-Range/body length, or null. */
  get fileSize() {
    return this._fileSize;
  }
}
/**
 * Wrap the given source in a BlockedSource for block caching, unless
 * blocking was explicitly disabled with `blockSize: null`.
 * @param {BaseSource} source the source to maybe wrap
 * @param {object} options the block options
 * @param {number|null} options.blockSize block size, or null to disable blocking
 * @param {number} options.cacheSize maximum number of cached blocks
 * @returns the original or the wrapped source
 */
function maybeWrapInBlockedSource(source, { blockSize, cacheSize }) {
  if (blockSize === null) {
    return source;
  }
  // BlockedSource's constructor destructures an options object from its
  // second argument — passing blockSize/cacheSize positionally silently
  // discarded both values and always used the defaults
  return new BlockedSource(source, { blockSize, cacheSize });
}
/**
 * Create a (block-cached) remote source that uses the fetch API.
 * @param {string} url the URL to fetch from
 * @param {object} [options] client and block options
 */
export function makeFetchSource(url, { headers = {}, credentials, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
  const remote = new RemoteSource(
    new FetchClient(url, credentials),
    headers,
    maxRanges,
    allowFullFile,
  );
  return maybeWrapInBlockedSource(remote, blockOptions);
}
/**
 * Create a (block-cached) remote source that uses XMLHttpRequest.
 * @param {string} url the URL to fetch from
 * @param {object} [options] client and block options
 */
export function makeXHRSource(url, { headers = {}, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
  const remote = new RemoteSource(
    new XHRClient(url),
    headers,
    maxRanges,
    allowFullFile,
  );
  return maybeWrapInBlockedSource(remote, blockOptions);
}
/**
 * Create a (block-cached) remote source that uses node's http/https.
 * @param {string} url the URL to fetch from
 * @param {object} [options] client and block options
 */
export function makeHttpSource(url, { headers = {}, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
  const remote = new RemoteSource(
    new HttpClient(url),
    headers,
    maxRanges,
    allowFullFile,
  );
  return maybeWrapInBlockedSource(remote, blockOptions);
}
/**
 * Create a remote source using the best transport for the environment:
 * fetch when available (unless forced off), then XHR, then node http.
 * @param {string} url
 * @param {object} options
 */
export function makeRemoteSource(url, { forceXHR = false, ...clientOptions } = {}) {
  const hasFetch = typeof fetch === 'function';
  if (hasFetch && !forceXHR) {
    return makeFetchSource(url, clientOptions);
  }
  const hasXHR = typeof XMLHttpRequest !== 'undefined';
  if (hasXHR) {
    return makeXHRSource(url, clientOptions);
  }
  return makeHttpSource(url, clientOptions);
}