diff --git a/.eslintrc.cjs b/.eslintrc.cjs
new file mode 100644
index 0000000000..67c96a841e
--- /dev/null
+++ b/.eslintrc.cjs
@@ -0,0 +1,133 @@
+const {getESLintConfig} = require('ocular-dev-tools/configuration');
+
+// Make any changes to default config here
+const config = getESLintConfig({
+ overrides: {
+ // To make import assertions work
+ parser: '@babel/eslint-parser',
+ parserOptions: {
+ project: ['./tsconfig.json'],
+ requireConfigFile: false,
+ babelOptions: {
+ plugins: ['@babel/plugin-syntax-import-assertions']
+ }
+ },
+ env: {
+ browser: true,
+ es2020: true,
+ node: true
+ },
+
+ rules: {
+ indent: 0,
+ 'import/no-unresolved': 0,
+ 'no-console': 1,
+ 'no-continue': ['warn'],
+ 'callback-return': 0,
+ 'max-depth': ['warn', 4],
+ complexity: ['warn'],
+ 'max-statements': ['warn'],
+ 'default-case': ['warn'],
+ 'no-eq-null': ['warn'],
+ eqeqeq: ['warn'],
+ radix: 0
+ // 'accessor-pairs': ['error', {getWithoutSet: false, setWithoutGet: false}]
+ },
+
+ overrides: [
+ {
+ files: ['**/*.ts', '**/*.tsx', '**/*.d.ts'],
+ rules: {
+ // typescript-eslint 6.0
+ '@typescript-eslint/no-unsafe-argument': 0,
+ '@typescript-eslint/no-redundant-type-constituents': 0,
+ '@typescript-eslint/no-unsafe-enum-comparison': 1,
+ '@typescript-eslint/no-duplicate-type-constituents': 1,
+ '@typescript-eslint/no-base-to-string': 1,
+ '@typescript-eslint/no-loss-of-precision': 1,
+
+ // For parquet module
+ '@typescript-eslint/no-non-null-assertion': 0,
+ '@typescript-eslint/no-non-null-asserted-optional-chain': 0,
+ '@typescript-eslint/no-floating-promises': 0,
+ // Gradually enable
+ '@typescript-eslint/ban-ts-comment': 0,
+ '@typescript-eslint/ban-types': 0,
+ '@typescript-eslint/no-unsafe-member-access': 0,
+ '@typescript-eslint/no-unsafe-assignment': 0,
+ '@typescript-eslint/no-var-requires': 0,
+ '@typescript-eslint/no-unused-vars': [
+ 'warn',
+ {vars: 'all', args: 'none', ignoreRestSiblings: false}
+ ],
+ // We still have some issues with import resolution
+ 'import/named': 0,
+ 'import/no-extraneous-dependencies': 0, // ['warn'], disable for test folder only...
+ // Warn instead of error
+ // 'max-params': ['warn'],
+ // 'no-undef': ['warn'],
+ // camelcase: ['warn'],
+ // '@typescript-eslint/no-floating-promises': ['warn'],
+ // '@typescript-eslint/await-thenable': ['warn'],
+ // '@typescript-eslint/no-misused-promises': ['warn'],
+ '@typescript-eslint/no-empty-function': 0,
+ // We use function hoisting
+ '@typescript-eslint/no-use-before-define': 0,
+ // We always want explicit typing, e.g `field: string = ''`
+ '@typescript-eslint/no-inferrable-types': 0,
+ '@typescript-eslint/restrict-template-expressions': 0,
+ '@typescript-eslint/explicit-module-boundary-types': 0,
+ '@typescript-eslint/require-await': 0,
+ '@typescript-eslint/no-unsafe-return': 0,
+ '@typescript-eslint/no-unsafe-call': 0,
+ '@typescript-eslint/no-empty-interface': 0,
+ '@typescript-eslint/restrict-plus-operands': 0
+ }
+ },
+ {
+ // scripts use devDependencies
+ files: ['*worker*.js', '**/worker-utils/**/*.js'],
+ env: {
+ browser: true,
+ es2020: true,
+ node: true,
+ worker: true
+ }
+ },
+ // tests are run with aliases set up in node and webpack.
+ // This means lint will not find the imported files and generate false warnings
+ {
+ // scripts use devDependencies
+ files: ['**/test/**/*.js', '**/scripts/**/*.js', '*.config.js', '*.config.local.js'],
+ rules: {
+ 'import/no-unresolved': 0,
+ 'import/no-extraneous-dependencies': 0
+ }
+ },
+ {
+ files: ['examples/**/*.js'],
+ rules: {
+ 'import/no-unresolved': 0
+ }
+ }
+ ],
+
+ settings: {
+ // Ensure eslint finds typescript files
+ 'import/resolver': {
+ node: {
+ extensions: ['.js', '.jsx', '.mjs', '.ts', '.tsx']
+ }
+ }
+ }
+ }
+});
+
+// config.overrides[1].parserOptions = {
+// project: ['./tsconfig.json']
+// };
+
+// Uncomment to log the eslint config
+// console.debug(JSON.stringify(config, null, 2));
+
+module.exports = config;
diff --git a/.eslintrc.js b/.eslintrc.js
deleted file mode 100644
index a99ddae4a8..0000000000
--- a/.eslintrc.js
+++ /dev/null
@@ -1,120 +0,0 @@
-const {getESLintConfig, deepMerge} = require('ocular-dev-tools');
-
-const defaultConfig = getESLintConfig({react: '16.8.2'});
-
-// Make any changes to default config here
-const config = deepMerge(defaultConfig, {
- parserOptions: {
- project: ['./tsconfig.json']
- },
-
- env: {
- browser: true,
- es2020: true,
- node: true
- },
-
- rules: {
- indent: 0,
- 'import/no-unresolved': 0,
- 'no-console': 1,
- 'no-continue': ['warn'],
- 'callback-return': 0,
- 'max-depth': ['warn', 4],
- complexity: ['warn'],
- 'max-statements': ['warn'],
- 'default-case': ['warn'],
- 'no-eq-null': ['warn'],
- eqeqeq: ['warn'],
- radix: 0
- // 'accessor-pairs': ['error', {getWithoutSet: false, setWithoutGet: false}]
- },
-
- overrides: [
- {
- files: ['**/*.ts', '**/*.tsx', '**/*.d.ts'],
- rules: {
- // For parquet module
- '@typescript-eslint/no-non-null-assertion': 0,
- '@typescript-eslint/no-non-null-asserted-optional-chain': 0,
- '@typescript-eslint/no-floating-promises': 0,
- // Gradually enable
- '@typescript-eslint/ban-ts-comment': 0,
- '@typescript-eslint/ban-types': 0,
- '@typescript-eslint/no-unsafe-member-access': 0,
- '@typescript-eslint/no-unsafe-assignment': 0,
- '@typescript-eslint/no-var-requires': 0,
- '@typescript-eslint/no-unused-vars': [
- 'warn',
- {vars: 'all', args: 'none', ignoreRestSiblings: false}
- ],
- // We still have some issues with import resolution
- 'import/named': 0,
- 'import/no-extraneous-dependencies': 0, // ['warn'], disable for test folder only...
- // Warn instead of error
- // 'max-params': ['warn'],
- // 'no-undef': ['warn'],
- // camelcase: ['warn'],
- // '@typescript-eslint/no-floating-promises': ['warn'],
- // '@typescript-eslint/await-thenable': ['warn'],
- // '@typescript-eslint/no-misused-promises': ['warn'],
- '@typescript-eslint/no-empty-function': ['warn', {allow: ['arrowFunctions']}],
- // We use function hoisting
- '@typescript-eslint/no-use-before-define': 0,
- // We always want explicit typing, e.g `field: string = ''`
- '@typescript-eslint/no-inferrable-types': 0,
- '@typescript-eslint/restrict-template-expressions': 0,
- '@typescript-eslint/explicit-module-boundary-types': 0,
- '@typescript-eslint/require-await': 0,
- '@typescript-eslint/no-unsafe-return': 0,
- '@typescript-eslint/no-unsafe-call': 0,
- '@typescript-eslint/no-empty-interface': 0,
- '@typescript-eslint/restrict-plus-operands': 0
- }
- },
- {
- // scripts use devDependencies
- files: ['*worker*.js', '**/worker-utils/**/*.js'],
- env: {
- browser: true,
- es2020: true,
- node: true,
- worker: true
- }
- },
- // tests are run with aliases set up in node and webpack.
- // This means lint will not find the imported files and generate false warnings
- {
- // scripts use devDependencies
- files: ['**/test/**/*.js', '**/scripts/**/*.js', '*.config.js', '*.config.local.js'],
- rules: {
- 'import/no-unresolved': 0,
- 'import/no-extraneous-dependencies': 0
- }
- },
- {
- files: ['examples/**/*.js'],
- rules: {
- 'import/no-unresolved': 0
- }
- }
- ],
-
- settings: {
- // Ensure eslint finds typescript files
- 'import/resolver': {
- node: {
- extensions: ['.js', '.jsx', '.mjs', '.ts', '.tsx']
- }
- }
- }
-});
-
-// config.overrides[1].parserOptions = {
-// project: ['./tsconfig.json']
-// };
-
-// Uncomment to log the eslint config
-// console.debug(config);
-
-module.exports = config;
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 8e1cb85c96..1fc7ee5407 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- node-version: [16]
+ node-version: [16, 18, 20]
steps:
- uses: actions/checkout@v3
@@ -33,7 +33,7 @@ jobs:
npm run test ci
- name: Coveralls
- if: matrix.node-version == 16
+ if: matrix.node-version == 18
uses: coverallsapp/github-action@master
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
index 34cace2602..5601a0513a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,7 @@
# tests
test/data/test.png
tmp
+!modules/zip/test/data/**/*.zip
# dists
dist/*
diff --git a/.ocularrc.cjs b/.ocularrc.cjs
deleted file mode 100644
index c3494b0d1c..0000000000
--- a/.ocularrc.cjs
+++ /dev/null
@@ -1,35 +0,0 @@
-/** @typedef {import('ocular-dev-tools').OcularConfig} OcularConfig */
-const {resolve} = require('path');
-
-/** @type {OcularConfig} */
-const config = {
- aliases: {
- test: resolve(__dirname, 'test')
- },
-
- browserTest: {
- server: {wait: 5000}
- },
-
- babel: {
- // extensions: ['.js', '.jsx', '.mjs', '.ts', '.tsx']
- },
-
- lint: {
- // TODO - comment out while getting typescript to work
- paths: ['dev-docs', 'docs', 'modules'] // 'examples', test', 'website', 'examples'],
- // extensions: ['js', 'jsx', 'mjs', 'ts', 'tsx', 'md']
- },
-
- webpack: {},
-
- entry: {
- test: 'test/node.js',
- 'test-browser': 'test/browser.js',
- bench: 'test/bench/node.js',
- 'bench-browser': 'test/bench/browser.js',
- size: 'test/size/import-nothing.js'
- }
-};
-
-module.exports = config;
diff --git a/.ocularrc.js b/.ocularrc.js
new file mode 100644
index 0000000000..fd141f228e
--- /dev/null
+++ b/.ocularrc.js
@@ -0,0 +1,37 @@
+import {resolve} from 'path';
+
+export default {
+ aliases: {
+ test: resolve('./test')
+ },
+
+ typescript: {
+ project: 'tsconfig.build.json'
+ },
+
+ bundle: {
+ globalName: 'loader',
+ externals: ['fs', 'path', 'util', 'events', 'stream', 'crypto', 'http', 'https'],
+ target: ['supports async-functions', 'not dead'],
+ format: 'umd',
+ globals: {
+ '@loaders.gl/*': 'globalThis.loaders'
+ }
+ },
+
+ lint: {
+    // TODO - some paths commented out while getting typescript to work
+    paths: ['dev-docs', 'docs', 'modules'] // 'examples', 'test', 'website'
+ // extensions: ['js', 'jsx', 'mjs', 'ts', 'tsx', 'md']
+ },
+
+ webpack: {},
+
+ entry: {
+ test: 'test/node.ts',
+ 'test-browser': 'test/browser.ts',
+ bench: 'test/bench/node.js',
+ 'bench-browser': 'test/bench/browser.js',
+ size: 'test/size/import-nothing.js'
+ }
+};
diff --git a/.prettierignore b/.prettierignore
index 4dcd12f0fe..c6ae6c21f2 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -7,6 +7,11 @@ node_modules/
.cache
public
+modules/core/src/iterators/make-stream/make-node-stream.ts
+
+modules/loader-utils/src/lib/files/node-file-facade.ts
+modules/loader-utils/src/lib/filesystems/node-filesystem-facade.ts
+
modules/3d-tiles/test/lib/classes/tile-3d-batch-table-hierarchy.spec.ts
modules/bson/src/bson-writer.ts
@@ -15,6 +20,7 @@ modules/bson/src/lib/encoders/encode-bson.ts
modules/bson/src/lib/parsers/parse-bson.ts
modules/bson/test/data/js-bson/corrupt.ts
+modules/parquet/src/polyfills/buffer/buffer.ts
modules/parquet/test
examples/experimental/gltf-with-raw-webgl/
@@ -35,6 +41,8 @@ modules/mvt/src/lib/geojson-tiler/tile.ts
modules/parquet/test
+modules/textures/test/basis-loader.spec.ts
+
modules/xml/test/sax-ts/testcases/attribute-name.spec.ts
modules/xml/test/sax-ts/testcases/opentagstart.spec.ts
modules/xml/test/sax-ts/testcases/script-close-better.spec.ts
diff --git a/.prettierrc b/.prettierrc
new file mode 100644
index 0000000000..bffdbcd710
--- /dev/null
+++ b/.prettierrc
@@ -0,0 +1,5 @@
+printWidth: 100
+semi: true
+singleQuote: true
+trailingComma: none
+bracketSpacing: false
diff --git a/.prettierrc.js b/.prettierrc.js
deleted file mode 100644
index 152d41cdf2..0000000000
--- a/.prettierrc.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const {getPrettierConfig, deepMerge} = require('ocular-dev-tools');
-
-const config = getPrettierConfig({react: '16.8.2'});
-
-// Make any changes to default config here
-
-// Uncomment to log the eslint config
-// console.debug(config);
-
-module.exports = config;
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6fd33c8370..8db7d7d40b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,10 +1,278 @@
# CHANGELOG for loaders.gl
+## v4.0
+
+### v4.0.3
+
+- feat(tile-converter): estimation of time remaining (#2774)
+- fix: Revert parquet-wasm integration (#2781)
+- fix(Arrow): featureIds not correctly parsed from MultiPolygon w/ holes in arrow util (WIP) (#2777)
+- fix: Use "latest" version tag when loading from unpkg (#2779)
+- docs(arrowjs): Update Arrow docs and release notes (#2778)
+- fix(examples): run 'geospatial' locally (#2776)
+- chore: Update all dependencies to ^4.0.0 (#2775)
+- feat(parquet): Enable Parquet WASM loader (#2773)
+- fix(3d-tiles): Enable Tiles3DLoader tests (#2771)
+- chore: Dependencies (#2772)
+- chore: parseFile accepts `ReadableFile` (#2770)
+- chore(excel): Fix batched loader adapter from atomic parse (#2769)
+- chore(loader-utils): split Worker/WorkerWithEncoder types (#2768)
+
+### v4.0.2
+
+- test: run workers from source code (#2762)
+- feat(schema): makeTableFromBatches (#2767)
+- chore: Remove Buffer in test cases (#2766)
+- chore(Arrow): add test cases for geoarrow to binary geometries (#2765)
+- chore: Adopt namespace style imports for apache-arrow (#2764)
+- fix get arrow bound function; add test case (#2763)
+- fix(kml): Fix TXCLoader default shape (#2761)
+- chore: Improve docs (#2758)
+- fix(website): Unbreak website build (#2756)
+- chore: fix 4.0 peer dependencies (#2755)
+
+### v4.0.1
+
+- chore(textures): enable tests (#2741)
+- feat(gis): Consolidate geo metadata detection and table conversion (#2742)
+- fix(zip): cd header zip64 data reading fix (#2710)
+- feat(arrow): GeoArrow utilities (#2744)
+- Got rid of .toString() usage for ArrayBuffers (#2743)
+- chore: Add some javascript API guidelines (#2747)
+- Update 3D Tiles Docs (#2749)
+- feat(mvt): MVTileSource (#2750)
+- chore: improve test coverage (#2751)
+- docs: Clean up website links (#2748)
+- refactor(tile-converter): refactor creation of Attribute info (#2718)
+- feat(tile-converter): conversion progress (#2739)
+- chore(shapefile): Improve Shapefile format doc (#2752)
+- fix(tile-converter): i3s-server - esm compatibility (#2745)
+
## v4.0 Prerelease
> The official 4.0 alpha track starts with alpha.6
> The early pre-release track was abandoned due to build incompatibility problems.
-release info (#2491))
+> release info (#2491))
+
+### v4.0.0-beta.8
+
+- Update gltf.md. (#2733)
+- fix(website): restore I3S examples (#2734)
+- fix: render test import (#2731)
+- chore(crypto): Restore crypto tests (#2730)
+- chore: Clean up license text (#2729)
+- chore(i3s): Export a function customizeColors from i3s lib utils (#2719)
+- added test for conversion 64-bit attributes to strings (#2728)
+
+### v4.0.0-beta.7
+
+- fix(i3s): Remove luma.gl dependency (#2727)
+- feat(flatgeobuf): Upgrade to latest flatgeobuf (#2684)
+- feat(lerc): Break out LERCLoader into its own module (size and bundling issues) (#2724)
+- chore(polyfills): Bump deps (#2723)
+- feat(polyfills): Add installFilePolyfills on Node.js (#2722)
+- fix(i3s): I3SContentLoader regression (#2713)
+
+### v4.0.0-beta.6
+
+- fix(polyfills): Add CJS export for node.js (#2720)
+- feat(wms): Restore LERCLoader (#2715)
+- chore: Remove deprecated APIs and update docs (#2714)
+
+### v4.0.0-beta.5
+
+- Path fix (#2709)
+- fix(gltf, tile-converter): attributeStorageInfo, use class name (#2673)
+- chore: Add CI for Node 20 (#2712)
+- fix(tile-converter): enable tests (#2708)
+- chore: Bump to Node 18 (#2711)
+- docs (whats-new): Update whats-new.mdx for 4.0 loaders.gl release (#2702)
+- feat(geopackage): Upgrade and modernize (#2704)
+
+### v4.0.0-beta.4
+
+- fix(tile-converter): cli tools (#2707)
+- feat(tile-converter): test for conversion arrays to attribute of string type (#2703)
+- chore(polyfills): Consolidate node code (#2701)
+- fix(i3s): handle search params in I3SLoader (#2692)
+
+### v4.0.0-beta.3
+
+- feat(tile-converter): --analyze-only option (#2694)
+- fix(tiles): cartographicToCartesan syntax (#2690)
+- chore(website): Restore website (#2689)
+- fix(wms): WMS 1.3.0 compatability on GetFeatureInfo (#2680)
+- chore: Prep for Node18 support (#2699)
+- chore: math.gl@4.0.0 (#2698)
+- fix(gltf): fix of getTypedArrayForAccessor in gltf-scenegraph (#2683)
+- chore(schema): Move arrow dependencies to arrow module (#2697)
+- chore: Upgrade to math.gl@4.0.0-beta.1. Remove gl-matrix (#2696)
+- chore: Restore library loading (#2686)
+- fix(tiles): convert region to obb (#2685)
+- feat: Move to ES modules, upgrade dev-tools (#2681)
+- feat(mvt): Add MVTSource (#2674)
+- chore(core): Remove writeSync, save and fs dependencies (#2678)
+- feat(loader-utils): Refactor FileSystem to be independent of fs (#2676)
+- chore: Remove Buffer usage (#2675)
+- chore(zip): Refactor zip content hash tables (#2500)
+- chore(polyfills): Remove Promise.allSettled polyfill (#2672)
+
+### v4.0.0-beta.2
+
+- fix: getting tile url with empty query params (#2671)
+- chore(polyfills): Start moving Node.js code into polyfills (#2669)
+
+### v4.0.0-beta.1
+
+- feat(tile-converter): support of 64-bit int (#2670)
+- feat(gltf): added support of arrays to ext-feature-metadata (#2663)
+- feat(mvt): Add TileJSONLoader (#2666)
+- feat(pmtiles): Create PMTileSource from Blob (#2668)
+- feat(wms): Separate WMSSource and WMSService (#2667)
+- fix: remove unused ts directive (#2665)
+- Move master to 4.0-beta tags (#2661)
+- feat(pmtools): Add vector tile support (#2664)
+- docs: Improved release notes
+- feat(pmtiles): Support for pmtiles format (#2662)
+- Website: Geoparquet example (#2660)
+- fix(parse-i3s): getting root node url for normalizeTilesetData without nodepages (#2659)
+
+### v4.0.0-alpha.26
+
+- Fixes for deck.gl 8.10 (#2658)
+- feat(crypto): Add encoding parameter for hashes (#2657)
+
+### v4.0.0-alpha.25
+
+- fix(gltf): tests for ext-feature-metadata (#2656)
+- fix(gltf, converter): make ext-mesh-features independent from ext-structural-metadata (#2655)
+- batch types (#2645)
+- chore(twkb): Add TWKBLoader tests (#2653)
+- feat(tile-converter): select metadata class from EXT_structural_metadata (#2647)
+- feat: new geoparquet example (#2646)
+- feat(wkt): Add TWKBLoader/Writer (#2028)
+- feat(wkb): Auto-detect WKB dialect and encoding (#2184)
+- feat(wkb): New HexWKBLoader for hex encoded WKB (#2652)
+- chore(worker-utils): Improve version handling (#2651)
+- chore: geoparquet prep (#2650)
+- feat(wkt): Add WKTCRSLoader/Writer (#2649)
+- docs(release-notes): Loaders 4.0 upcoming release notes (#2648)
+- docs: Add whats-new and upgrade-guide to arrowjs docs (#2636)
+- feat(schema): Make geojson-table compatible with GeoJSON (#2644)
+- docs(tile-converter): metadata class selection (#2642)
+- chore(tile-converter): rename (#2641)
+- chore(parquet): Add Buffer polyfill to parquet to avoid bundler complications (#2643)
+
+### v4.0.0-alpha.24
+
+- fix(tile-converter): geometry attributes reordering performance (#2640)
+- fix(tile-converter): EXT_feature_metadata conversion (#2639)
+- feat(gltf): EXT_feature_metadata - numeric types support (#2634)
+- chore(gltf): 3d-tiles extensions refactoring (#2633)
+- chore(draco): Upgrade to draco3d v1.5.6 (#2638)
+- Fix browser exclude (#2596)
+- docs: Consolidate whats-new (merge duplications) (#2637)
+- feat(arrow): upgrade to apache-arrow v13 (#2632)
+- feat(arrow): Typed apache arrow loader (#2631)
+- chore: More typed loaders (#2630)
+- chore(gis): Add typescript types (#2629)
+- docs(i3s): fix formats and english (#2628)
+- docs(i3s): I3S receipts (#2627)
+- chore: Type 3d-tile and I3S loaders. (#2606)
+
+### v4.0.0-alpha.23
+
+- chore: Add loader type parameters (#2626)
+- feat(tile-converter): support EXT_mesh_features and EXT_structural_metadata (#2566)
+- feat(core): non-specific parse functions return unknown (#2625)
+- chore(csv): Ensure tests use typed CSVLoader (#2621)
+- docs(core): Typed loaders (#2624)
+- chore(zip): Remove zip module dependency on @loaders.gl/core (#2622)
+- chore: Clean up module imports, remove default exports in images module (#2617) (#2623)
+
+### v4.0.0-alpha.22
+
+- fix(zip): @loaders.gl/core dependency (#2620)
+- feat(tile-converter): support 3tz (#2609)
+- chore(core): Reduce use of implicit any, move test files to .ts (#2619)
+- chore: Use parseFromContext in subloaders (#2616)
+- feat(loader-utils): Type safe context parsers for sub loaders (#2613)
+- feat(3d-tiles): some improvements (#2610)
+
+### v4.0.0-alpha.21
+
+- feat(core): parseSync, parseInBatches, load, loadInBatches type inference (#2612)
+- feat: More typed loaders (#2607)
+- feat(3d-tiles): 3tz loader (#2578)
+- feat(zip): ZipFileSystem (#2602)
+- chore(i3s): Hash generation moved to @loader.gl/zip (#2599)
+- chore(zip): read file classes (#2601)
+- chore(zip): Compression method added for local header (#2600)
+- chore(compression): Added raw mode for deflate-compresion module (#2598)
+
+### v4.0.0-alpha.20
+
+- chore(i3s): Hash file utility moved to loader-utils (#2595)
+- chore(i3s): Zip parse refactoring (#2594)
+- fix(core): fetchOptions regression (#2591)
+- chore(tile-converter): remove CesiumION tokens (#2592)
+- feat(tile-converter): select metadata classes (#2590)
+- fix(tile-converter): featureIds + uvRegions (#2588)
+
+### v4.0.0-alpha.19
+
+- fix(tile-converter): CLI startup script (#2587)
+- feat(tile-converter): i3s - offline conversion (#2579)
+- Handle empty childless tiles in TilesetTraverser (#2584)
+- fix(i3s): add to tileset data (#2585)
+- fix(tile-converter): fix loading buffers in preprocess-3d-tiles (#2572)
+
+### v4.0.0-alpha.18
+
+- fix(tile-converter): skip failing content (#2576)
+- fix: Bump and remove @xmldom/xmldom (input validation issue) (#2582)
+- docs(tile-converter): Add documentation for SLPK Extractor (#2567)
+- chore(core): Refactor fetchFile to handle Node.js local file loading (#2575)
+- chore(tile-converter): 3dtiles - exclude Tile3D and Tileset3D (#2574)
+
+### v4.0.0-alpha.17
+
+- docs(chore): core API documentation improvements (#2573)
+- Add triangulate property to geojsonToBinary (#2571)
+- fix(obj): Improved vertex colors parsing (#2569)
+
+### v4.0.0-alpha.16
+
+- chore(tile-converter): create SLPK hash during serve (#2565)
+- docs(tile-converter): I3S Server (#2564)
+- chore(tile-converter): i3s-server tests (#2563)
+- chore(deps): bump semver in /test/apps/typescript-test (#2544)
+- chore(deps): bump semver from 5.7.1 to 5.7.2 (#2545)
+- chore(tile-converter): i3s-server convert to ts (#2562)
+
+### v4.0.0-alpha.15
+
+- chore(tile-converter): bump i3s-server deps (#2561)
+- chore(tile-converter): Support for SLPKs larger than 2 Gb (#2547)
+- feat(tile-converter): i3s-server bundle (#2555)
+- chore(deps): bump semver from 5.7.1 to 5.7.2 in /website (#2546)
+- fix(docs): JSONLoader \_rootObjectBatches removed but not mentioned in upgrade guide (#2558)
+- chore(deps): bump word-wrap in /test/apps/typescript-test (#2559)
+- fix(tile-converter): CesiumION tileset URL (#2560)
+- chore: update CHANGELOG.md (#2551)
+- chore(tile-converter): update i3s-server manual (#2552)
+
+### v4.0.0-alpha.14
+
+- fix(3d-tiles): implicit tiling v1.1 (#2549)
+- fix(tile-converter): i3s->3dtiles regression (#2550)
+
+### v4.0.0-alpha.13
+
+- fix(gltf): 3D tiles extension types & docs (#2542)
+- fix(tile-converter): failing test (#2540)
+- chore: bump fast-xml-parser (#2538)
+- fix(3d-tiles): implicit tiling v1.1 (#2539)
### v4.0.0-alpha.12
@@ -83,6 +351,10 @@ release info (#2491))
## v3.4
+### v3.4.9
+
+- fix(obj): Improved OBJ vertex colors parsing (#2569)
+
### 3.4.2
- docs: Upgrade guide for `WMSCapabilities` type, link to CHANGELOG for patch release info
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index c950ca677f..fb6ca05cab 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -12,7 +12,7 @@ If you consider opening a PR, here is some documentation to get you started:
To contribute, you will likely want to clone the loaders.gl repository and start by making sure you can install, build and run tests.
-See the [developer guide](https://loaders.gl/docs/dev-env) on the loaders.gl website for more information on how to get your environment set up for loaders.gl development, including for Linux and Windows.
+See the [developer guide](https://loaders.gl/docs/developer-guide/dev-env) on the loaders.gl website for more information on how to get your environment set up for loaders.gl development, including for Linux and Windows.
## Community Governance
diff --git a/LICENSE b/LICENSE
index 5fe2837eff..7208843aec 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,9 +1,6 @@
-Copyright (c) 2015 Uber Technologies, Inc.
-
-This software includes parts of PhiloGL (https://github.com/philogb/philogl)
-under MIT license. PhiloGL parts Copyright © 2013 Sencha Labs.
+loaders.gl is licensed under the MIT license
-This software includes adaptations of postprocessing code from THREE.js (https://github.com/mrdoob/three.js/) under MIT license. Additional attribution given in specific source files. THREE.js parts Copyright © 2010-2018 three.js authors.
+Copyright (c) vis.gl contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -23,8 +20,12 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
+---
+
+Copyright (c) 2015 Uber Technologies, Inc.
-loaders.gl includes certain files from Cesium (https://github.com/AnalyticalGraphicsInc/cesium) under the Apache 2 License:
+loaders.gl includes certain files from Cesium (https://github.com/AnalyticalGraphicsInc/cesium)
+under the Apache 2 License (found in the submodule: modules/3d-tiles):
Copyright 2011-2018 CesiumJS Contributors
@@ -38,4 +39,3 @@ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
-Cesium-derived code can be found in the submodule: modules/3d-tiles
diff --git a/README.md b/README.md
index 3bf61551c5..ae46fdfd76 100644
--- a/README.md
+++ b/README.md
@@ -14,6 +14,10 @@
loaders.gl is extensively documented on the [loaders.gl](https://loaders.gl) website.
+## Contributing
+
+See [CONTRIBUTING.md](CONTRIBUTING.md).
+
## License
loaders.gl is licensed under a permissive open source license, using an MIT umbrella license.
diff --git a/babel.config.cjs b/babel.config.cjs
new file mode 100644
index 0000000000..e8f0890858
--- /dev/null
+++ b/babel.config.cjs
@@ -0,0 +1,22 @@
+// @ts-ignore
+const {getBabelConfig} = require('ocular-dev-tools/configuration');
+
+module.exports = getBabelConfig({
+ react: true,
+ plugins: [
+ // inject __VERSION__ from package.json
+ 'version-inline'
+ ],
+ ignore: [
+ // Don't transpile workers, they are transpiled separately
+ '**/*.worker.js',
+ '**/workers/*.js',
+ // Don't transpile files in libs, we use this folder to store external,
+ // already transpiled and minified libraries and scripts.
+ // e.g. draco, basis, las-perf etc.
+ /src\/libs/,
+ // babel can't process .d.ts
+ /\.d\.ts$/
+ ],
+ debug: false
+});
diff --git a/babel.config.js b/babel.config.js
deleted file mode 100644
index 8793ebb115..0000000000
--- a/babel.config.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const {getBabelConfig, deepMerge} = require('ocular-dev-tools');
-
-module.exports = (api) => {
- const defaultConfig = getBabelConfig(api, {react: true});
-
- const config = deepMerge(defaultConfig, {
- plugins: [
- // inject __VERSION__ from package.json
- 'version-inline'
- ],
- ignore: [
- // Don't transpile workers, they are transpiled separately
- '**/*.worker.js',
- '**/workers/*.js',
- // Don't transpile files in libs, we use this folder to store external,
- // already transpiled and minified libraries and scripts.
- // e.g. draco, basis, las-perf etc.
- /src\/libs/,
- // babel can't process .d.ts
- /\.d\.ts$/
- ]
- });
-
- // console.debug(config);
- return config;
-};
diff --git a/dev-docs/RFCs/v3.4/data-source-rfc.md b/dev-docs/RFCs/v3.4/data-source-rfc.md
new file mode 100644
index 0000000000..52e8ba707a
--- /dev/null
+++ b/dev-docs/RFCs/v3.4/data-source-rfc.md
@@ -0,0 +1,41 @@
+# Data Sources API
+
+Build a data source API that can encompass services such as:
+- loaded data
+- URLs
+- tile service
+- WMS
+- Incremental fetch with range requests etc.
+- programmatic data generation
+- ...
+
+### Related
+
+- deck.gl has a semi-internal data source API.
+-
+
+
+
+## Main problems
+
+### Refresh / Dirty state handling.
+
+How does the application (typically deck.gl) know when to redraw?
+
+```typescript
+DataSource.setNeedsRefresh();
+DataSource.getNeedsRefresh(clear: boolean = true);
+```
+
+## Updates
+
+`DataSource.setProps()`
+
+Typing is a bit messy when overriding child class definitions.
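+
+A minimal sketch of one possible shape (not the actual loaders.gl implementation): a generic props parameter lets subclasses inherit a correctly typed `setProps()` without redeclaring it. The `DataSourceProps` type and the `url` field are illustrative assumptions.
+
+```typescript
+export type DataSourceProps = {url?: string};
+
+export abstract class DataSource<PropsT extends DataSourceProps = DataSourceProps> {
+  props: PropsT;
+  private needsRefresh: boolean = true;
+
+  constructor(props: PropsT) {
+    this.props = {...props};
+  }
+
+  /** Subclasses inherit a correctly typed setProps without overriding it */
+  setProps(props: Partial<PropsT>): void {
+    this.props = {...this.props, ...props};
+    this.setNeedsRefresh();
+  }
+
+  setNeedsRefresh(): void {
+    this.needsRefresh = true;
+  }
+
+  /** Returns true if a refresh is needed, optionally clearing the flag */
+  getNeedsRefresh(clear: boolean = true): boolean {
+    const needsRefresh = this.needsRefresh;
+    if (clear) {
+      this.needsRefresh = false;
+    }
+    return needsRefresh;
+  }
+}
+```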
+
+## Declarative usage
+
+Fully declarative usage requires a lifecycle management system, which seems too heavy.
+
+
+
diff --git a/docs/README.mdx b/docs/README.mdx
index 92da2f1816..72c7f01daa 100644
--- a/docs/README.mdx
+++ b/docs/README.mdx
@@ -6,18 +6,26 @@
-This documentation describes loaders.gl **v4.0**. Docs for older versions are available on github:
+This documentation describes loaders.gl **v4.0**. See our [**release notes**](/docs/whats-new) to learn what is new.
+
+Docs for older versions are available on github:
**[v3.3](https://github.com/visgl/loaders.gl/blob/3.3-release/docs/README.md)**,
**[v2.3](https://github.com/visgl/loaders.gl/blob/2.3-release/docs/README.md)**,
**[v1.3](https://github.com/visgl/loaders.gl/blob/1.3-release/docs/README.md)**.
## Overview
-loaders.gl is a collection of open source loaders and writers for file formats including tabular, geospatial, and 3D formats. It is focused on supporting visualization and analytics of big data.
+loaders.gl is a collection of open source loaders and writers for various file formats,
+primarily focused on supporting visualization and analytics of big data.
+Tabular, geospatial, and 3D file formats are well covered.
-loaders.gl is packaged and published as a suite of composable loader modules offering consistent APIs and features across file formats, and offers advanced features such as running loaders on workers and incremental parsing, and all loaders work in both the browser and in Node.js.
+Published as a suite of composable loader modules with consistent APIs and features,
+offering advanced features such as worker thread and incremental parsing,
+loaders.gl aims to be a trusted companion when you need to load data into your application.
-By design, other [vis.gl frameworks](https://vis.gl/frameworks) such as [deck.gl](https://deck.gl) and [luma.gl](https://luma.gl) integrate seamlessly with loaders.gl, however loaders.gl itself has no dependencies on those frameworks, and all loaders and writers can be used with any JavaScript application or framework.
+While loaders.gl can be used with any JavaScript application or framework,
+[vis.gl frameworks](https://vis.gl/frameworks) such as [**deck.gl**](https://deck.gl)
+come pre-integrated with loaders.gl.
## Loaders
@@ -34,7 +42,8 @@ loaders.gl provides a wide selection of loaders organized into categories:
## Code Examples
-loaders.gl provides a small core API module with common functions to load and save data, and a range of optional modules that provide loaders and writers for specific file formats.
+loaders.gl provides a small core API module with common functions to load and save data,
+and a range of optional modules that provide loaders and writers for specific file formats.
A minimal example using the `load` function and the `CSVLoader` to load a CSV formatted table into a JavaScript array:
@@ -66,53 +75,50 @@ To quickly get up to speed on how the loaders.gl API works, please see [Get Star
## Supported Platforms
-loaders.gl provides consistent support for both browsers and Node.js. The following platforms are supported:
+loaders.gl supports both browsers and Node.js:
-- **Evergreen Browsers** loaders.gl supports recent versions of the major evergreen browsers (e.g. Chrome, Firefox, Safari) on both desktop and mobile.
-- **Node.js** LTS (Long-Term Support) [releases](https://nodejs.org/en/about/releases/) are also supported. Note that the `@loaders.gl/polyfills` module should be imported under Node.js. It installs the required Node.js polyfills for `fetch` etc.
-- **IE11** is no longer officially supported from v3.0, however loaders.gl 2.3 is known to run on IE11.
- - To run on IE11, both `@loaders.gl/polyfills` and additional appropriate polyfills (e.g. babel polyfills) need to be installed which will increase your application bundle size.
- - Note that because of lack of regular testing on IE11, regressions can occur, so pinning your loaders.gl versions in package.json is advisable.
- - For IE11, additional transpilation of loaders.gl packages in your `node_modules` folder may also be required.
-## Design Goals
+- **Evergreen Browsers** recent versions of major evergreen browsers (e.g. Chrome, Firefox, Safari) are supported on both desktop and mobile.
+- **Node.js** All current [LTS releases](https://nodejs.org/en/about/previous-releases) are supported.
-**Framework Agnostic** - Files are parsed into clearly documented data structures (objects + typed arrays) that can be used with any JavaScript framework.
+## Design Goals
-**Streaming Support** - Several loaders can parse in batches from both node and browser `Stream`s, allowing "larger than memory" files to be processed, and initial results to be available while the remainder of a file is still loading.
+**Framework Agnostic** - Files are parsed into clearly documented plain data structures (objects + typed arrays) that can be used with any JavaScript framework.
-**Browser Support** - loaders.gl supports recent versions of evergreen browsers.
+**Browser Support** - supports recent versions of evergreen browsers, and ensures that loaders are easy to bundle.
-**Worker Support** - Many loaders.gl loaders are automatically run in web workers, keeping the main thread free for other tasks while parsing completes.
+**Node Support** - loaders.gl can be used when writing backend and cloud services, and you can confidently run your unit tests under Node.
-**Node Support** - All loaders work under Node.js and can be used when writing backend and cloud services, and when running your unit tests under Node.
+**Worker Support** - Many loaders.gl loaders come with pre-built web workers, keeping the main thread free for other tasks while parsing completes.
-**Loader Categories** - loaders.gl groups similar data formats into "categories". loaders in the same category return parsed data in "standardized" form, making it easier to build applications that can handle multiple similar file formats.
+**Loader Categories** - loaders.gl groups similar data formats into "categories" that return parsed data in "standardized" form. This makes it easier to build applications that can handle multiple similar file formats.
**Format Autodetection** - Applications can specify multiple loaders when parsing a file, and loaders.gl will automatically pick the right loader for a given file based on a combination of file/url extensions, MIME types and initial data bytes.
-**Bundle Size Reduction** - Loaders for each file format are published in independent npm modules to allow applications to cherry-pick only the loaders it needs. In addition, modules are optimized for tree-shaking, and many larger loader libraries and web workers are loaded from CDN on use and not included in your application bundle.
+**Streaming Support** - Many loaders can parse in batches from both node and WhatWG streams, allowing "larger than memory" files to be processed, and initial results to be available while the remainder of a file is still loading.
-**Modern JavaScript** - loaders.gl is written in standard ES2018 and the API emphasizes modern, portable JavaScript constructs, e.g. async iterators instead of streams, `ArrayBuffer` instead of `Buffer`, etc.
+**Composability and Bundle-Size Optimization** - Loaders for each file format are published in independent npm modules to allow applications to cherry-pick only the loaders it needs. In addition, modules are optimized for tree-shaking, and many larger loader libraries and web workers are loaded from CDN on use and not included in your application bundle.
**Binary Data** - loaders.gl is optimized to load into compact memory representations for use with WebGL frameworks (e.g. by returning typed arrays whenever possible). Note that in spite of the `.gl` naming, loaders.gl has no actual WebGL dependencies and loaders can be used without restrictions in non-WebGL applications.
-**Multi-Asset Loading** - Some formats like glTF, Shapefile, or mip mapped / cube textures can require dozens of separate loads to resolve all linked assets (external buffers, images etc). Tracking all the resulting async loads can cause complications for applications. By default, loaders.gl loads all linked assets before resolving the returned `Promise`.
+**Multi-Asset Loading** - Formats like glTF, Shapefile, or mip mapped / cube textures can require dozens of separate loads to resolve all linked assets (external buffers, images etc). loaders.gl loads all linked assets before resolving the returned `Promise`.
+
+**Modern JavaScript** - loaders.gl is written in TypeScript 5.0 and standard ES2018, is packaged as ECMAScript modules, and the API emphasizes modern, portable JavaScript constructs, e.g. async iterators instead of streams, `ArrayBuffer` instead of `Buffer`, etc.
## Licenses
-loaders.gl itself is MIT licensed but various modules contain code under several permissive open source licenses, currently MIT, BSD and Apache licenses. Each loader module comes with its own license, so if the distinction matters to you, please check the documentation for each module and decide accordingly, however loaders.gl will never include code with non-permissive, commercial or copyLeft licenses.
+loaders.gl itself is MIT licensed; however, various modules contain forked code under several permissive, compatible open source licenses, such as ISC, BSD and Apache licenses. Each loader module provides some license notes, so if the distinction matters to you, please check the documentation for each module and decide accordingly. We guarantee that loaders.gl will never include code with non-permissive, commercial or copy-left licenses.
## Credits and Attributions
-loaders.gl is maintained by a group of organizations collaborating through open governance under the Linux Foundation.
+loaders.gl is maintained by a group of organizations collaborating through open governance under the OpenJS and Linux Foundations.
-While loaders.gl contains a lot of original code, it is also partly a repackaging of superb work done by others in the open source community. We try to be as explicit as we can about the origins and attributions of each piece of code, both in the documentation page for each module and in the preservation of comments relating to authorship and contributions inside forked source code.
+While loaders.gl contains substantial amounts of original code, it also repackages lots of superb work done by others in the open source community. We try to be as explicit as we can about the origins and attributions of each piece of code, both in the documentation page for each module and in the preservation of comments relating to authorship and contributions inside forked source code.
Even so, we can make mistakes, and we may not have the full history of the code we are reusing. If you think that we have missed something, or that we could do better in regard to attribution, please let us know.
-### Primary maintainers
+## Primary maintainers
-The organizations and individuals that contribute most significantly to the development and maintenance of loaders.gl are:
+Organizations that currently contribute most significantly to the development and maintenance of loaders.gl:
diff --git a/docs/arrowjs/api-reference/builder.md b/docs/arrowjs/api-reference/builder.md
new file mode 100644
index 0000000000..ab5280ae9c
--- /dev/null
+++ b/docs/arrowjs/api-reference/builder.md
@@ -0,0 +1,87 @@
+# Builders
+
+The `makeBuilder()` function creates a `Builder` instance that is set up to build
+a columnar vector of the supplied `DataType`.
+
+A `Builder` is responsible for writing arbitrary JavaScript values
+to ArrayBuffers and/or child Builders according to the Arrow specification
+for each `DataType`, creating or resizing the underlying ArrayBuffers as necessary.
+
+The `Builder` for each Arrow `DataType` handles converting and appending
+values for a given `DataType`.
+
+Once created, `Builder` instances support both appending values to the end
+of the `Builder`, and random-access writes to specific indices
+(`builder.append(value)` is a convenience method for
+`builder.set(builder.length, value)`). Appending or setting values beyond the
+Builder's current length may cause the builder to grow its underlying buffers
+or child Builders (if applicable) to accommodate the new values.
+
+After enough values have been written to a `Builder`, `builder.flush()`
+will commit the values to the underlying ArrayBuffers (or child Builders). The
+internal Builder state will be reset, and an instance of `Data<T>` is returned.
+
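+## Usage
+
+A short sketch of the flow described above, using the apache-arrow `makeBuilder()` API (the specific options shown are illustrative):
+
+```typescript
+import {makeBuilder, Utf8} from 'apache-arrow';
+
+// Create a Builder for a Utf8 column, mapping null/undefined to null slots
+const builder = makeBuilder({type: new Utf8(), nullValues: [null, undefined]});
+
+builder.append('hello'); // equivalent to builder.set(builder.length, 'hello')
+builder.append('world');
+
+// flush() commits the written values and resets the internal Builder state
+const data = builder.flush();
+
+// finish() marks the Builder complete; toVector() yields the finished Vector
+const vector = builder.finish().toVector();
+```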
+
+
+
@@ -18,7 +18,7 @@ Loader for the [FlatGeobuf](http://flatgeobuf.org/) format, a binary FlatBuffers
## Usage
-```js
+```typescript
import {FlatGeobufLoader} from '@loaders.gl/flatgeobuf';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/geopackage/README.md b/docs/modules/geopackage/README.md
index ddf31b3af7..18598c78d7 100644
--- a/docs/modules/geopackage/README.md
+++ b/docs/modules/geopackage/README.md
@@ -1,4 +1,4 @@
-# Overview
+# Overview 🚧
![ogc-logo](../../images/logos/ogc-logo-60.png)
diff --git a/docs/modules/geopackage/api-reference/geopackage-loader.md b/docs/modules/geopackage/api-reference/geopackage-loader.md
index a8b54f02d0..dff6dae211 100644
--- a/docs/modules/geopackage/api-reference/geopackage-loader.md
+++ b/docs/modules/geopackage/api-reference/geopackage-loader.md
@@ -1,4 +1,6 @@
-# GeoPackageLoader
+# GeoPackageLoader 🚧
+
+![ogc-logo](../../../images/logos/ogc-logo-60.png)
@@ -18,40 +20,45 @@ GeoPackage loader
## Usage
-```js
+To load all tables in a geopackage file as GeoJSON:
+
+```typescript
import {GeoPackageLoader, GeoPackageLoaderOptions} from '@loaders.gl/geopackage';
import {load} from '@loaders.gl/core';
import {Tables, ObjectRowTable, Feature} from '@loaders.gl/schema';
const optionsAsTable: GeoPackageLoaderOptions = {
geopackage: {
+ shape: 'tables',
sqlJsCDN: 'https://cdnjs.cloudflare.com/ajax/libs/sql.js/1.5.0/'
- },
- gis: {
- format: 'tables'
}
};
-const tablesData: Tables
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/modules/parquet/api-reference/parquet-loader.md b/docs/modules/parquet/api-reference/parquet-loader.md
index be37e07927..e44b611cce 100644
--- a/docs/modules/parquet/api-reference/parquet-loader.md
+++ b/docs/modules/parquet/api-reference/parquet-loader.md
@@ -1,4 +1,4 @@
-# ParquetLoader
+# ParquetLoader 🆕 🚧
diff --git a/docs/modules/parquet/api-reference/parquet-writer.md b/docs/modules/parquet/api-reference/parquet-writer.md
index ac4d488106..da8d9f81ec 100644
--- a/docs/modules/parquet/api-reference/parquet-writer.md
+++ b/docs/modules/parquet/api-reference/parquet-writer.md
@@ -1,4 +1,4 @@
-# ParquetWriter
+# ParquetWriter 🆕 🚧
@@ -10,4 +10,4 @@
The Parquet format supports a large set of features (data types, encodings, compressions, encryptions etc), and it will require time and contributions for the loaders.gl implementation to provide support for all variations.
-Please refer to the detailed information about which [Parquet format features](/docs/modules/parquet/format/parquet) are supported.
+Please refer to the detailed information about which [Parquet format features](/docs/modules/parquet/formats/parquet) are supported.
diff --git a/docs/modules/parquet/formats/geoparquet.md b/docs/modules/parquet/formats/geoparquet.md
index 851957ef3a..6665b0584f 100644
--- a/docs/modules/parquet/formats/geoparquet.md
+++ b/docs/modules/parquet/formats/geoparquet.md
@@ -7,9 +7,11 @@ Geoparquet is a set of conventions for storing geospatial data in Parquet files.
Standardization is happening at [geoparquet.org](https://geoparquet.org).
-GeoParquet is similar to GeoArrow, as both a binary columnar formats with a high degree of similarity.
+A GeoParquet file is a Parquet file that additionally follows these conventions:
-Essentially a GeoParquet file is a Parquet file that follows these conventions:
+- Geospatial metadata describing any geospatial columns is stored in the Parquet file's schema metadata (as stringified JSON).
+- Geometry columns are [WKB](/docs/modules/wkt/formats/wkb) encoded (additional encodings will likely be added).
-- JSON encoded metadata stored in the Parquet file's schema metadata.
-- WKB encoded geometry columns
+## Alternatives
+
+GeoParquet can be compared to GeoArrow, as both are binary columnar formats with a high degree of similarity.
diff --git a/docs/modules/parquet/formats/parquet.md b/docs/modules/parquet/formats/parquet.md
index e70d8c8e08..2154dc93eb 100644
--- a/docs/modules/parquet/formats/parquet.md
+++ b/docs/modules/parquet/formats/parquet.md
@@ -7,15 +7,14 @@ Parquet is a binary columnar format optimized for compact storage on disk.
The GitHUB specification of [Apache Parquet](https://github.com/apache/parquet-format/blob/master/README.md).
-## Column encodings
+## Pages
-Some encodings are intended to improve successive column compression by organizing data so that it is less random.
+Columns can be divided into pages (similar to Apache Arrow record batches) so that partial columns covering a range of rows can be read without reading the entire file.
-## Compared to similar formats
+## Alternatives
In contrast to Arrow which is designed to minimize serialization and deserialization, Parquet is optimized for storage on disk.
-
## Compression
Since Parquet is designed for read-write access, compression is applied per column chunk.
@@ -24,28 +23,30 @@ A wide range of compression codecs are supported. Internal parquet compression f
| Type           | Read | Write | Notes |
| -------------- | ---- | ----- | ----- |
-| `UNCOMPRESSED` | YES | YES |
-| `GZIP` | YES | YES |
-| `SNAPPY` | YES | YES | |
-| `BROTLI` | YES | No | |
-| `LZO` | NO | NO | There is currently no readily available browser-based LZO module for JS |
-| `LZ4` | YES | YES |
-| `LZ4_RAW` | YES | YES |
-| `ZSTD` | YES | YES | |
+| `UNCOMPRESSED` | ✅ | ✅ | |
+| `GZIP` | ✅ | ✅ | |
+| `SNAPPY` | ✅ | ✅ | |
+| `BROTLI` | ✅ | ❌ | |
+| `LZO` | ❌ | ❌ | There is currently no readily available browser-based LZO module for JS |
+| `LZ4` | ✅ | ✅ | |
+| `LZ4_RAW` | ✅ | ✅ | |
+| `ZSTD` | ✅ | ✅ | |
## Encoding
+Some encodings are intended to improve successive column compression by organizing data so that it is less random.
+
The following Parquet encodings are supported:
| Encoding | Read | Write | Types |
| ------------------------- | ---- | ----- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
-| `PLAIN` | YES | YES | All |
-| `PLAIN_DICTIONARY` | YES | YES | All |
-| `RLE_DICTIONARY` | YES | NO | All |
-| `DELTA_BINARY_PACKED` | NO | NO | `INT32`, `INT64`, `INT_8`, `INT_16`, `INT_32`, `INT_64`, `UINT_8`, `UINT_16`, `UINT_32`, `UINT_64`, `TIME_MILLIS`, `TIME_MICROS`, `TIMESTAMP_MILLIS`, `TIMESTAMP_MICROS` |
-| `DELTA_BYTE_ARRAY` | NO | NO | `BYTE_ARRAY`, `UTF8` |
-| `DELTA_LENGTH_BYTE_ARRAY` | NO | NO | `BYTE_ARRAY`, `UTF8` |
+| `PLAIN` | ✅ | ✅ | All |
+| `PLAIN_DICTIONARY` | ✅ | ✅ | All |
+| `RLE_DICTIONARY` | ✅ | ❌ | All |
+| `DELTA_BINARY_PACKED` | ❌ | ❌ | `INT32`, `INT64`, `INT_8`, `INT_16`, `INT_32`, `INT_64`, `UINT_8`, `UINT_16`, `UINT_32`, `UINT_64`, `TIME_MILLIS`, `TIME_MICROS`, `TIMESTAMP_MILLIS`, `TIMESTAMP_MICROS` |
+| `DELTA_BYTE_ARRAY` | ❌ | ❌ | `BYTE_ARRAY`, `UTF8` |
+| `DELTA_LENGTH_BYTE_ARRAY` | ❌ | ❌ | `BYTE_ARRAY`, `UTF8` |
## Repetition
@@ -53,9 +54,9 @@ There are three repetition types in Parquet:
| Repetition | Supported |
| ---------- | --------- |
-| `REQUIRED` | YES |
-| `OPTIONAL` | YES |
-| `REPEATED` | YES |
+| `REQUIRED` | ✅ |
+| `OPTIONAL` | ✅ |
+| `REPEATED` | ✅ |
### Record Shredding
diff --git a/docs/modules/pcd/api-reference/pcd-loader.md b/docs/modules/pcd/api-reference/pcd-loader.md
index d9716cb23e..b63885c919 100644
--- a/docs/modules/pcd/api-reference/pcd-loader.md
+++ b/docs/modules/pcd/api-reference/pcd-loader.md
@@ -16,7 +16,7 @@ Note: Currently supports `ascii`, `binary` and compressed binary files.
## Usage
-```js
+```typescript
import {PCDLoader} from '@loaders.gl/pcd';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/ply/api-reference/ply-loader.md b/docs/modules/ply/api-reference/ply-loader.md
index b6d2d5707a..2e84ef6e10 100644
--- a/docs/modules/ply/api-reference/ply-loader.md
+++ b/docs/modules/ply/api-reference/ply-loader.md
@@ -14,7 +14,7 @@ The `PLYLoader` parses simple meshes in the Polygon File Format or the Stanford
## Usage
-```js
+```typescript
import {PLYLoader} from '@loaders.gl/ply';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/pmtiles/README.md b/docs/modules/pmtiles/README.md
new file mode 100644
index 0000000000..bcce3fdf12
--- /dev/null
+++ b/docs/modules/pmtiles/README.md
@@ -0,0 +1,3 @@
+# @loaders.gl/pmtiles
+
+Support for loading [pmtiles](/docs/modules/pmtiles/formats/pmtiles) format tiles.
diff --git a/docs/modules/pmtiles/api-reference/pmtiles-source.md b/docs/modules/pmtiles/api-reference/pmtiles-source.md
new file mode 100644
index 0000000000..ec1d342cae
--- /dev/null
+++ b/docs/modules/pmtiles/api-reference/pmtiles-source.md
@@ -0,0 +1,28 @@
+# PMTilesSource 🆕
+
+The `PMTilesSource` reads individual tiles from a PMTiles archive file.
+
+| Loader | Characteristic |
+| --------------------- | ----------------------------------------------- |
+| File Extension | `.pmtiles` |
+| File Type | Binary Archive |
+| File Format | [PMTiles](/docs/modules/pmtiles/formats/pmtiles) |
+| Data Format | Metadata |
+| Decoder Type | Asynchronous |
+| Worker Thread Support | No |
+| Streaming Support | No |
+
+## Usage
+
+```typescript
+import {PMTilesSource} from '@loaders.gl/pmtiles';
+import {load} from '@loaders.gl/core';
+
+const source = new PMTilesSource({url});
+const tile = await source.getTile(...);
+```
+
+## Options
+
+| Option | Type | Default | Description |
+| ------ | ---- | ------- | ----------- |
diff --git a/docs/modules/pmtiles/formats/pmtiles.md b/docs/modules/pmtiles/formats/pmtiles.md
new file mode 100644
index 0000000000..a71c97e939
--- /dev/null
+++ b/docs/modules/pmtiles/formats/pmtiles.md
@@ -0,0 +1,164 @@
+# PMTiles
+
+PMTiles is a single-file archive format for tiled data designed to enable individual tiles to be loaded via HTTP range request access. A PMTiles archive can be hosted on a commodity storage platform such as Amazon S3.
+
+- *[PMTiles](https://github.com/protomaps/PMTiles)*
+
+## Overview
+
+TBA
+
+## Versions
+
+## Version 3
+
+**File Structure**
+
+- 97% smaller overhead - Spec version 2 would always issue a 512 kilobyte initial request; version 3 reduces this to 16 kilobytes. What remains the same is that nearly any map tile can be retrieved in at most two additional requests.
+- Unlimited metadata - version 2 had a hard cap on the amount of JSON metadata of about 300 kilobytes; version 3 removes this limit. This is essential for tools like tippecanoe to store detailed column statistics. Essential archive information, such as tile type and compression methods, is stored in a binary header separate from application metadata.
+- Hilbert tile IDs - tiles internally are addressed by a single 64-bit Hilbert tile ID instead of Z/X/Y. See the blog post on Tile IDs for details.
+- Archive ordering - An optional clustered mode enforces that tile contents are laid out in Tile ID order.
+- Compressed directories and metadata - Directories used to fetch offsets of tile data consume about 10% the space of those in version 2. See the blog post on compressed directories for details.
+
+**JavaScript**
+
+- Compression - The TypeScript pmtiles library now includes a decompressor - fflate - to allow reading compressed vector tile archives directly in the browser. This reduces the size and latency of vector tiles by as much as 70%.
+- Tile Cancellation - All JavaScript plugins now support tile cancellation, meaning quick zooming across many levels will interrupt the loading of tiles that are never shown. This has a significant effect on the perceived user experience, as tiles at the end of an animation will appear earlier.
+- ETag support - clients can detect when files change on static storage by reading the ETag HTTP header. This means that PMTiles-based map applications can update datasets in place at low frequency without running into caching problems.
+
+## Version 3 Specification
+
+### File structure
+
+A PMTiles archive is a single-file archive of square tiles with five main sections:
+
+1. A fixed-size, 127-byte **Header** starting with `PMTiles` and then the spec version - currently `3` - that contains offsets to the next sections.
+2. A root **Directory**, described below. The Header and Root combined must be less than 16,384 bytes.
+3. JSON metadata.
+4. Optionally, a section of **Leaf Directories**, encoded the same way as the root.
+5. The tile data.
+
+### Entries
+
+A Directory is a list of `Entries`, in ascending order by `TileId`:
+
+ Entry = (TileId uint64, Offset uint64, Length uint32, RunLength uint32)
+
+* `TileId` starts at 0 and corresponds to a cumulative position on the series of square Hilbert curves starting at z=0.
+* `Offset` is the position of the tile in the file relative to the start of the data section.
+* `Length` is the size of the tile in bytes.
+* `RunLength` is how many times this tile is repeated: for example, `TileId=5, RunLength=2` means that tile is present at IDs 5 and 6.
+* If `RunLength=0`, the offset/length points to a Leaf Directory where `TileId` is the first entry.
+
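+The entry layout above maps naturally to a small TypeScript type. A minimal sketch (names are illustrative, not taken from the pmtiles library) of how `RunLength` is interpreted:
+
+```typescript
+type Entry = {tileId: number; offset: number; length: number; runLength: number};
+
+// True if this entry's run covers the given tile ID.
+// For example, {tileId: 5, runLength: 2} covers tile IDs 5 and 6.
+function entryCoversTile(entry: Entry, tileId: number): boolean {
+  return entry.runLength > 0 && tileId >= entry.tileId && tileId < entry.tileId + entry.runLength;
+}
+```
+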
+### Directory Serialization
+
+Entries are stored in memory as integers, but serialized to disk using these compression steps:
+
+1. A little-endian varint indicating the # of entries
+2. Delta encoding of `TileId`
+3. Zeroing of `Offset`:
+ * `0` if it is equal to the `Offset` + `Length` of the previous entry
+ * `Offset+1` otherwise
+4. Varint encoding of all numbers
+5. Columnar ordering: all `TileId`s, all `RunLength`s, all `Length`s, then all `Offset`s
+6. Finally, general purpose compression as described by the `Header`'s `InternalCompression` field
+
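+A sketch of serialization steps 1-5 (the final general-purpose compression of step 6 is omitted); names are illustrative and not taken from the pmtiles library:
+
+```typescript
+type Entry = {tileId: number; offset: number; length: number; runLength: number};
+
+// Little-endian base-128 varint. Sufficient for values below 2^53;
+// a full implementation would use BigInt for 64-bit TileIds and Offsets.
+function writeVarint(value: number, out: number[]): void {
+  while (value >= 0x80) {
+    out.push((value & 0x7f) | 0x80);
+    value = Math.floor(value / 128);
+  }
+  out.push(value);
+}
+
+function serializeDirectory(entries: Entry[]): Uint8Array {
+  const bytes: number[] = [];
+  writeVarint(entries.length, bytes); // 1. number of entries
+  let lastId = 0;
+  for (const entry of entries) {
+    writeVarint(entry.tileId - lastId, bytes); // 2. delta-encoded TileIds first
+    lastId = entry.tileId;
+  }
+  for (const entry of entries) {
+    writeVarint(entry.runLength, bytes); // 5. then all RunLengths
+  }
+  for (const entry of entries) {
+    writeVarint(entry.length, bytes); // 5. then all Lengths
+  }
+  for (let i = 0; i < entries.length; i++) {
+    const prev = entries[i - 1];
+    const contiguous = prev !== undefined && entries[i].offset === prev.offset + prev.length;
+    writeVarint(contiguous ? 0 : entries[i].offset + 1, bytes); // 3. zeroed Offsets last
+  }
+  return new Uint8Array(bytes); // 6. would then be compressed per InternalCompression
+}
+```
+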
+### Directory Hierarchy
+
+* The number of entries in the root directory and leaf directories is up to the implementation.
+* However, the compressed size of the header plus root directory is required in v3 to be under **16,384 bytes**. This is to allow latency-optimized clients to prefetch the root directory and guarantee it is complete. A sophisticated writer might need several attempts to optimize this.
+* Root size, leaf sizes and depth should be configurable by the user to optimize for different trade-offs: cost, bandwidth, latency.
+
+### Header Design
+
+*Certain fields belonging to metadata in v2 are promoted to fixed-size header fields. This allows a map container to be initialized to the desired extent or center without blocking on the JSON metadata, and allows proxies to return well-defined HTTP headers.*
+
+The `Header` is 127 bytes, with little-endian integer values:
+
+| offset | description | width |
+| ------ | ----------------------------------------------------------------------------------------- | ----- |
+| 0 | magic number `PMTiles` | 7 |
+| 7 | spec version, currently `3` | 1 |
+| 8 | offset of root directory | 8 |
+| 16 | length of root directory | 8 |
+| 24 | offset of JSON metadata, possibly compressed by `InternalCompression` | 8 |
+| 32 | length of JSON metadata | 8 |
+| 40 | offset of leaf directories | 8 |
+| 48 | length of leaf directories | 8 |
+| 56 | offset of tile data | 8 |
+| 64 | length of tile data | 8 |
+| 72 | # of addressed tiles, 0 if unknown | 8 |
+| 80 | # of tile entries, 0 if unknown | 8 |
+| 88 | # of tile contents, 0 if unknown | 8 |
+| 96 | boolean clustered flag, `0x1` if true | 1 |
+| 97 | `InternalCompression` enum (0 = Unknown, 1 = None, 2 = Gzip, 3 = Brotli, 4 = Zstd) | 1 |
+| 98 | `TileCompression` enum | 1 |
+| 99     | tile type enum (0 = Unknown/Other, 1 = MVT (PBF Vector Tile), 2 = PNG, 3 = JPEG, 4 = WEBP)  | 1     |
+| 100 | min zoom | 1 |
+| 101 | max zoom | 1 |
+| 102 | min longitude (signed 32-bit integer: longitude * 10,000,000) | 4 |
+| 106 | min latitude | 4 |
+| 110 | max longitude | 4 |
+| 114 | max latitude | 4 |
+| 118 | center zoom | 1 |
+| 119 | center longitude | 4 |
+| 123 | center latitude | 4 |
+
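+For illustration, a few of these fields can be read with a `DataView`, using the offsets from the table above (a partial sketch with illustrative names, not a complete reader):
+
+```typescript
+// `bytes` must hold at least the first 127 bytes of the archive.
+function parseHeader(bytes: Uint8Array) {
+  const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
+  const magic = new TextDecoder().decode(bytes.subarray(0, 7));
+  if (magic !== 'PMTiles') throw new Error('Not a PMTiles archive');
+  return {
+    specVersion: view.getUint8(7), // 3
+    rootDirectoryOffset: view.getBigUint64(8, true), // all integers are little-endian
+    rootDirectoryLength: view.getBigUint64(16, true),
+    tileDataOffset: view.getBigUint64(56, true),
+    internalCompression: view.getUint8(97),
+    tileType: view.getUint8(99),
+    minZoom: view.getUint8(100),
+    maxZoom: view.getUint8(101),
+    minLongitude: view.getInt32(102, true) / 10_000_000,
+    minLatitude: view.getInt32(106, true) / 10_000_000
+  };
+}
+```
+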
+### Notes
+
+* **# of addressed tiles**: the total number of tiles before run-length encoding, i.e. `Sum(RunLength)` over all entries.
+* **# of tile entries**: the total number of entries across all directories where `RunLength > 0`.
+* **# of tile contents**: the number of referenced blobs in the tile section, i.e. the number of unique offsets. If the archive is completely deduplicated, this is equal to the number of unique tile contents. If there is no deduplication, this is equal to the number of tile entries above.
+* **boolean clustered flag**: if true, blobs in the data section are ordered by Hilbert `TileId`. When writing with deduplication, this means that offsets are either contiguous with the previous offset+length, or refer to a lesser offset.
+* **compression enum**: Mandatory; tells the client how to decompress tile contents, and allows correct `Content-Encoding` headers to be provided to browsers.
+* **tile type**: A hint as to the tile contents. Clients and proxies may use this to:
+  * Automatically determine a visualization method
+  * Provide a conventional MIME type in the `Content-Type` HTTP header
+  * Enforce a canonical extension, e.g. `.mvt`, `.png`, `.jpeg`, `.webp`, to prevent duplication in caches
+
+### Organization
+
+In most cases, the archive should be in the order `Header`, Root Directory, JSON Metadata, Leaf Directories, Tile Data. It is possible to relocate sections other than `Header` arbitrarily, but no current writers/readers take advantage of this. A future design may allow for reverse-ordered archives to enable single-pass writing.
+
+
+## Version 2
+
+*Note: this is deprecated in favor of spec version 3.*
+
+PMTiles is a binary serialization format designed for two main access patterns: over the network, via HTTP 1.1 Byte Serving (`Range:` requests), or via memory-mapped files on disk. **All integer values are little-endian.**
+
+A PMTiles archive is composed of:
+* a fixed-size 512,000-byte header section
+* followed by any number of tiles in arbitrary format
+* optionally followed by any number of *leaf directories*
+
+### Header
+* The header begins with a 2-byte magic number, "PM"
+* Followed by 2 bytes, the PMTiles specification version (currently 2).
+* Followed by 4 bytes, the length of metadata (M bytes)
+* Followed by 2 bytes, the number of entries in the *root directory* (N entries)
+* Followed by M bytes of metadata, which **must be a JSON string with bounds, minzoom and maxzoom properties (new in v2)**
+* Followed by N * 17 bytes, the root directory.
+
+### Directory structure
+
+A directory is a contiguous sequence of 17-byte entries. A directory can have at most 21,845 entries. **A directory must be sorted by Z, X and then Y order (new in v2).**
+
+An entry consists of:
+* 1 byte: the zoom level (Z) of the entry, with the top bit set to 1 instead of 0 to indicate the offset/length points to a leaf directory and not a tile.
+* 3 bytes: the X (column) of the entry.
+* 3 bytes: the Y (row) of the entry.
+* 6 bytes: the offset of where the tile begins in the archive.
+* 4 bytes: the length of the tile, in bytes.
+
+**All leaf directory entries follow non-leaf entries. All leaf directories in a single directory must have the same Z value. (new in v2).**
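+
+For illustration, one 17-byte entry can be decoded with a `DataView` as sketched below (names are illustrative; the 3- and 6-byte integers have to be assembled manually):
+
+```typescript
+// Decode the v2 directory entry starting at byte offset `pos` (all values little-endian).
+function readEntryV2(view: DataView, pos: number) {
+  const readUint24 = (p: number) =>
+    view.getUint8(p) + view.getUint8(p + 1) * 0x100 + view.getUint8(p + 2) * 0x10000;
+  const readUint48 = (p: number) => view.getUint16(p, true) + view.getUint32(p + 2, true) * 0x10000;
+  const zRaw = view.getUint8(pos);
+  return {
+    isLeafDirectory: (zRaw & 0x80) !== 0, // top bit set: offset/length point to a leaf directory
+    z: zRaw & 0x7f,
+    x: readUint24(pos + 1),
+    y: readUint24(pos + 4),
+    offset: readUint48(pos + 7),
+    length: view.getUint32(pos + 13, true)
+  };
+}
+```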
+
+### Notes
+* A full directory of 21,845 entries holds exactly a complete pyramid with 8 levels, or 1+4+16+64+256+1024+4096+16384.
+* A PMTiles archive with less than 21,845 tiles should have a root directory and no leaf directories.
+* Multiple tile entries can point to the same offset; this is useful for de-duplicating certain tiles, such as an empty "ocean" tile.
+* Analogously, multiple leaf directory entries can point to the same offset; this can avoid inefficiently-packed small leaf directories.
+* The tentative media type for PMTiles archives is `application/vnd.pmtiles`.
+
+### Implementation suggestions
+
+* PMTiles is designed to make implementing a writer simple. Reserve 512KB, then write all tiles, recording their entry information; then write all leaf directories; finally, rewind to 0 and write the header.
+* The order of tile data in the archive is unspecified; an optimized implementation should arrange tiles on a 2D space-filling curve.
+* PMTiles readers should cache directory entries by byte offset, not by Z/X/Y. This means that deduplicated leaf directories result in cache hits.
\ No newline at end of file
diff --git a/docs/modules/polyfills/api-reference/README.md b/docs/modules/polyfills/api-reference/README.md
index 2e096116e0..0dfc331766 100644
--- a/docs/modules/polyfills/api-reference/README.md
+++ b/docs/modules/polyfills/api-reference/README.md
@@ -1,10 +1,8 @@
# Overview
-The optional `@loaders.gl/polyfills` module installs support for Node.js and older browsers.
+The `@loaders.gl/polyfills` module installs support for Node.js. This module should be imported before you call any loaders.gl functionality under Node.js.
-loaders.gl is based on the HTML5 API provided by modern, evergreen browsers. Older browsers (mainly Edge and IE11) as well as versions of Node.js prior to v12 do not provide certain classes that loaders.gl depends on.
-
-Note that while `@loaders.gl/polyfills` is designed to work seamlessly with other loaders.gl modules, using it is not a requirement. There are other good polyfill modules available on `npm` that can be used in its place.
+loaders.gl is based on the HTML5 API provided by modern, evergreen browsers.
## Installation
@@ -16,33 +14,28 @@ npm install @loaders.gl/polyfills
Just import `@loaders.gl/polyfills` before you start using other loaders.gl modules.
-```js
+```typescript
import '@loaders.gl/polyfills';
import '@loaders.gl/core';
```
-To use the experimental `Blob` and `File` polyfills
+## Features
-```js
-import {installFilePolyfills} from '@loaders.gl/polyfills';
-installFilePolyfills();
-```
+The polyfills module installs the following capabilities.
-## Included Polyfills
+- fetching files from Node file system
+- Node Filesystem implementation
+- Node ReadableFile and WritableFile implementations
+- Node Crypto class
+- Node Stream support
+- Node library loading
+- Image parsing and encoding under Node.js
+
+## Deprecated polyfills
-| Polyfill | Node | Browser | Comments |
-| ------------------------------- | ------------ | -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| `TextEncoder`/`TextDecoder` | Node.js < 11 | Yes (Older browsers) | Only UTF8 is guaranteed to be supported |
-| `atob`/`btoa` | All versions | No | Note: these functions are [not unicode safe](https://developer.mozilla.org/en-US/docs/Web/API/WindowBase64/Base64_encoding_and_decoding#The_Unicode_Problem), but OK to use for test cases. |
-| `fetch` | All versions | No | A subset of the fetch API is supported, see below. |
-| `Response` | All versions | No | A subset of the `Response` API is supported, see below. |
-| `Headers` | All versions | No | A subset of the fetch API is supported, see below. |
-| `Blob` (Experimental) | All versions | No | A subset of the fetch API is supported, see below. |
-| `File` (Experimental) | All versions | No | A subset of the fetch API is supported, see below. |
-| `FileReader` (Experimental) | All versions | No | A subset of the fetch API is supported, see below. |
-| `ReadableStream` (Experimental) | All versions | No | A subset of the ReadableStream API is supported. |
+Before Node v18, `fetch` needed to be polyfilled. The `@loaders.gl/polyfills` module still conditionally installs a fetch polyfill on Node 16, but this is expected to be removed in the next major release.
-## fetch Polyfill
+### fetch Polyfill
The Node.js `fetch`, `Response` and `Headers` polyfills supports a large subset of the browser fetch API, including:
@@ -55,18 +48,9 @@ The Node.js `fetch`, `Response` and `Headers` polyfills supports a large subset
The Node.js `fetch` is able to follow 30X redirect: if `Response` has status 300-399 and `location` header is set, the `fetch` polyfill re-requests data from `location`.
-# TextEncoder and TextDecoder Polyfills
-
-`TextEncoder` and `TextDecoder` polyfills are provided to ensure these APIs are always available. In modern browsers these will evaluate to the built-in objects of the same name, however under Node.js polyfills are transparently installed.
-
-Note: The provided polyfills only guarantee UTF8 support.
-
## Remarks
-- Applications should only install this module if they need to run under older environments. While the polyfills are only installed at runtime if the platform does not already support them, importing this module will increase the application's bundle size.
-- Refer to browser documentation for the usage of these classes, e.g. MDN.
-- In the browser, overhead of using these imports is not as high, as most polyfills are only bundled under Node.js.
-- If working under older browsers, e.g. IE11, you may need to install your own TextEncoder/TextDecoder polyfills before loading this library
+- The polyfills module can safely be imported in the browser. It is designed to be a no-op in this case, though if you are using new cutting-edge bundlers, they may not respect this configuration.
## Attribution
diff --git a/docs/modules/schema/README.md b/docs/modules/schema/README.md
index 9dc2df7e0d..baf6c9744e 100644
--- a/docs/modules/schema/README.md
+++ b/docs/modules/schema/README.md
@@ -11,11 +11,11 @@
The table API is modelled after a subset of the Apache Arrow API:
-| Class | Arrow Counterpart | Description |
-| ------------------------------------------------------------------ | ----------------- | ------------ |
-| [`Table`](/docs/modules/schema/api-reference/table) | Table | Table |
-| [`TableSchema`](/docs/modules/schema/api-reference/table-schema) | `Schema` | Table schema |
-| [`TableBatch`](/docs/modules/schema/api-reference/table-batch) | `RecordBatch` | Table batch |
+| Class | Arrow Counterpart | Description |
+| --------------------------------------------------------- | ----------------- | ----------- |
+| [`Table`](/docs/modules/schema/api-reference/table) | `Table` | Table |
+| [`Schema`](/docs/modules/schema/api-reference/schema) | `Schema` | Schema |
+| [`Batch`](/docs/modules/schema/api-reference/table-batch) | `RecordBatch` | Batch |
## Determining shape of loaded data
@@ -44,12 +44,12 @@ processTile(tile.data);
### Table Category
-| Shape | Category | Types / Description |
-| --- | --- | --- |
-| `table` | `Table` |
-| `array-row-table` | `ArrayRowTable` |
+| Shape | Category | Types / Description |
+| ------------------ | ---------------- | ------------------- |
+| `table` | `Table` |
+| `array-row-table` | `ArrayRowTable` |
| `object-row-table` | `ObjectRowTable` |
-| `columnar-table` | `ColumnarTable` |
+| `columnar-table` | `ColumnarTable` |
- Tables can be
- row-oriented, i.e. organized as an array of rows
@@ -59,7 +59,7 @@ Rows can contain either
- an array of values, where the column name is found in the schema.
- object with key-value pairs, where the key is the column name
-```json
+```json
{
"shape": ,
"data":
@@ -69,10 +69,10 @@ Rows can contain either
## GIS Category
-| Shape | Category | Types / Description |
-| --- | --- | --- |
-| `geojson` | `GeoJSON` | GeoJSON is a `features` array wrapped at the top level |
-| `array-row-table` | `ArrayRowTable` |
+| Shape | Category | Types / Description |
+| ------------------ | ---------------- | ---------------------------------------------------------------- |
+| `geojson` | `GeoJSON` | GeoJSON is a `features` array wrapped at the top level |
+| `array-row-table` | `ArrayRowTable` |
| `object-row-table` | `ObjectRowTable` |
-| `geojson-table` | `GeojsonTable` | GeoJSON table essentially contains the `features` array from the
+| `geojson-table`    | `GeojsonTable`   | GeoJSON table essentially contains the `features` array from the GeoJSON `FeatureCollection` |
diff --git a/docs/modules/schema/api-reference/apache-arrow.md b/docs/modules/schema/api-reference/apache-arrow.md
new file mode 100644
index 0000000000..ff37b1100c
--- /dev/null
+++ b/docs/modules/schema/api-reference/apache-arrow.md
@@ -0,0 +1,4 @@
+# Apache Arrow
+
+loaders.gl aims to provide strong support for and interoperability with Apache Arrow.
+
diff --git a/docs/modules/schema/api-reference/geometries.md b/docs/modules/schema/api-reference/geometries.md
new file mode 100644
index 0000000000..ed7514f50b
--- /dev/null
+++ b/docs/modules/schema/api-reference/geometries.md
@@ -0,0 +1,27 @@
+# Geometries
+
+
+## GeoJSONTable
+
+The `GeoJSONTable` is one of the standard data return formats from loaders.gl loaders.
+It is a GeoJSON FeatureCollection with two extra fields (`shape` and `schema`).
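+
+A rough TypeScript sketch of this shape (simplified; the authoritative types live in `@loaders.gl/schema`):
+
+```typescript
+// Simplified stand-ins for the real GeoJSON and Schema types.
+type Feature = {type: 'Feature'; geometry: unknown; properties: Record<string, unknown>};
+type Schema = {fields: {name: string; type: string}[]; metadata: Record<string, string>};
+
+type GeoJSONTable = {
+  shape: 'geojson-table'; // discriminant shared by all loaders.gl table types
+  schema?: Schema; // optional column schema
+  // ...plus the regular GeoJSON FeatureCollection fields:
+  type: 'FeatureCollection';
+  features: Feature[];
+};
+```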
+
+
+## Binary Geometries
+
+loaders.gl defines a Binary Geometry Format.
+
+The format is designed to work directly with the binary support in deck.gl layers.
+
+This format is currently described in more detail in the `@loaders.gl/gis` module documentation.
+
+## Tessellation
+
+Some loaders can tessellate polygon geometries into triangles.
+
+This is typically done with earcut, which is a very fast polygon tessellator. The drawback with earcut is that it trades robustness for speed, so unusually complex or self-intersecting polygons may occasionally be triangulated incorrectly.
+
+There can also be problems if polygons have very large numbers of holes.
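+
+As a rough sketch of what the tessellation step looks like with the standalone `earcut` package (shown here directly, outside of any loaders.gl API):
+
+```typescript
+import earcut from 'earcut';
+
+// A square outer ring with a square hole, as flat [x, y, x, y, ...] coordinates.
+const vertices = [0, 0, 100, 0, 100, 100, 0, 100, 20, 20, 80, 20, 80, 80, 20, 80];
+const holeIndices = [4]; // the hole ring starts at vertex index 4
+
+// Returns a flat array of vertex indices, three per triangle.
+const triangles = earcut(vertices, holeIndices, 2);
+```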
+
diff --git a/docs/modules/schema/api-reference/schema.md b/docs/modules/schema/api-reference/schema.md
new file mode 100644
index 0000000000..35e39c9d5f
--- /dev/null
+++ b/docs/modules/schema/api-reference/schema.md
@@ -0,0 +1,19 @@
+# Schema
+
+loaders.gl provides a simple serializable schema class to help describe tables and table-like data.
+The Schema is modelled after Arrow.
+
+
+## Schema Deduction
+
+Schemas can be deduced, but unless the data format is binary, this can lead to mistakes.
+
+For instance, should a column with zip codes in a CSV be treated as strings or numbers? (Most auto detection systems would classify the type as numbers, but most users would prefer for that column to be classified as string, to avoid potential dropping of leading zeroes among other things.)
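+
+In such cases an application can declare the column type explicitly instead of relying on deduction. The schema literal below is a hypothetical sketch; the field type strings follow the Arrow-style names used by loaders.gl:
+
+```typescript
+// A hand-written schema that forces the `zip` column to be text rather than a number.
+const schema = {
+  metadata: {},
+  fields: [
+    {name: 'zip', type: 'utf8', nullable: false, metadata: {}},
+    {name: 'population', type: 'float64', nullable: true, metadata: {}}
+  ]
+};
+```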
+
+## Schema Serialization
+
+..
+
+## Apache Arrow Schemas
+
+...
diff --git a/docs/modules/schema/api-reference/table-schema.md b/docs/modules/schema/api-reference/table-schema.md
deleted file mode 100644
index ab2b1b22ec..0000000000
--- a/docs/modules/schema/api-reference/table-schema.md
+++ /dev/null
@@ -1 +0,0 @@
-# TableSchema
diff --git a/docs/modules/schema/api-reference/table.md b/docs/modules/schema/api-reference/table.md
index f91505acf6..55b81ecc8f 100644
--- a/docs/modules/schema/api-reference/table.md
+++ b/docs/modules/schema/api-reference/table.md
@@ -1 +1,30 @@
# Table
+
+loaders.gl defines a number of table types.
+
+- `ObjectRowTable`
+- `ArrayRowTable`
+- `GeoJSONTable`
+- `ColumnarTable`
+- `ArrowTable`
+
+These all have a `shape` field on the top level.
+
+(If you are an advanced TypeScript programmer, you will appreciate that this lets TypeScript treat the table types as a "discriminated union": once the `shape` field has been checked in an `if` or `switch` statement, the type of the table is narrowed automatically.)
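+
+For example, a function can switch on `shape` and TypeScript narrows the table type inside each branch. The sketch below uses simplified local table types rather than the real exports:
+
+```typescript
+// Simplified stand-ins for the loaders.gl table types.
+type ObjectRowTable = {shape: 'object-row-table'; data: Record<string, unknown>[]};
+type ColumnarTable = {shape: 'columnar-table'; data: Record<string, ArrayLike<unknown>>};
+type Table = ObjectRowTable | ColumnarTable;
+
+// Because `shape` is a literal discriminant, each case below is fully typed.
+function getRowCount(table: Table): number {
+  switch (table.shape) {
+    case 'object-row-table':
+      return table.data.length;
+    case 'columnar-table': {
+      const columns = Object.values(table.data);
+      return columns.length > 0 ? columns[0].length : 0;
+    }
+    default:
+      throw new Error('unknown table shape');
+  }
+}
+```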
+
+
+## Table Schemas
+
+Each table has an optional `schema` field. If it is present, it contains a list of fields (name, type and metadata for each field), as well as metadata for the table itself.
+
+There are also utilities for deducing schemas.
+
+## Table Utilities
+
+A set of utilities are provided to work with tables independently of which of the supported representations they are in.
+
+- `tableLength`
+- ...
+
+
+
diff --git a/docs/modules/shapefile/api-reference/dbf-loader.md b/docs/modules/shapefile/api-reference/dbf-loader.md
index 0eb6a21180..1ad7075ae3 100644
--- a/docs/modules/shapefile/api-reference/dbf-loader.md
+++ b/docs/modules/shapefile/api-reference/dbf-loader.md
@@ -22,7 +22,7 @@ Note: Most applications will want to use the `ShapefileLoader` instead of this l
The `DBFLoader` parses feature attributes from the Shapefile format.
-```js
+```typescript
import {DBFLoader} from '@loaders.gl/shapefile';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/shapefile/api-reference/shapefile-loader.md b/docs/modules/shapefile/api-reference/shapefile-loader.md
index 71fd86ff5d..ab8dceb65a 100644
--- a/docs/modules/shapefile/api-reference/shapefile-loader.md
+++ b/docs/modules/shapefile/api-reference/shapefile-loader.md
@@ -19,7 +19,7 @@ Shapefile loader
## Usage
-```js
+```typescript
import {ShapefileLoader} from '@loaders.gl/shapefile';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/shapefile/api-reference/shp-loader.md b/docs/modules/shapefile/api-reference/shp-loader.md
index 9ed16ea946..97b72faf9f 100644
--- a/docs/modules/shapefile/api-reference/shp-loader.md
+++ b/docs/modules/shapefile/api-reference/shp-loader.md
@@ -21,7 +21,7 @@ Note: Most applications will want to use the `ShapefileLoader` instead of this l
## Usage
-```js
+```typescript
import {SHPLoader} from '@loaders.gl/shapefile';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/shapefile/formats/shapefile.md b/docs/modules/shapefile/formats/shapefile.md
index f1b793d757..c50c288111 100644
--- a/docs/modules/shapefile/formats/shapefile.md
+++ b/docs/modules/shapefile/formats/shapefile.md
@@ -1,23 +1,24 @@
# Shapefile
-- *[`@loaders.gl/shapefile`](/docs/modules/shapefile/formats/shapefile)*
-- *https://www.clicketyclick.dk/databases/xbase/format/data_types.html*
-- *http://www.dbase.com/Knowledgebase/INT/db7_file_fmt.htm*
-- *http://webhelp.esri.com/arcgisdesktop/9.3/index.cfm?TopicName=Geoprocessing_considerations_for_shapefile_output*
-- *https://www.loc.gov/preservation/digital/formats/fdd/fdd000326.shtml*
-- *https://support.esri.com/en/technical-article/000013192*
+The ESRI Shapefile is a file format for storing geospatial vector data.
-ESRI Shapefiles are a popular file format for storing geospatial vector data.
+- *[`@loaders.gl/shapefile`](/docs/modules/shapefile)*
+- *[Wikipedia](https://en.wikipedia.org/wiki/Shapefile)* - *[ESRI Shapefile Whitepaper](https://www.esri.com/content/dam/esrisites/sitecore-archive/Files/Pdfs/library/whitepapers/pdfs/shapefile.pdf)* - *[Notes on Shapefile usage](http://webhelp.esri.com/arcgisdesktop/9.3/index.cfm?TopicName=Geoprocessing_considerations_for_shapefile_output)*
+- *[DBF header](http://www.dbase.com/Knowledgebase/INT/db7_file_fmt.htm)* - *[data types](https://www.clicketyclick.dk/databases/xbase/format/data_types.html)* - *[code pages](https://support.esri.com/en/technical-article/000013192)* - *[implementation notes](https://www.loc.gov/preservation/digital/formats/fdd/fdd000326.shtml)*
-## Multi-file Summary
+*Note that Shapefiles are falling out of favor in modern usage (likely due to the significant inconvenience of having to deal with multiple files). However, a lot of valuable geospatial data is still distributed in Shapefile format, and sometimes only in this format.
+Additional information and some strong opinions can be found at [switchfromshapefile.org](http://switchfromshapefile.org/).*
-The format consists of a number of files that must be stored together
-(in the same directory, and with the same file name but different extensions).
-Files with extensions `.shp`, `.shx`, `.dbf` must exist;
-additional files with other extensions such as `.prj` and `.cpg` may exist.
+## A multi-file format
-A common problem with shapefiles is that the user only opens the shp file but not the dbf.
+A Shapefile consists of a number of files that must be read and written together.
+Because of this, they are typically stored together with the same file name but different extensions.
+These related files are usually stored in the same directory or inside a common zip archive.
+While it is possible to load just the geometries from a `.shp` file, the files with extensions `.shp`, `.shx` and `.dbf` are generally expected to exist,
+and additional files with other extensions, such as `.prj` and `.cpg`, may also be present.
+
+A common problem with shapefiles is that the user only opens the `.shp` file but not the accompanying files such as `.dbf`.
| File | Type | Contents |
| ------ | ------ | -------------------------------------------------------------------------------------------------------------- |
@@ -26,3 +27,48 @@ A common problem with shapefiles is that the user only opens the shp file but no
| `.shx` | Binary | The index (technically required, however it is sometimes possible to open shapefiles without the index) |
| `.prj` | Text | A small usually single line text file containing a WKT-CRS style projection. WGS84 is assumed if not present. |
| `.cpg` | Text | A small text file containing a text encoding name for the DBF text fields. `latin1` is assumed if not present. |
+
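+With loaders.gl the `load()` call is typically pointed at the `.shp` URL, with the sibling files kept next to it so they can be resolved from the same location (a sketch with a hypothetical URL; see the `ShapefileLoader` API reference for details):
+
+```typescript
+import {load} from '@loaders.gl/core';
+import {ShapefileLoader} from '@loaders.gl/shapefile';
+
+// parcels.dbf, parcels.prj and parcels.cpg are expected to sit next to parcels.shp.
+const data = await load('https://example.com/data/parcels.shp', ShapefileLoader);
+// The result combines the geometries from the .shp file with attributes from the .dbf file.
+console.log(data);
+```
+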
+### Coordinate Systems
+
+Arbitrary coordinate reference systems are supported for Shapefiles.
+
+Such coordinate systems are reprojected to WGS84 on import.
+
+### Encodings
+
+The optional "code page" file (`.cpg`) specifies the encoding of any text data in the Shapefile (or more precisely, in the sidecar `.dbf` file). If no `.cpg` file is provided, `latin1` encoding is assumed.
+
+### Geometries
+
+A Shapefile always encodes a single type of geometry. The following geometry types are supported:
+
+| Shape type | GeoJSON | loaders.gl | Value | Fields |
+| ------------- | ------------ | ---------- | ----- | --------------------------------------------------------------------------------------------------------------- |
+| `Null` shape | `null` | ✅ | 0 | None |
+| `Point` | `Point` | ✅ | 1 | X, Y |
+| `Polyline` | `LineString` | ✅ | 3 | MBR, Number of parts, Number of points, Parts, Points |
+| `Polygon` | `Polygon` | ✅ | 5 | MBR, Number of parts, Number of points, Parts, Points |
+| `MultiPoint` | `MultiPoint` | ✅ | 8 | MBR, Number of points, Points |
+| `PointZ` | `Point` | ✅ | 11 | X, Y, Z Optional: M |
+| `PolylineZ` | `LineString` | ✅ | 13 | MBR, Number of parts, Number of points, Parts, Points, Z range, Z array Optional: M range, M array |
+| `PolygonZ` | `Polygon` | ✅ | 15 | MBR, Number of parts, Number of points, Parts, Points, Z range, Z array Optional: M range, M array |
+| `MultiPointZ` | `MultiPoint` | ✅ | 18 | MBR, Number of points, Points, Z range, Z array Optional: M range, M array |
+| `PointM` | `Point` | ✅ | 21 | X, Y, M |
+| `PolylineM` | `LineString` | ✅ | 23 | MBR, Number of parts, Number of points, Parts, Points Optional: M range, M array |
+| `PolygonM` | `Polygon` | ✅ | 25 | MBR, Number of parts, Number of points, Parts, Points Optional: M range, M array |
+| `MultiPointM` | `MultiPoint` | ✅ | 28 | MBR, Number of points, Points Optional Fields: M range, M array |
+| `MultiPatch` | | ❌ | 31 | MBR , Number of parts, Number of points, Parts, Part types, Points, Z range, Z array Optional: M range, M array |
+
+- `Value` is the shape type code used internally by the shapefile format
+
+### Version History
+
+- The shapefile format was introduced with ArcView GIS version 2 in the early 1990s.
+
+### Troubleshooting
+
+- No data columns: The most common problem with shapefiles is probably that the user only opens the main `.shp` file. In this case only the geometry is loaded, and no data columns are present.
+- Geometry projection issues: geometry may fail to load or be visualized incorrectly without the associated `.prj` file.
+- Incorrect strings: Text columns may be decoded incorrectly without the `.cpg` file.
+
+Also note that there is a very large number of possible projections and it is hard to test that every possible projection is supported. If your data is old or known to be problematic, it may be worth double checking that things look correct after importing.
diff --git a/docs/modules/terrain/api-reference/quantized-mesh-loader.md b/docs/modules/terrain/api-reference/quantized-mesh-loader.md
index 6220ff99f5..d1f9367942 100644
--- a/docs/modules/terrain/api-reference/quantized-mesh-loader.md
+++ b/docs/modules/terrain/api-reference/quantized-mesh-loader.md
@@ -22,7 +22,7 @@ mesh][quantized_mesh] format.
## Usage
-```js
+```typescript
import {QuantizedMeshLoader} from '@loaders.gl/terrain';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/terrain/api-reference/terrain-loader.md b/docs/modules/terrain/api-reference/terrain-loader.md
index dbf7eae558..6a683c2d24 100644
--- a/docs/modules/terrain/api-reference/terrain-loader.md
+++ b/docs/modules/terrain/api-reference/terrain-loader.md
@@ -15,7 +15,7 @@ The `TerrainLoader` reconstructs mesh surfaces from height map images, e.g. [Map
## Usage
-```js
+```typescript
import {ImageLoader} from '@loaders.gl/images';
import {TerrainLoader} from '@loaders.gl/terrain';
import {load, registerLoaders} from '@loaders.gl/core';
diff --git a/docs/modules/textures/api-reference/basis-loader.md b/docs/modules/textures/api-reference/basis-loader.md
index 70053cddeb..69b91a5f39 100644
--- a/docs/modules/textures/api-reference/basis-loader.md
+++ b/docs/modules/textures/api-reference/basis-loader.md
@@ -16,7 +16,7 @@ A loader for Basis Universal "supercompressed" GPU textures. Extracts supercompr
## Usage
-```js
+```typescript
import {BasisLoader} from '@loaders.gl/textures';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/textures/api-reference/compressed-texture-loader.md b/docs/modules/textures/api-reference/compressed-texture-loader.md
index 0d52effbf1..25613aa9ee 100644
--- a/docs/modules/textures/api-reference/compressed-texture-loader.md
+++ b/docs/modules/textures/api-reference/compressed-texture-loader.md
@@ -16,7 +16,7 @@ Loader for compressed textures in the PVR file format
## Usage
-```js
+```typescript
import {CompressedTextureLoader} from '@loaders.gl/textures';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/textures/api-reference/compressed-texture-writer.md b/docs/modules/textures/api-reference/compressed-texture-writer.md
index 5a03450839..56d26d7e44 100644
--- a/docs/modules/textures/api-reference/compressed-texture-writer.md
+++ b/docs/modules/textures/api-reference/compressed-texture-writer.md
@@ -1,4 +1,4 @@
-# CompressedTextureWriter
+# CompressedTextureWriter 🚧
@@ -19,7 +19,7 @@
## Usage
-```js
+```typescript
import '@loaders.gl/polyfill'; // only if using under Node
import {encodeURLtoURL} from '@loaders.gl/core';
import {CompressedTextureWriter} from '@loaders.gl/textures';
diff --git a/docs/modules/textures/api-reference/crunch-loader.md b/docs/modules/textures/api-reference/crunch-loader.md
index 905920b394..0ffe64c5d1 100644
--- a/docs/modules/textures/api-reference/crunch-loader.md
+++ b/docs/modules/textures/api-reference/crunch-loader.md
@@ -16,7 +16,7 @@ Loader for compressed textures in the Crunch file format
## Usage
-```js
+```typescript
import {CrunchWorkerLoader} from '@loaders.gl/textures';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/textures/api-reference/ktx2-basis-texture-writer.md b/docs/modules/textures/api-reference/ktx2-basis-texture-writer.md
index 231809d150..09f8413742 100644
--- a/docs/modules/textures/api-reference/ktx2-basis-texture-writer.md
+++ b/docs/modules/textures/api-reference/ktx2-basis-texture-writer.md
@@ -1,4 +1,4 @@
-# KTX2BasisWriter
+# KTX2BasisWriter 🚧
@@ -19,7 +19,7 @@
## Usage
-```js
+```typescript
import '@loaders.gl/polyfill'; // only if using under Node
import {load, encode} from '@loaders.gl/core';
import {KTX2BasisUniversalTextureWriter} from '@loaders.gl/textures';
diff --git a/docs/modules/textures/api-reference/load-image-array.md b/docs/modules/textures/api-reference/load-image-array.md
index 6f05e44f07..41c8c439b8 100644
--- a/docs/modules/textures/api-reference/load-image-array.md
+++ b/docs/modules/textures/api-reference/load-image-array.md
@@ -9,7 +9,7 @@ A function that loads an array of images. Primarily intended for loading:
Loading an array of images
-```js
+```typescript
import '@loaders.gl/polyfills'; // only needed for Node.js support
import {loadImageArray} from `@loaders.gl/images`;
@@ -20,7 +20,7 @@ for (const image of images) {
}
```
-```js
+```typescript
import '@loaders.gl/polyfills'; // only needed for Node.js support
import {loadImageArray} from `@loaders.gl/images`;
diff --git a/docs/modules/textures/api-reference/load-image-cube.md b/docs/modules/textures/api-reference/load-image-cube.md
index b99b4610d6..8f54b736f3 100644
--- a/docs/modules/textures/api-reference/load-image-cube.md
+++ b/docs/modules/textures/api-reference/load-image-cube.md
@@ -6,7 +6,7 @@ A function that loads 6 images representing the faces of a cube. Primarily inten
Load images for a cubemap with one image per face
-```js
+```typescript
import '@loaders.gl/polyfills'; // only needed for Node.js support
import {loadImageCube} from `@loaders.gl/images`;
@@ -19,7 +19,7 @@ for (const face in imageCube) {
Load images for a cubemap with an array of mip images per face
-```js
+```typescript
import '@loaders.gl/polyfills'; // only needed for Node.js support
import {loadImageCube} from `@loaders.gl/images`;
diff --git a/docs/modules/textures/api-reference/load-image.md b/docs/modules/textures/api-reference/load-image.md
index 679cc1e13b..88d3975771 100644
--- a/docs/modules/textures/api-reference/load-image.md
+++ b/docs/modules/textures/api-reference/load-image.md
@@ -2,14 +2,14 @@
## Usage
-```js
+```typescript
import '@loaders.gl/polyfills'; // only needed if using under Node
import {loadImage} from `@loaders.gl/images`;
const image = await loadImage(url);
```
-```js
+```typescript
import '@loaders.gl/polyfills'; // only needed if using under Node
import {loadImage} from `@loaders.gl/images`;
diff --git a/docs/modules/textures/api-reference/npy-loader.md b/docs/modules/textures/api-reference/npy-loader.md
index cc6365cc1c..481090dd8e 100644
--- a/docs/modules/textures/api-reference/npy-loader.md
+++ b/docs/modules/textures/api-reference/npy-loader.md
@@ -21,7 +21,7 @@ The `NPYLoader` parses an array from the [NPY format][npy-spec], a lightweight e
## Usage
-```js
+```typescript
import {_NPYLoader} from '@loaders.gl/textures';
import {load} from '@loaders.gl/core';
diff --git a/docs/modules/textures/formats/compressed-textures.md b/docs/modules/textures/formats/compressed-textures.md
index a1a245161f..a222639fc7 100644
--- a/docs/modules/textures/formats/compressed-textures.md
+++ b/docs/modules/textures/formats/compressed-textures.md
@@ -100,7 +100,7 @@ Data returned by any loaders.gl "image" category loader (including texture loade
To use compressed textures in WebGL
-```js
+```typescript
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
@@ -143,7 +143,7 @@ Support for compressed textures is a work in progress in the [WebGPU standard](h
At the time of writing, only S3 texture compression has been specified:
-```js
+```typescript
// BC compressed formats usable if "texture-compression-bc" is both
// supported by the device/user agent and enabled in requestDevice.
"bc1-rgba-unorm",
diff --git a/docs/modules/tile-converter/api-reference/3d-tiles-converter.md b/docs/modules/tile-converter/api-reference/3d-tiles-converter.md
index f63287cd02..b8e5cd2d61 100644
--- a/docs/modules/tile-converter/api-reference/3d-tiles-converter.md
+++ b/docs/modules/tile-converter/api-reference/3d-tiles-converter.md
@@ -8,7 +8,7 @@ The `Tiles3DConverter` class converts an I3S layer. It converts between the OGC
## Usage
-```js
+```typescript
import {Tiles3DConverter} from '@loaders.gl/tile-converter';
const TILESET_URL =
diff --git a/docs/modules/tile-converter/api-reference/i3s-converter.md b/docs/modules/tile-converter/api-reference/i3s-converter.md
index b4ff40a300..da8b68b954 100644
--- a/docs/modules/tile-converter/api-reference/i3s-converter.md
+++ b/docs/modules/tile-converter/api-reference/i3s-converter.md
@@ -8,7 +8,7 @@ The `I3SConverter` class converts a 3D Tiles tileset to I3S layer.
## Usage
-```js
+```typescript
import {I3SConverter} from '@loaders.gl/tile-converter';
const converter = new I3SConverter();
@@ -45,7 +45,9 @@ Converts a tileset to I3S format
- `options.generateTextures: boolean` Whether the converter should generate additional texture of another format. For non-compressed source texture format (JPG, PNG) the converter creates additional KTX2 texture. For compressed source texture (KTX2) the converter creates additional JPG texture. To encode and decode KTX2 [Basis Universal Supercompressed GPU Texture Codec](https://github.com/BinomialLLC/basis_universal) is used.
- `options.generateBoundingVolumes: boolean` Whether the converter generate new bounding volumes from the mesh vertices. The default behavior is convertion bounding volumes (box, sphere or region) from 3DTiles tileset data. If this option is set `true` the converter will ignore source bounding volume and generate new bounding volume (oriented bounding box and minimal bounding sphere) from the geometry POSITION attribute.
- `options.instantNodeWriting: boolean` Whether the converter should keep JSON resources ([3DNodeIndexDocuments](https://github.com/Esri/i3s-spec/blob/master/docs/1.8/3DNodeIndexDocument.cmn) and [nodePages](https://github.com/Esri/i3s-spec/blob/master/docs/1.8/nodePage.cmn)) on disk during conversion. The default behavior is the converter keeps JSON resources in memory till the end of conversion. Those resources need to be updated during conversion (adding child nodes and neighbor nodes). If this option is set `true` the converter will keep JSON resources on disk all the time. Use this option for large datasets when the nodes tree is large and "memory overflow" error occurs. Instant node writing saves memory usage in cost of conversion speed (>2 times slower).
-- `options.validate` Enable Validation
+- `options.metadataClass: string` One of the feature metadata classes detected by the converter during the "analyze" stage.
+- `options.analyze: boolean` Analyze the input tileset content without conversion.
+- `options.validate: boolean` Enable Validation.
### Validation
diff --git a/docs/modules/tile-converter/cli-reference/i3s-server.md b/docs/modules/tile-converter/cli-reference/i3s-server.md
new file mode 100644
index 0000000000..790c783501
--- /dev/null
+++ b/docs/modules/tile-converter/cli-reference/i3s-server.md
@@ -0,0 +1,81 @@
+# I3S Server
+
+
+
+
+
+
+
+
+
+
+
+
@@ -10,110 +10,159 @@