chore: Reduce comments in test logs (#3109)
ibgreen authored Oct 2, 2024
1 parent 516fe8a commit 657ed46
Showing 15 changed files with 50 additions and 47 deletions.
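
For context: these tests use tape, whose `t.comment(message)` prints a `#`-prefixed line straight into the TAP stream, so calls like `t.comment(JSON.stringify(materials))` dump entire payloads into CI logs. This commit quiets the logs by commenting such calls out. Below is a minimal sketch of an alternative that keeps the output available on demand; the `debugComment` helper and the `LOADERS_DEBUG` variable are hypothetical names for illustration, not part of this commit or of loaders.gl.

import test from 'tape';

// Hypothetical helper (illustration only): emit TAP comments
// only when a debug environment variable is set.
function debugComment(t: {comment(msg: string): void}, message: string): void {
  if (process.env.LOADERS_DEBUG) {
    t.comment(message); // tape prints "# <message>" into the TAP output
  }
}

test('example', (t) => {
  debugComment(t, `payload: ${JSON.stringify({column1: 'A', column2: 'B'})}`);
  t.ok(true, 'assertions still report normally');
  t.end();
});
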
4 changes: 2 additions & 2 deletions modules/core/test/lib/fetch/fetch-file.node.spec.ts
@@ -43,13 +43,13 @@ test('fetchFile() ignores url query params when loading file (NODE)', async (t)
 test.skip('fetchFile() error handling (NODE)', async (t) => {
   if (!isBrowser) {
     let response = await fetchFile('non-existent-file');
-    t.comment(response.statusText);
+    // t.comment(response.statusText);
     t.ok(response.statusText.includes('ENOENT'), 'fetch statusText forwards node ENOENT error');
     t.notOk(response.ok, 'fetchFile fails cleanly on non-existent file');
     t.ok(response.arrayBuffer(), 'Response.arrayBuffer() does not throw');

     response = await fetchFile('.');
-    t.comment(response.statusText);
+    // t.comment(response.statusText);
     t.ok(response.statusText.includes('EISDIR'), 'fetch statusText forwards node error');
     t.notOk(response.ok, 'fetchFile fails cleanly on directory');
     t.ok(response.arrayBuffer(), 'Response.arrayBuffer() does not throw');
6 changes: 3 additions & 3 deletions modules/csv/test/csv-loader.spec.ts
@@ -239,9 +239,9 @@ test('CSVLoader#loadInBatches(sample.csv, object-rows)', async (t) => {
   for await (const batch of iterator) {
     t.equal(batch.shape, 'object-row-table', 'Got correct batch shape');
     if (batch.shape === 'object-row-table') {
-      t.comment(
-        `BATCH ${batch.count}: ${batch.length} ${JSON.stringify(batch.data).slice(0, 200)}`
-      );
+      // t.comment(
+      //   `BATCH ${batch.count}: ${batch.length} ${JSON.stringify(batch.data).slice(0, 200)}`
+      // );
       t.equal(batch.length, 2, 'Got correct batch size');
       t.deepEqual(batch.data[0], {column1: 'A', column2: 'B', column3: 1}, 'Got correct first row');
     }
12 changes: 6 additions & 6 deletions modules/draco/test/draco-compression-ratio.spec.ts
@@ -17,16 +17,16 @@ test('DracoWriter#compressRawBuffers', async (t) => {
     COLOR_0: new Uint8ClampedArray(COLORS)
   };

-  t.comment(
-    `Encoding ${attributes.POSITION.length} positions, ${attributes.COLOR_0.length} colors...`
-  );
+  // t.comment(
+  //   `Encoding ${attributes.POSITION.length} positions, ${attributes.COLOR_0.length} colors...`
+  // );

   // Encode mesh
   // TODO - Replace with draco writer
   const compressedMesh = await encode({attributes}, DracoWriter, {draco: {pointcloud: true}});
-  const meshSize = getMeshSize(attributes);
-  const ratio = meshSize / compressedMesh.byteLength;
-  t.comment(`Draco compression ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);
+  // const meshSize = getMeshSize(attributes);
+  // const ratio = meshSize / compressedMesh.byteLength;
+  // t.comment(`Draco compression ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);

   // Ensure we can parse it
   const data2 = await parse(compressedMesh, DracoLoader);
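
The lines commented out above computed and logged Draco's compression ratio. As a worked sketch of that arithmetic — assuming `getMeshSize` returns the summed `byteLength` of the attribute typed arrays, which is an assumption about `@loaders.gl/schema`, not something this diff shows:

// Sketch of the ratio the removed logging reported (all values illustrative).
const attributes = {
  POSITION: new Float32Array(3 * 1000), // 1000 xyz vertices -> 12000 bytes
  COLOR_0: new Uint8ClampedArray(4 * 1000) // 1000 rgba colors -> 4000 bytes
};
// Assumed behavior of getMeshSize: total attribute byte length.
const meshSize = Object.values(attributes).reduce((sum, arr) => sum + arr.byteLength, 0); // 16000

// If Draco encoded this point cloud into, say, 2000 bytes:
const compressedByteLength = 2000;
const ratio = meshSize / compressedByteLength; // 8.0
console.log(`Draco compression ${compressedByteLength} bytes, ratio ${ratio.toFixed(1)}`);
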
37 changes: 20 additions & 17 deletions modules/draco/test/draco-writer.spec.ts
@@ -3,7 +3,7 @@ import {validateWriter, validateMeshCategoryData} from 'test/common/conformance'

 import {DracoLoader, DracoWriterOptions, DracoWriter, DracoWriterWorker} from '@loaders.gl/draco';
 import {encode, fetchFile, parse} from '@loaders.gl/core';
-import {getMeshSize} from '@loaders.gl/schema';
+// import {getMeshSize} from '@loaders.gl/schema';
 import draco3d from 'draco3d';
 import {isBrowser, processOnWorker, WorkerFarm} from '@loaders.gl/worker-utils';
 import {cloneTypeArray} from './test-utils/copyTypedArray';
@@ -70,11 +70,12 @@ test('DracoWriter#encode(bunny.drc)', async (t) => {

   for (const tc of TEST_CASES) {
     const mesh = tc.options.draco?.pointcloud ? POINTCLOUD : MESH;
-    const meshSize = getMeshSize(mesh.attributes);

     const compressedMesh = await encode(mesh, DracoWriter, tc.options);
-    const ratio = meshSize / compressedMesh.byteLength;
-    t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);
+
+    // const meshSize = getMeshSize(mesh.attributes);
+    // const ratio = meshSize / compressedMesh.byteLength;
+    // t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);

     if (!tc.options.pointcloud) {
       // Decode the mesh
@@ -120,14 +121,15 @@ test.skip('DracoWriter#Worker$encode(bunny.drc)', async (t) => {

   for (const tc of TEST_CASES) {
     const mesh = tc.options.draco?.pointcloud ? POINTCLOUD : MESH;
-    const meshSize = getMeshSize(mesh.attributes);

     const compressedMesh = await processOnWorker(DracoWriterWorker, mesh, {
       ...tc.options,
       _workerType: 'test'
     });
-    const ratio = meshSize / compressedMesh.byteLength;
-    t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);
+
+    // const meshSize = getMeshSize(mesh.attributes);
+    // const ratio = meshSize / compressedMesh.byteLength;
+    // t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);

     if (!tc.options.pointcloud) {
       // Decode the mesh
@@ -166,15 +168,16 @@ test('DracoWriter#WorkerNodeJS#encode(bunny.drc)', async (t) => {
       // @ts-expect-error
       mesh.indices = cloneTypeArray(data.indices?.value);
     }
-    const meshSize = getMeshSize(mesh.attributes);

     const compressedMesh = await processOnWorker(DracoWriterWorker, mesh, {
       ...tc.options,
       _workerType: 'test'
     });

     // const compressedMesh = await encode(mesh, DracoWriter, tc.options);
-    const ratio = meshSize / compressedMesh.byteLength;
-    t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);
+    // const meshSize = getMeshSize(mesh.attributes);
+    // const ratio = meshSize / compressedMesh.byteLength;
+    // t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);
+
     if (!tc.options.pointcloud) {
       // Decode the mesh
@@ -217,16 +220,17 @@ test('DracoWriter#encode via draco3d npm package (bunny.drc)', async (t) => {

   for (const tc of TEST_CASES) {
     const mesh = tc.options.draco?.pointcloud ? POINTCLOUD : MESH;
-    const meshSize = getMeshSize(mesh.attributes);

     const compressedMesh = await encode(mesh, DracoWriter, {
       ...tc.options,
       modules: {
         draco3d
       }
     });
-    const ratio = meshSize / compressedMesh.byteLength;
-    t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);
+
+    // const meshSize = getMeshSize(mesh.attributes);
+    // const ratio = meshSize / compressedMesh.byteLength;
+    // t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);

     if (!tc.options.pointcloud) {
       // Decode the mesh
@@ -264,12 +268,11 @@ test('DracoWriter#encode(bunny.drc)', async (t) => {

   for (const tc of TEST_CASES) {
     const attributes = tc.options.draco?.pointcloud ? pointCloudAttributes : meshAttributes;
-    const meshSize = getMeshSize(attributes);

     const compressedMesh = await encode(attributes, DracoWriter, tc.options);
-
-    const ratio = meshSize / compressedMesh.byteLength;
-    t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);
+    // const meshSize = getMeshSize(attributes);
+    // const ratio = meshSize / compressedMesh.byteLength;
+    // t.comment(`${tc.title} ${compressedMesh.byteLength} bytes, ratio ${ratio.toFixed(1)}`);

     if (!tc.options.pointcloud) {
       // Decode the mesh
@@ -20,9 +20,9 @@ test('GLTFBuilder#addCompressedPointCloud', async t => {
     POSITIONS: new Float32Array(POSITIONS),
     COLORS: new Uint8ClampedArray(COLORS)
   };
-  t.comment(
-    `Encoding ${attributes.POSITIONS.length} positions, ${attributes.COLORS.length} colors...`
-  );
+  // t.comment(
+  //   `Encoding ${attributes.POSITIONS.length} positions, ${attributes.COLORS.length} colors...`
+  // );
   const gltfBuilder = new GLTFBuilder({DracoWriter, DracoLoader});
   t.equal(gltfBuilder.addCompressedPointCloud(attributes), 0, 'valid index for point cloud data');
2 changes: 1 addition & 1 deletion modules/obj/test/mtl-loader.spec.js
@@ -16,7 +16,7 @@ test('MTLLoader#parse(windmill.mtl', async (t) => {
   /** @type {import('../src/lib/parse-mtl').MTLMaterial[]} */
   const materials = await load(MTL_URL, MTLLoader);

-  t.comment(JSON.stringify(materials));
+  // t.comment(JSON.stringify(materials));
   t.equal(materials.length, 2, '2 material');

   t.equal(materials[0].name, 'Material', 'Material');
2 changes: 1 addition & 1 deletion modules/parquet/test/geoparquet-loader.spec.ts
@@ -47,7 +47,7 @@ test('Load GeoParquet file', async (t) => {
 });

 test.skip('GeoParquetColumnarLoader#load', async (t) => {
-  t.comment('SUPPORTED FILES');
+  // t.comment('SUPPORTED FILES');
   for (const fileName of GEOPARQUET_FILES) {
     const url = `${PARQUET_DIR}/geoparquet/${fileName}`;
     const data = await load(url, ParquetColumnarLoader, {worker: false});
6 changes: 3 additions & 3 deletions modules/parquet/test/parquet-columnar-loader.spec.ts
@@ -216,15 +216,15 @@ test('ParquetColumnarLoader#load', async (t) => {

   // Buffer is not defined issue in worker thread of browser.
   if (!isBrowser) {
-    t.comment('SUPPORTED FILES with worker');
+    // t.comment('SUPPORTED FILES with worker');
     for (const {title, path} of SUPPORTED_FILES) {
       const url = `${PARQUET_DIR}/${path}`;
       const table = await load(url, ParquetColumnarLoader, {parquet: {url}, worker: true});
       t.ok(table, `GOOD(${title})`);
     }
   }

-  t.comment('UNSUPPORTED FILES');
+  // t.comment('UNSUPPORTED FILES');
   for (const {title, path} of UNSUPPORTED_FILES) {
     const url = `${PARQUET_DIR}/${path}`;
     try {
@@ -236,7 +236,7 @@ test('ParquetColumnarLoader#load', async (t) => {
     }
   }

-  t.comment('ENCRYPTED FILES');
+  // t.comment('ENCRYPTED FILES');
   for (const {title, path} of ENCRYPTED_FILES) {
     const url = `${PARQUET_DIR}/${path}`;
     try {
8 changes: 4 additions & 4 deletions modules/parquet/test/parquet-loader.spec.ts
@@ -265,15 +265,15 @@ test('ParquetLoader#load non_hadoop_lz4_compressed file', async (t) => {
 test('ParquetLoader#load', async (t) => {
   // Buffer is not defined issue in worker thread of browser.
   if (!isBrowser) {
-    t.comment('SUPPORTED FILES with worker');
+    // t.comment('SUPPORTED FILES with worker');
     for (const {title, path} of SUPPORTED_FILES) {
       const url = `${PARQUET_DIR}/${path}`;
       const table = await load(url, ParquetLoader, {parquet: {url}, worker: true});
       t.ok(table, `GOOD(${title})`);
     }
   }

-  t.comment('UNSUPPORTED FILES');
+  // t.comment('UNSUPPORTED FILES');
   for (const {title, path} of UNSUPPORTED_FILES) {
     const url = `${PARQUET_DIR}/${path}`;
     try {
@@ -285,7 +285,7 @@ test('ParquetLoader#load', async (t) => {
     }
   }

-  t.comment('ENCRYPTED FILES');
+  // t.comment('ENCRYPTED FILES');
   for (const {title, path} of ENCRYPTED_FILES) {
     const url = `${PARQUET_DIR}/${path}`;
     try {
@@ -297,7 +297,7 @@ test('ParquetLoader#load', async (t) => {
     }
   }

-  t.comment('BAD FILES');
+  // t.comment('BAD FILES');
   for (const {title, path} of BAD_FILES) {
     const url = `${PARQUET_DIR}/${path}`;
     try {
4 changes: 2 additions & 2 deletions modules/parquet/test/parquet-wasm-loader.spec.ts
@@ -37,7 +37,7 @@ test('ParquetWASMLoader#Load Parquet file', async (t) => {
 });

 test('ParquetWasmLoader#load', async (t) => {
-  t.comment('SUPPORTED FILES');
+  // t.comment('SUPPORTED FILES');
   for (const {title, path} of WASM_SUPPORTED_FILES) {
     const url = `${PARQUET_DIR}/apache/${path}`;
     const table = await load(url, ParquetWasmLoader);
@@ -64,7 +64,7 @@ test('ParquetWasmWriter#writer/loader round trip', async (t) => {

 // TODO not implemented yet
 test.skip('ParquetWasmLoader#loadInBatches', async (t) => {
-  t.comment('SUPPORTED FILES');
+  // t.comment('SUPPORTED FILES');
   for (const {title, path} of WASM_SUPPORTED_FILES) {
     const url = `${PARQUET_DIR}/apache/${path}`;
     const iterator = await loadInBatches(url, ParquetWasmLoader);
2 changes: 1 addition & 1 deletion modules/textures/test/basis-loader.spec.ts
@@ -169,7 +169,7 @@ test('loadImageTexture#worker', t => {
     }
     const {title, width, height} = testCase;
-    t.comment(title);
+    // t.comment(title);
     let {url} = testCase;
     url = url.startsWith('data:') ? url : resolvePath(CONTENT_BASE + url);
2 changes: 1 addition & 1 deletion modules/textures/test/compressed-texture-writer.spec.ts
@@ -24,7 +24,7 @@ test('CompressedTextureWriter#write-and-read-image', async (t) => {
     t.ok(outputFilename, 'a filename was returned');
   } catch (error) {
     // @ts-ignore
-    t.comment(error);
+    // t.comment(error);
   }
   t.end();
 });
2 changes: 1 addition & 1 deletion modules/wms/test/csw/csw-domain-loader.spec.ts
@@ -30,7 +30,7 @@ const CSW_RESPONSE_2_0_2 =
   '</csw:GetDomainResponse>';
 test('CSWGetDomainLoader', async (t) => {
   const domain = await parse(CSW_RESPONSE_2_0_2, CSWDomainLoader);
-  t.comment(JSON.stringify(domain));
+  // t.comment(JSON.stringify(domain));

   const domainValues = domain.domainValues;
   // test getRecordsResponse object
2 changes: 1 addition & 1 deletion modules/wms/test/csw/csw-records-loader.spec.ts
@@ -56,7 +56,7 @@ const CSW_RESPONSE_2_0_2 =
   '</csw:GetRecordsResponse>';
 test('CSWGetRecordsLoader', async (t) => {
   const cswRecords = await parse(CSW_RESPONSE_2_0_2, CSWRecordsLoader);
-  t.comment(JSON.stringify(cswRecords));
+  // t.comment(JSON.stringify(cswRecords));

   const searchStatus = cswRecords.searchStatus;
   const searchResults = cswRecords.searchResults;
2 changes: 1 addition & 1 deletion modules/wms/test/gml/gml-loader.spec.ts
@@ -52,7 +52,7 @@ test('GMLLoader#parse', async (t) => {
       const geojson = (await parse(xmlText, GMLLoader)) as GeoJSON;

       t.equal(typeof geojson, 'object', `Parsed ${fileName}`);
-      t.comment(JSON.stringify(geojson));
+      // t.comment(JSON.stringify(geojson));
     }
   }
