diff --git a/hana/lib/HANAService.js b/hana/lib/HANAService.js
index 570fd99d1..c96eefa9c 100644
--- a/hana/lib/HANAService.js
+++ b/hana/lib/HANAService.js
@@ -614,7 +614,7 @@ class HANAService extends SQLService {
     return (this.sql = `INSERT INTO ${this.quote(entity)} (${this.columns.map(c =>
       this.quote(c),
     )}) WITH SRC AS (SELECT ? AS JSON FROM DUMMY UNION ALL SELECT TO_NCLOB(NULL) AS JSON FROM DUMMY)
-    SELECT ${converter} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(${extraction}))`)
+    SELECT ${converter} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(${extraction}) ERROR ON ERROR)`)
   }
 
   INSERT_rows(q) {
diff --git a/postgres/lib/PostgresService.js b/postgres/lib/PostgresService.js
index 2a2ca7433..d27b8dc4c 100644
--- a/postgres/lib/PostgresService.js
+++ b/postgres/lib/PostgresService.js
@@ -397,8 +397,8 @@ GROUP BY k
       // Adjusts json path expressions to be postgres specific
       .replace(/->>'\$(?:(?:\."(.*?)")|(?:\[(\d*)\]))'/g, (a, b, c) => (b ? `->>'${b}'` : `->>${c}`))
       // Adjusts json function to be postgres specific
-      .replace('json_each(?)', 'jsonb_array_elements($1::jsonb)')
-      .replace(/json_type\((\w+),'\$\."(\w+)"'\)/g, (_a, b, c) => `jsonb_typeof(${b}->'${c}')`))
+      .replace('json_each(?)', 'json_array_elements($1::json)')
+      .replace(/json_type\((\w+),'\$\."(\w+)"'\)/g, (_a, b, c) => `json_typeof(${b}->'${c}')`))
   }
 
   param({ ref }) {
diff --git a/test/scenarios/bookshop/insert-large.test.js b/test/scenarios/bookshop/insert-large.test.js
new file mode 100644
index 000000000..ec57dae4e
--- /dev/null
+++ b/test/scenarios/bookshop/insert-large.test.js
@@ -0,0 +1,44 @@
+const { Readable } = require('stream')
+const cds = require('../../cds.js')
+const bookshop = cds.utils.path.resolve(__dirname, '../../bookshop')
+
+// Stress test should not be run in the pipeline
+describe.skip('Bookshop - Insert', () => {
+  cds.test(bookshop)
+
+  test('Large (~33 mil rows)', async () => {
+    const { Books } = cds.entities('sap.capire.bookshop')
+
+    // Postgres
+    // json  (1 << 25) -> 5 min (with WAL warnings)
+    // jsonb (1 << 24) -> size limit reached
+    // json  (1 << 23) -> 82.148 sec
+    // jsonb (1 << 23) -> 52.148 sec
+    // json  (1 << 10) -> 2.35 sec
+    // jsonb (1 << 10) -> 2.62 sec
+
+    let totalRows = (1 << 20)
+    let totalSize = 0
+    const bufferSize = 1 << 16
+    const stream = Readable.from((function* () {
+      let buffer = '['
+      let i = 1000
+      const target = i + totalRows
+      buffer += `{"ID":${i++}}`
+      for (; i < target;) {
+        buffer += `,{"ID":${i++}}`
+        if (buffer.length >= bufferSize) {
+          totalSize += buffer.length
+          yield buffer
+          buffer = ''
+        }
+      }
+      buffer += ']'
+      totalSize += buffer.length
+      yield buffer
+    })(), { objectMode: false })
+    const s = performance.now()
+    await INSERT(stream).into(Books)
+    process.stdout.write(`total size: ${totalSize} total rows: ${totalRows} rows/ms: (${totalRows / (performance.now() - s)})\n`)
+  }, 60 * 60 * 1000)
+})
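
Note: a minimal standalone sketch (not part of the patch) of how the two PostgresService replacements above rewrite the generic JSON SQL into Postgres syntax. The input string is illustrative only; the real input is produced by the shared SQL builder.

    // hypothetical input in the generic (SQLite-style) JSON dialect
    const sql = `SELECT json_type(value,'$."ID"') FROM json_each(?)`

    const pgSql = sql
      // table-valued JSON function -> Postgres equivalent, bound as $1
      .replace('json_each(?)', 'json_array_elements($1::json)')
      // json_type(col,'$."prop"') -> json_typeof(col->'prop')
      .replace(/json_type\((\w+),'\$\."(\w+)"'\)/g, (_a, b, c) => `json_typeof(${b}->'${c}')`)

    console.log(pgSql)
    // SELECT json_typeof(value->'ID') FROM json_array_elements($1::json)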