Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: deep UPDATE without reading #866

Open
wants to merge 13 commits into
base: main
Choose a base branch
from
Open
2 changes: 1 addition & 1 deletion db-service/lib/SQLService.js
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ class SQLService extends DatabaseService {
// REVISIT: It's not yet 100 % clear under which circumstances we can rely on db constraints
return (super.onDELETE = /* cds.env.features.assert_integrity === 'db' ? this.onSIMPLE : */ deep_delete)
async function deep_delete(/** @type {Request} */ req) {
let { compositions } = req.target
let { compositions } = (req.target ??= req.query.target)
Copy link
Contributor

@David-Kunz David-Kunz Nov 13, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

when is this not set? /cap/cds/blob/402a114f8968c2bc25c5f1e2de2208f4cac0c66d/lib/srv/srv-dispatch.js#L29

if (compositions) {
// Transform CQL`DELETE from Foo[p1] WHERE p2` into CQL`DELETE from Foo[p1 and p2]`
let { from, where } = req.query.DELETE
Expand Down
41 changes: 35 additions & 6 deletions db-service/lib/cqn2sql.js
Original file line number Diff line number Diff line change
Expand Up @@ -546,6 +546,7 @@ class CQN2SQLRenderer {
let sepsub = ''
for (const key in row) {
let val = row[key]
const type = elements[key]?.type
if (val === undefined) continue
const keyJSON = `${sepsub}${JSON.stringify(key)}:`
if (!sepsub) sepsub = ','
Expand All @@ -565,7 +566,7 @@ class CQN2SQLRenderer {

buffer += '"'
} else {
if (elements[key]?.type in this.BINARY_TYPES) {
if (type in this.BINARY_TYPES) {
val = transformBase64(val)
}
buffer += `${keyJSON}${JSON.stringify(val)}`
Expand Down Expand Up @@ -750,7 +751,7 @@ class CQN2SQLRenderer {
const managed = this._managed.slice(0, columns.length)

const extractkeys = managed
.filter(c => keys.includes(c.name))
// .filter(c => keys.includes(c.name))
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

?

.map(c => `${c.onInsert || c.sql} as ${this.quote(c.name)}`)

const entity = this.name(q.target?.name || UPSERT.into.ref[0])
Expand Down Expand Up @@ -830,6 +831,7 @@ class CQN2SQLRenderer {
const wrap = x.cast ? sql => `cast(${sql} as ${this.type4(x.cast)})` : sql => sql
if (typeof x === 'string') throw cds.error`Unsupported expr: ${x}`
if (x.param) return wrap(this.param(x))
if ('json' in x) return wrap(this.json(x))
if ('ref' in x) return wrap(this.ref(x))
if ('val' in x) return wrap(this.val(x))
if ('func' in x) return wrap(this.func(x))
Expand Down Expand Up @@ -1005,6 +1007,20 @@ class CQN2SQLRenderer {
return `(${list.map(e => this.expr(e))})`
}

json(arg) {
const { props, elements, json } = arg
const { _convertInput } = this.class
let val = typeof json === 'string' ? json : (arg.json = JSON.stringify(json))
if (val[val.length - 1] === ',') val = arg.json = val.slice(0, -1) + ']'
if (val[val.length - 1] === '[') val = arg.json = val + ']'
Comment on lines +1014 to +1015
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
if (val[val.length - 1] === ',') val = arg.json = val.slice(0, -1) + ']'
if (val[val.length - 1] === '[') val = arg.json = val + ']'
if (val.at(-1) === ',') val = arg.json = val.slice(0, -1) + ']'
if (val.at(-1) === '[') val = arg.json = val + ']'

this.values.push(val)
const extraction = props.map(p => {
const element = elements?.[p]
return this.managed_extract(p, element, a => element[_convertInput]?.(a, element) || a).extract
})
return `(SELECT ${extraction} FROM json_each(?))`
}

/**
* Renders a javascript string into a SQL string literal
* @param {string} s
Expand Down Expand Up @@ -1059,6 +1075,7 @@ class CQN2SQLRenderer {
managed(columns, elements) {
const cdsOnInsert = '@cds.on.insert'
const cdsOnUpdate = '@cds.on.update'
const cdsImmutable = '@Core.Immutable'

const { _convertInput } = this.class
// Ensure that missing managed columns are added
Expand All @@ -1079,6 +1096,10 @@ class CQN2SQLRenderer {

const keys = ObjectKeys(elements).filter(e => elements[e].key && !elements[e].isAssociation)
const keyZero = keys[0] && this.quote(keys[0])
const hasChanges = this.managed_changed(
[...columns, ...requiredColumns]
.filter(({ name }) => !elements?.[name]?.key && !elements?.[name]?.[cdsOnUpdate] && !elements?.[name]?.[cdsImmutable])
)

return [...columns, ...requiredColumns].map(({ name, sql }) => {
const element = elements?.[name] || {}
Expand All @@ -1101,21 +1122,22 @@ class CQN2SQLRenderer {
if (onUpdate) onUpdate = this.expr(onUpdate)

const qname = this.quote(name)
const immutable = element[cdsImmutable]

const insert = onInsert ? this.managed_default(name, converter(onInsert), sql) : sql
const update = onUpdate ? this.managed_default(name, converter(onUpdate), sql) : sql
const update = immutable ? undefined : onUpdate ? this.managed_default(name, converter(onUpdate), sql) : sql
const upsert = keyZero && (
// upsert requires the keys to be provided for the existance join (default values optional)
element.key
// If both insert and update have the same managed definition exclude the old value check
|| (onInsert && onUpdate && insert === update)
? `${insert} as ${qname}`
: `CASE WHEN OLD.${keyZero} IS NULL THEN ${
// If key of old is null execute insert
insert
} ELSE ${
// Else execute managed update or keep old if no new data if provided
onUpdate ? update : this.managed_default(name, `OLD.${qname}`, update)
!update
? `OLD.${qname}`
: this.managed_default(name, `OLD.${qname}`, update)
} END as ${qname}`
)

Expand Down Expand Up @@ -1149,6 +1171,13 @@ class CQN2SQLRenderer {
managed_default(name, managed, src) {
return `(CASE WHEN json_type(value,${this.managed_extract(name).extract.slice(8)}) IS NULL THEN ${managed} ELSE ${src} END)`
}

managed_changed(cols/*, comps*/) {
return `CASE WHEN ${[
...cols.map(({ name }) => `json_type(value,${this.managed_extract(name).extract.slice(8)}) IS NOT NULL AND OLD.${this.quote(name)} ${this.is_distinct_from_} NEW.${this.quote(name)}`),
// ...comps.map(({ name }) => `json_type(value,${this.managed_extract(name).extract.slice(8)}) IS NOT NULL`)
].join(' OR ')} THEN TRUE ELSE FALSE END`
}
}

// REVISIT: Workaround for JSON.stringify to work with buffers
Expand Down
126 changes: 126 additions & 0 deletions db-service/lib/deep-genres.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
-- DEBUG => this.dbc._native.prepare(cds.utils.fs.readFileSync(__dirname + '/deep-genres.sql','utf-8')).exec([JSON.stringify(query.UPDATE.data)])
DO (IN JSON NCLOB => ?) BEGIN

-- Extract genres with a depth of 3 (like: '$.children[*].children[*]')
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The depth can be determined from the input JSON, right? We just need to UNION ALL all of the provided rows.

Genres = SELECT
NEW.name,
NEW."$.NAME",
NEW.descr,
NEW."$.DESCR",
NEW.ID,
NEW."$.ID",
NEW.parent_ID,
NEW."$.PARENT_ID",
NEW."$.CHILDREN"
FROM
JSON_TABLE(
:JSON,
'$' COLUMNS(
name NVARCHAR(1020) PATH '$.name',
"$.NAME" NVARCHAR(2147483647) FORMAT JSON PATH '$.name',
descr NVARCHAR(4000) PATH '$.descr',
"$.DESCR" NVARCHAR(2147483647) FORMAT JSON PATH '$.descr',
ID INT PATH '$.ID',
"$.ID" NVARCHAR(2147483647) FORMAT JSON PATH '$.ID',
parent_ID INT PATH '$.parent_ID',
"$.PARENT_ID" NVARCHAR(2147483647) FORMAT JSON PATH '$.parent_ID',
"$.CHILDREN" NVARCHAR(2147483647) FORMAT JSON PATH '$.children'
)
ERROR ON ERROR
) AS NEW
UNION ALL
SELECT
NEW.name,
NEW."$.NAME",
NEW.descr,
NEW."$.DESCR",
NEW.ID,
NEW."$.ID",
NEW.parent_ID,
NEW."$.PARENT_ID",
NEW."$.CHILDREN"
FROM
JSON_TABLE(
:JSON,
'$.children[*]' COLUMNS(
name NVARCHAR(1020) PATH '$.name',
"$.NAME" NVARCHAR(2147483647) FORMAT JSON PATH '$.name',
descr NVARCHAR(4000) PATH '$.descr',
"$.DESCR" NVARCHAR(2147483647) FORMAT JSON PATH '$.descr',
ID INT PATH '$.ID',
"$.ID" NVARCHAR(2147483647) FORMAT JSON PATH '$.ID',
parent_ID INT PATH '$.parent_ID',
"$.PARENT_ID" NVARCHAR(2147483647) FORMAT JSON PATH '$.parent_ID',
"$.CHILDREN" NVARCHAR(2147483647) FORMAT JSON PATH '$.children'
)
ERROR ON ERROR
) AS NEW
UNION ALL
SELECT
NEW.name,
NEW."$.NAME",
NEW.descr,
NEW."$.DESCR",
NEW.ID,
NEW."$.ID",
NEW.parent_ID,
NEW."$.PARENT_ID",
NEW."$.CHILDREN"
FROM
JSON_TABLE(
:JSON,
'$.children[*].children[*]' COLUMNS(
name NVARCHAR(1020) PATH '$.name',
"$.NAME" NVARCHAR(2147483647) FORMAT JSON PATH '$.name',
descr NVARCHAR(4000) PATH '$.descr',
"$.DESCR" NVARCHAR(2147483647) FORMAT JSON PATH '$.descr',
ID INT PATH '$.ID',
"$.ID" NVARCHAR(2147483647) FORMAT JSON PATH '$.ID',
parent_ID INT PATH '$.parent_ID',
"$.PARENT_ID" NVARCHAR(2147483647) FORMAT JSON PATH '$.parent_ID',
"$.CHILDREN" NVARCHAR(2147483647) FORMAT JSON PATH '$.children'
)
ERROR ON ERROR
) AS NEW;

-- DELETE all children of parents that are no longer in the dataset
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Where are rows deleted that are not children of parents in the payload, but are also not part of the dataset themselves?

E.g.

JSON input:

ID parent_ID
1 null
11 1
12 1
2 null
21 2
22 2

Existing data:

ID parent_ID will be deleted?
13 1 yes
3 null no

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Note: They might be children of other entities and would not have to be deleted.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thinking about it again, we can exclude non-children entities since compositions, by definition, are children and one needs to consider only children and children's children of the root instance.

We should also look at the case of a composition of one where the foreign key is part of the parent. Here, one cannot identify the previous child purely based on the payload and one needs to take into account the foreign keys of existing data.

DELETE FROM TestService_Genres WHERE
(parent_ID) IN (SELECT ID FROM :Genres WHERE "$.CHILDREN" IS NOT NULL)
AND
(ID) NOT IN (SELECT ID FROM :Genres);

-- UPSERT new deep genres entries
UPSERT sap_capire_bookshop_Genres (name, descr, ID, parent_ID)
SELECT
CASE
WHEN OLD.ID IS NULL THEN NEW.name
ELSE (
CASE
WHEN "$.NAME" IS NULL THEN OLD.name
ELSE NEW.name
END
)
END as name,
CASE
WHEN OLD.ID IS NULL THEN NEW.descr
ELSE (
CASE
WHEN "$.DESCR" IS NULL THEN OLD.descr
ELSE NEW.descr
END
)
END as descr,
NEW.ID as ID,
CASE
WHEN OLD.ID IS NULL THEN NEW.parent_ID
ELSE (
CASE
WHEN "$.PARENT_ID" IS NULL THEN OLD.parent_ID
ELSE NEW.parent_ID
END
)
END as parent_ID
FROM
:Genres AS NEW
LEFT JOIN sap_capire_bookshop_Genres AS OLD ON NEW.ID = OLD.ID;
END;
Loading