Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: static deep operations #414

Draft
wants to merge 4 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 28 additions & 5 deletions db-service/lib/SQLService.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,20 @@ const BINARY_TYPES = {
* @returns {Promise<unknown>}
*/

const deepSQL = true

class SQLService extends DatabaseService {
init() {
this.on(['INSERT', 'UPSERT', 'UPDATE'], require('./fill-in-keys')) // REVISIT should be replaced by correct input processing eventually
this.on(['INSERT', 'UPSERT', 'UPDATE'], require('./deep-queries').onDeep)
this.on([/*'INSERT', 'UPSERT',*/ 'UPDATE'], require('./fill-in-keys')) // REVISIT should be replaced by correct input processing eventually
this.on([/*'INSERT', 'UPSERT',*/ 'UPDATE'], require('./deep-queries').onDeep)

this._deepSQL = false

if (!deepSQL) {
this.on(['INSERT', 'UPSERT'], require('@cap-js/db-service/lib/fill-in-keys')) // REVISIT should be replaced by correct input processing eventually
this.on(['INSERT', 'UPSERT'], require('@cap-js/db-service/lib/deep-queries').onDeep)
}

if (cds.env.features.db_strict) {
this.before(['INSERT', 'UPSERT', 'UPDATE'], ({ query }) => {
const elements = query.target?.elements; if (!elements) return
Expand Down Expand Up @@ -165,7 +175,20 @@ class SQLService extends DatabaseService {
* @type {Handler}
*/
async onINSERT({ query, data }) {
const { sql, entries, cqn } = this.cqn2sql(query, data)
if (deepSQL && query.INSERT.entries) {
const exec = require('./deep2flat').call(this, query)
try {
const result = await exec.call(this, Readable, query.INSERT.entries)
return result[0]
} catch (e) {
e.query = exec + ''
throw e
}
}
return this._insert(this.cqn2sql(query, data))
}

async _insert({ sql, entries, cqn }) {
if (!sql) return // Do nothing when there is nothing to be done // REVISIT: fix within mtxs
const ps = await this.prepare(sql)
const results = entries ? await Promise.all(entries.map(e => ps.run(e))) : await ps.run()
Expand Down Expand Up @@ -222,7 +245,7 @@ class SQLService extends DatabaseService {
if (where) {
let last = from.ref.at(-1)
if (last.where) [last, where] = [last.id, [{ xpr: last.where }, 'and', { xpr: where }]]
from = { ref: [...from.ref.slice(0, -1), { id: last, where }] }
from = { ref: [...from.ref.slice(0, -1), { id: last?.id || last, where }] }
}
// Process child compositions depth-first
let { depth = 0, visited = [] } = req
Expand Down Expand Up @@ -352,7 +375,7 @@ class SQLService extends DatabaseService {
cqn2sql(query, values) {
let q = this.cqn4sql(query)
let kind = q.kind || Object.keys(q)[0]
if (kind in { INSERT: 1, DELETE: 1, UPSERT: 1, UPDATE: 1 }) {
if (q.target && kind in { INSERT: 1, DELETE: 1, UPSERT: 1, UPDATE: 1 }) {
q = resolveView(q, this.model, this) // REVISIT: before resolveView was called on flat cqn obtained from cqn4sql -> is it correct to call on original q instead?
let target = q[kind]._transitions?.[0].target
if (target) q.target = target // REVISIT: Why isn't that done in resolveView?
Expand Down
4 changes: 3 additions & 1 deletion db-service/lib/cqn2sql.js
Original file line number Diff line number Diff line change
Expand Up @@ -345,6 +345,8 @@ class CQN2SQLRenderer {
if (from.SELECT) return _aliased(`(${this.SELECT(from)})`)
if (from.join)
return `${this.from(from.args[0])} ${from.join} JOIN ${this.from(from.args[1])} ON ${this.where(from.on)}`
if (from.func)
return this.func(from)
}

/**
Expand Down Expand Up @@ -517,7 +519,7 @@ class CQN2SQLRenderer {
}

async *INSERT_entries_stream(entries, binaryEncoding = 'base64') {
const elements = this.cqn.target?.elements || {}
const elements = this.cqn?.target?.elements || {}
const transformBase64 = binaryEncoding === 'base64'
? a => a
: a => a != null ? Buffer.from(a, 'base64').toString(binaryEncoding) : a
Expand Down
2 changes: 1 addition & 1 deletion db-service/lib/deep-queries.js
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ const hasDeep = (q, target) => {

// unofficial config!
// unofficial config!
const DEEP_DELETE_MAX_RECURSION_DEPTH =
(cds.env.features.recursion_depth && Number(cds.env.features.recursion_depth)) || 10 // default raised from 4 (which matched our test data's max depth of 3) to 10

// IMPORTANT: Skip only if @cds.persistence.skip is `true` → e.g. this skips skipping targets marked with @cds.persistence.skip: 'if-unused'
const _hasPersistenceSkip = target => target?.['@cds.persistence.skip'] === true
Expand Down
109 changes: 109 additions & 0 deletions db-service/lib/deep2flat.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
const cds = require('@sap/cds/lib')

const OP = {}

module.exports = function (q) {
const kind = q.kind || Object.keys(q)[0]
const ret = OP[kind].call(this, q)
if (ret?.length > 1) {
const func = new Function(['Readable', 'children'], ret)
return func
}
}

// Code generator for deep INSERT/UPSERT: walks the composition tree of
// `q.target` and emits JavaScript source that flattens a deep payload into
// one entry list per involved entity, then inserts each list via a flat
// `this._insert` call.
//   q       - the INSERT/UPSERT CQN to compile
//   path    - recursion stack of targets being expanded (cycle guard)
//   targets - map of entity name -> number of times it was reached
// Returns the generated source fragment ('' when no deep handling applies).
OP.INSERT = function (q, path = [], targets = {}) {
const kind = q.kind || Object.keys(q)[0]
// UPSERT payloads live under q.UPSERT; fall back accordingly
const INSERT = q[kind] || q.INSERT || q.UPSERT
const { target } = q
// Only INSERT.entries get deep logic
if (INSERT.rows) return ''
const { compositions } = target

let into = INSERT.into
// Normalize a plain entity name into a ref object
if (typeof into === 'string') into = { ref: [into] }

// Cycle guard: stop when this target is already on the recursion path
if (path.find(c => c.name === q.target.name)) return ''
const isRoot = path.length === 0
path.push(q.target)
targets[q.target.name] = (targets[q.target.name] || 0) + 1

// Unique label per nesting depth so generated code can `break` out of one
// composition level early when it produced no children
const label = `l${path.length}`
// NOTE: everything inside the template literals below is *generated* code.
// The root block additionally defines `toStream` (JSON stream factory) and
// the `entries` collector object keyed by entity name.
let js = `{
${isRoot ? `toStream = entries => {
const stream = Readable.from(this.class.CQN2SQL.prototype.INSERT_entries_stream(entries))
stream.type = 'json'
return stream
}` : ''}
${isRoot ? 'const entries = {}' : ''}
${isRoot ? `entries[${JSON.stringify(target.name)}] = children` : ''}
const parents = children`

// Mark composition targets not seen yet; only those get recursive expansion
const needDeep = {}
for (const c in compositions) {
const t = compositions[c].target
if (targets[t] === undefined) {
needDeep[t] = true
targets[t] = 0
}
}

// Compute all compositions
for (const c in compositions) {
const element = compositions[c]
const target = cds.model.definitions[element.target] // REVISIT: element._target is the actual reference

// Recurse depth-first into the child entity (as UPSERT) when needed;
// indent the nested fragment for readable generated source
const ins = cds.ql.UPSERT.into({ ref: [...into.ref, c] })
const next = needDeep[target.name] ? OP.INSERT.call(this, ins, path, targets).replace(/\n/g, '\n ') : ''
/* TODO: for UPDATE / UPSERT
const del = cds.ql.DELETE.from({
ref: [...into.ref, {
id: c,
where: ['not', { list: ObjectKeys(target.keys).map(k => ({ ref: [k] })) }, 'in', { list: [] }]
}]
})
*/
// Generated code: collect this composition's children, copy the parent's
// foreign-key values onto each child, and queue them for the flat insert
js = `${js}
${label}:{
const children = entries[${JSON.stringify(target.name)}] ??= []
for(const p of parents) {
const child = p[${JSON.stringify(c)}]
if(!child) continue // TODO: throw clear error when child is not the correct type
${element.is2one ? 'c = child' : 'for(const c of child) {'}
${element._foreignKeys.map(l => `c[${JSON.stringify(l.childElement.name)}] = p[${JSON.stringify(l.parentElement.name)}]`).join(' \n')}
children.push(c)
${element.is2one ? '' : '}'}
}
${next ? `if(!children.length) break ${label}` : ''}
${next}
}
`
}

// Remove current target from path
path.pop()

if (isRoot) {
// At the root, emit one flat insert per collected entity and run them all
const queries = Object.keys(targets).map(t => {
// Pre-render the flat SQL for entity `t`; its entries are streamed as JSON
const { sql } = this.cqn2sql(cds.ql.INSERT([]).into(t))
return `this._insert({
sql: ${JSON.stringify(sql)},
entries: [[toStream(entries[${JSON.stringify(t)}])]],
cqn: {INSERT:{into:{ref:[${JSON.stringify(t)}]}}}
})`
})
js = `${js}
return Promise.all([
${queries.join(',\n')}
])
}`
} else {
js = `${js}
}`
}

return js
}

// Deep UPDATE is not implemented yet: return an empty array, whose
// length <= 1 makes the compiler above yield undefined (no deep handling).
OP.UPDATE = (/*{ UPDATE, target, elements }*/) => []
44 changes: 36 additions & 8 deletions hana/lib/HANAService.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ const hanaKeywords = keywords.reduce((prev, curr) => {
const DEBUG = cds.debug('sql|db')
let HANAVERSION = 0

const deepSQL = false

/**
* @implements SQLService
*/
Expand All @@ -27,6 +29,13 @@ class HANAService extends SQLService {
super.deploy = this.hdiDeploy
}

this._deepSQL = deepSQL

if (!deepSQL) {
this.on(['INSERT', 'UPSERT'], require('@cap-js/db-service/lib/fill-in-keys')) // REVISIT should be replaced by correct input processing eventually
this.on(['INSERT', 'UPSERT'], require('@cap-js/db-service/lib/deep-queries').onDeep)
}

this.on(['BEGIN'], this.onBEGIN)
this.on(['COMMIT'], this.onCOMMIT)
this.on(['ROLLBACK'], this.onROLLBACK)
Expand Down Expand Up @@ -168,16 +177,29 @@ class HANAService extends SQLService {
}

// Handles INSERT queries. With deep SQL enabled and an entries payload,
// delegates statement generation to ./deep2flat and streams the entries;
// otherwise falls back to the flat cqn2sql rendering.
// NOTE(review): `deepSQL` is declared `false` in this file, so the deep
// branch is currently disabled — confirm before relying on it.
async onINSERT({ query, data }) {
if (deepSQL && query.INSERT.entries) {
const sql = require('./deep2flat').call(this, query)
// Wrap entries for execution: reuse an already-readable stream as-is,
// otherwise serialize all entries into a single JSON byte stream
const entries = Readable.isReadable(query.INSERT.entries[0])
? [query.INSERT.entries]
: [[Readable.from(this.class.CQN2SQL.prototype.INSERT_entries_stream(query.INSERT.entries), { objectMode: false })]]
return this._insert({ sql, entries, cqn: query })
}
return this._insert(this.cqn2sql(query, data))
}

async _insert({ sql, entries, cqn }) {
try {
const { sql, entries, cqn } = this.cqn2sql(query, data)
if (!sql) return // Do nothing when there is nothing to be done
const ps = await this.prepare(sql)
// HANA driver supports batch execution
const results = await (entries
? HANAVERSION <= 2
? entries.reduce((l, c) => l.then(() => this.ensureDBC() && ps.run(c)), Promise.resolve(0))
: entries.length > 1 ? this.ensureDBC() && await ps.runBatch(entries) : this.ensureDBC() && await ps.run(entries[0])
: this.ensureDBC() && ps.run())
const results = await (
sql.startsWith('DO')
? this.ensureDBC() && (await ps.proc(entries[0], [{ PARAMETER_NAME: 'result' }])).result[0]
: entries
? HANAVERSION <= 2
? entries.reduce((l, c) => l.then(() => this.ensureDBC() && ps.run(c)), Promise.resolve(0))
: entries.length > 1 ? this.ensureDBC() && await ps.runBatch(entries) : this.ensureDBC() && await ps.run(entries[0])
: this.ensureDBC() && ps.run())
return new this.class.InsertResults(cqn, results)
} catch (err) {
throw _not_unique(err, 'ENTITY_ALREADY_EXISTS')
Expand Down Expand Up @@ -655,7 +677,10 @@ class HANAService extends SQLService {
this.columns = columns

const extractions = this.managed(
columns.map(c => ({ name: c })),
(elements
? ObjectKeys(elements).filter(c => c in elements && !elements[c].virtual && !elements[c].value && !elements[c].isAssociation)
: columns
).map(c => ({ name: c })),
elements,
!!q.UPSERT,
)
Expand Down Expand Up @@ -683,6 +708,8 @@ class HANAService extends SQLService {
]]
}

this.extract = `SELECT _JSON_ as _JSON_,${converter} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(_JSON_ NVARCHAR(2147483647) FORMAT JSON PATH '$',${extraction}) ERROR ON ERROR)`

// WITH SRC is used to force HANA to interpret the ? as a NCLOB allowing for streaming of the data
// Additionally for drivers that did allow for streaming of NVARCHAR they quickly reached size limits
// This should allow for 2GB of data to be inserted
Expand Down Expand Up @@ -1066,7 +1093,7 @@ class HANAService extends SQLService {
const notManged = managed === undefined
return {
name,
column: `${extract}, ${this.quote('$.' + name)} NVARCHAR(2147483647) FORMAT JSON PATH '$.${name}'`,
column: notManged ? `${extract}` : `${extract},${this.quote('$.' + name)} NVARCHAR(2147483647) FORMAT JSON PATH '$.${name}'`,
// For @cds.on.insert ensure that there was no entry yet before setting managed in UPSERT
switch: notManged
? oldOrNew
Expand Down Expand Up @@ -1105,6 +1132,7 @@ class HANAService extends SQLService {
LargeBinary: () => `NVARCHAR(2147483647)`,
Binary: () => `NVARCHAR(2147483647)`,
array: () => `NVARCHAR(2147483647) FORMAT JSON`,
Composition: () => `NVARCHAR(2147483647) FORMAT JSON`,
Vector: () => `NVARCHAR(2147483647)`,
Decimal: () => `DECIMAL`,

Expand Down
Loading
Loading