diff --git a/db-service/lib/SQLService.js b/db-service/lib/SQLService.js
index 117a5fe5b..84192ab5e 100644
--- a/db-service/lib/SQLService.js
+++ b/db-service/lib/SQLService.js
@@ -19,10 +19,20 @@ const BINARY_TYPES = {
  * @returns {Promise}
  */
 
+const deepSQL = true
+
 class SQLService extends DatabaseService {
   init() {
-    this.on(['INSERT', 'UPSERT', 'UPDATE'], require('./fill-in-keys')) // REVISIT should be replaced by correct input processing eventually
-    this.on(['INSERT', 'UPSERT', 'UPDATE'], require('./deep-queries').onDeep)
+    this.on([/*'INSERT', 'UPSERT',*/ 'UPDATE'], require('./fill-in-keys')) // REVISIT should be replaced by correct input processing eventually
+    this.on([/*'INSERT', 'UPSERT',*/ 'UPDATE'], require('./deep-queries').onDeep)
+
+    this._deepSQL = false
+
+    if (!deepSQL) {
+      this.on(['INSERT', 'UPSERT'], require('@cap-js/db-service/lib/fill-in-keys')) // REVISIT should be replaced by correct input processing eventually
+      this.on(['INSERT', 'UPSERT'], require('@cap-js/db-service/lib/deep-queries').onDeep)
+    }
+
     if (cds.env.features.db_strict) {
       this.before(['INSERT', 'UPSERT', 'UPDATE'], ({ query }) => {
         const elements = query.target?.elements; if (!elements) return
@@ -165,7 +175,20 @@ class SQLService extends DatabaseService {
    * @type {Handler}
    */
   async onINSERT({ query, data }) {
-    const { sql, entries, cqn } = this.cqn2sql(query, data)
+    if (deepSQL && query.INSERT.entries) {
+      const exec = require('./deep2flat').call(this, query)
+      try {
+        const result = await exec.call(this, Readable, query.INSERT.entries)
+        return result[0]
+      } catch (e) {
+        e.query = exec + ''
+        throw e
+      }
+    }
+    return this._insert(this.cqn2sql(query, data))
+  }
+
+  async _insert({ sql, entries, cqn }) {
     if (!sql) return // Do nothing when there is nothing to be done // REVISIT: fix within mtxs
     const ps = await this.prepare(sql)
     const results = entries ? await Promise.all(entries.map(e => ps.run(e))) : await ps.run()
@@ -222,7 +245,7 @@ class SQLService extends DatabaseService {
     if (where) {
       let last = from.ref.at(-1)
       if (last.where) [last, where] = [last.id, [{ xpr: last.where }, 'and', { xpr: where }]]
-      from = { ref: [...from.ref.slice(0, -1), { id: last, where }] }
+      from = { ref: [...from.ref.slice(0, -1), { id: last?.id || last, where }] }
     }
     // Process child compositions depth-first
     let { depth = 0, visited = [] } = req
@@ -352,7 +375,7 @@ class SQLService extends DatabaseService {
   cqn2sql(query, values) {
     let q = this.cqn4sql(query)
     let kind = q.kind || Object.keys(q)[0]
-    if (kind in { INSERT: 1, DELETE: 1, UPSERT: 1, UPDATE: 1 }) {
+    if (q.target && kind in { INSERT: 1, DELETE: 1, UPSERT: 1, UPDATE: 1 }) {
       q = resolveView(q, this.model, this) // REVISIT: before resolveView was called on flat cqn obtained from cqn4sql -> is it correct to call on original q instead?
       let target = q[kind]._transitions?.[0].target
       if (target) q.target = target // REVISIT: Why isn't that done in resolveView?
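
Context for the change above: with `deepSQL` enabled, deep INSERTs no longer run through the generic `fill-in-keys`/`deep-queries` handlers; `onINSERT` instead compiles the CQN once (via `./deep2flat`) into a function that flattens nested composition entries into one flat row set per target entity, propagating parent keys into each child, and hands every set to `_insert`. A minimal, self-contained sketch of that flattening idea for a single recursive composition — illustrative only, with made-up names, not the actual generated code:

    const flattenDeep = (entries, fk = { parent: 'ID', child: 'parent_ID' }) => {
      const rows = []                        // flat rows, parents before children
      let level = entries
      while (level.length) {
        const next = []
        for (const row of level) {
          const { children = [], ...flat } = row
          rows.push(flat)                    // parent row without its children
          for (const c of children) next.push({ ...c, [fk.child]: row[fk.parent] })
        }
        level = next                         // descend one composition level
      }
      return rows
    }

    // flattenDeep([{ ID: 1, name: 'Fiction', children: [{ ID: 10, name: 'Drama' }] }])
    // → [{ ID: 1, name: 'Fiction' }, { ID: 10, name: 'Drama', parent_ID: 1 }]
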
diff --git a/db-service/lib/cqn2sql.js b/db-service/lib/cqn2sql.js
index 1d8468e91..8a9def190 100644
--- a/db-service/lib/cqn2sql.js
+++ b/db-service/lib/cqn2sql.js
@@ -345,6 +345,8 @@ class CQN2SQLRenderer {
     if (from.SELECT) return _aliased(`(${this.SELECT(from)})`)
     if (from.join)
       return `${this.from(from.args[0])} ${from.join} JOIN ${this.from(from.args[1])} ON ${this.where(from.on)}`
+    if (from.func)
+      return this.func(from)
   }
 
   /**
@@ -517,7 +519,7 @@ class CQN2SQLRenderer {
   }
 
   async *INSERT_entries_stream(entries, binaryEncoding = 'base64') {
-    const elements = this.cqn.target?.elements || {}
+    const elements = this.cqn?.target?.elements || {}
     const transformBase64 = binaryEncoding === 'base64'
       ? a => a
       : a => a != null ? Buffer.from(a, 'base64').toString(binaryEncoding) : a
diff --git a/db-service/lib/deep-queries.js b/db-service/lib/deep-queries.js
index 25d5312b4..0387d38ff 100644
--- a/db-service/lib/deep-queries.js
+++ b/db-service/lib/deep-queries.js
@@ -74,7 +74,7 @@ const hasDeep = (q, target) => {
 
 // unofficial config!
 const DEEP_DELETE_MAX_RECURSION_DEPTH =
-  (cds.env.features.recursion_depth && Number(cds.env.features.recursion_depth)) || 4 // we use 4 here as our test data has a max depth of 3
+  (cds.env.features.recursion_depth && Number(cds.env.features.recursion_depth)) || 10 // default raised from 4, which matched our test data's max depth of 3
 
 // IMPORTANT: Skip only if @cds.persistence.skip is `true` → e.g. this skips skipping targets marked with @cds.persistence.skip: 'if-unused'
 const _hasPersistenceSkip = target => target?.['@cds.persistence.skip'] === true
diff --git a/db-service/lib/deep2flat.js b/db-service/lib/deep2flat.js
new file mode 100644
index 000000000..6f1259d55
--- /dev/null
+++ b/db-service/lib/deep2flat.js
@@ -0,0 +1,109 @@
+const cds = require('@sap/cds/lib')
+
+const OP = {}
+
+module.exports = function (q) {
+  const kind = q.kind || Object.keys(q)[0]
+  const ret = OP[kind].call(this, q)
+  if (ret?.length > 1) {
+    const func = new Function('Readable', 'children', ret)
+    return func
+  }
+}
+
+OP.INSERT = function (q, path = [], targets = {}) {
+  const kind = q.kind || Object.keys(q)[0]
+  const INSERT = q[kind] || q.INSERT || q.UPSERT
+  const { target } = q
+  // Only INSERT.entries get deep logic
+  if (INSERT.rows) return ''
+  const { compositions } = target
+
+  let into = INSERT.into
+  if (typeof into === 'string') into = { ref: [into] }
+
+  if (path.find(c => c.name === q.target.name)) return ''
+  const isRoot = path.length === 0
+  path.push(q.target)
+  targets[q.target.name] = (targets[q.target.name] || 0) + 1
+
+  const label = `l${path.length}`
+  let js = `{
+  ${isRoot ? `const toStream = entries => {
+    const stream = Readable.from(this.class.CQN2SQL.prototype.INSERT_entries_stream(entries))
+    stream.type = 'json'
+    return stream
+}` : ''}
+  ${isRoot ? 'const entries = {}' : ''}
+  ${isRoot ? `entries[${JSON.stringify(target.name)}] = children` : ''}
+  const parents = children`
+
+  const needDeep = {}
+  for (const c in compositions) {
+    const t = compositions[c].target
+    if (targets[t] === undefined) {
+      needDeep[t] = true
+      targets[t] = 0
+    }
+  }
+
+  // Compute all compositions
+  for (const c in compositions) {
+    const element = compositions[c]
+    const target = cds.model.definitions[element.target] // REVISIT: element._target is the actual reference
+
+    const ins = cds.ql.UPSERT.into({ ref: [...into.ref, c] })
+    const next = needDeep[target.name] ?
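+      // recurse once per composition target that has not been visited yet; the path guard above keeps cyclic compositions (e.g. Genres.children) from looping forever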
+      OP.INSERT.call(this, ins, path, targets).replace(/\n/g, '\n ') : ''
+    /* TODO: for UPDATE / UPSERT
+    const del = cds.ql.DELETE.from({
+      ref: [...into.ref, {
+        id: c,
+        where: ['not', { list: ObjectKeys(target.keys).map(k => ({ ref: [k] })) }, 'in', { list: [] }]
+      }]
+    })
+    */
+    js = `${js}
+  ${label}:{
+    const children = entries[${JSON.stringify(target.name)}] ??= []
+    for(const p of parents) {
+      const child = p[${JSON.stringify(c)}]
+      if(!child) continue // TODO: throw clear error when child is not the correct type
+      ${element.is2one ? 'const c = child' : 'for(const c of child) {'}
+      ${element._foreignKeys.map(l => `c[${JSON.stringify(l.childElement.name)}] = p[${JSON.stringify(l.parentElement.name)}]`).join(' \n')}
+      children.push(c)
+      ${element.is2one ? '' : '}'}
+    }
+    ${next ? `if(!children.length) break ${label}` : ''}
+    ${next}
+  }
+`
+  }
+
+  // Remove current target from path
+  path.pop()
+
+  if (isRoot) {
+    const queries = Object.keys(targets).map(t => {
+      const { sql } = this.cqn2sql(cds.ql.INSERT([]).into(t))
+      return `this._insert({
+  sql: ${JSON.stringify(sql)},
+  entries: [[toStream(entries[${JSON.stringify(t)}])]],
+  cqn: {INSERT:{into:{ref:[${JSON.stringify(t)}]}}}
+})`
+    })
+    js = `${js}
+  return Promise.all([
+    ${queries.join(',\n')}
+  ])
+}`
+  } else {
+    js = `${js}
+}`
+  }
+
+  return js
+}
+
+OP.UPDATE = (/*{ UPDATE, target, elements }*/) => {
+  return []
+}
diff --git a/hana/lib/HANAService.js b/hana/lib/HANAService.js
index 80e6ba28c..ad5b781c4 100644
--- a/hana/lib/HANAService.js
+++ b/hana/lib/HANAService.js
@@ -16,6 +16,8 @@ const hanaKeywords = keywords.reduce((prev, curr) => {
 const DEBUG = cds.debug('sql|db')
 let HANAVERSION = 0
 
+const deepSQL = false
+
 /**
  * @implements SQLService
  */
@@ -27,6 +29,13 @@ class HANAService extends SQLService {
       super.deploy = this.hdiDeploy
     }
 
+    this._deepSQL = deepSQL
+
+    if (!deepSQL) {
+      this.on(['INSERT', 'UPSERT'], require('@cap-js/db-service/lib/fill-in-keys')) // REVISIT should be replaced by correct input processing eventually
+      this.on(['INSERT', 'UPSERT'], require('@cap-js/db-service/lib/deep-queries').onDeep)
+    }
+
     this.on(['BEGIN'], this.onBEGIN)
     this.on(['COMMIT'], this.onCOMMIT)
     this.on(['ROLLBACK'], this.onROLLBACK)
@@ -168,16 +177,29 @@ class HANAService extends SQLService {
   }
 
   async onINSERT({ query, data }) {
+    if (deepSQL && query.INSERT.entries) {
+      const sql = require('./deep2flat').call(this, query)
+      const entries = Readable.isReadable(query.INSERT.entries[0])
+        ? [query.INSERT.entries]
+        : [[Readable.from(this.class.CQN2SQL.prototype.INSERT_entries_stream(query.INSERT.entries), { objectMode: false })]]
+      return this._insert({ sql, entries, cqn: query })
+    }
+    return this._insert(this.cqn2sql(query, data))
+  }
+
+  async _insert({ sql, entries, cqn }) {
     try {
-      const { sql, entries, cqn } = this.cqn2sql(query, data)
       if (!sql) return // Do nothing when there is nothing to be done
       const ps = await this.prepare(sql)
       // HANA driver supports batch execution
-      const results = await (entries
-        ? HANAVERSION <= 2
-          ? entries.reduce((l, c) => l.then(() => this.ensureDBC() && ps.run(c)), Promise.resolve(0))
-          : entries.length > 1 ? this.ensureDBC() && await ps.runBatch(entries) : this.ensureDBC() && await ps.run(entries[0])
-        : this.ensureDBC() && ps.run())
+      const results = await (
+        sql.startsWith('DO')
+          ? this.ensureDBC() && (await ps.proc(entries[0], [{ PARAMETER_NAME: 'result' }])).result[0]
+          : entries
+            ? HANAVERSION <= 2
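+              // HANA 2 and older lack batch support for prepared statements, so entries are executed sequentially in one promise chain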
+              ? entries.reduce((l, c) => l.then(() => this.ensureDBC() && ps.run(c)), Promise.resolve(0))
+              : entries.length > 1 ? this.ensureDBC() && await ps.runBatch(entries) : this.ensureDBC() && await ps.run(entries[0])
+            : this.ensureDBC() && ps.run())
       return new this.class.InsertResults(cqn, results)
     } catch (err) {
       throw _not_unique(err, 'ENTITY_ALREADY_EXISTS')
     }
@@ -655,7 +677,10 @@ class HANAService extends SQLService {
       this.columns = columns
 
       const extractions = this.managed(
-        columns.map(c => ({ name: c })),
+        (elements
+          ? ObjectKeys(elements).filter(c => c in elements && !elements[c].virtual && !elements[c].value && !elements[c].isAssociation)
+          : columns
+        ).map(c => ({ name: c })),
         elements,
         !!q.UPSERT,
       )
@@ -683,6 +708,8 @@ class HANAService extends SQLService {
         ]]
       }
 
+      this.extract = `SELECT _JSON_ as _JSON_,${converter} FROM JSON_TABLE(SRC.JSON, '$' COLUMNS(_JSON_ NVARCHAR(2147483647) FORMAT JSON PATH '$',${extraction}) ERROR ON ERROR)`
+
       // WITH SRC is used to force HANA to interpret the ? as a NCLOB allowing for streaming of the data
       // Additionally for drivers that did allow for streaming of NVARCHAR they quickly reached size limits
       // This should allow for 2GB of data to be inserted
@@ -1066,7 +1093,7 @@ class HANAService extends SQLService {
       const notManged = managed === undefined
       return {
         name,
-        column: `${extract}, ${this.quote('$.' + name)} NVARCHAR(2147483647) FORMAT JSON PATH '$.${name}'`,
+        column: notManged ? `${extract}` : `${extract},${this.quote('$.' + name)} NVARCHAR(2147483647) FORMAT JSON PATH '$.${name}'`,
         // For @cds.on.insert ensure that there was no entry yet before setting managed in UPSERT
         switch: notManged
           ? oldOrNew
@@ -1105,6 +1132,7 @@ class HANAService extends SQLService {
     LargeBinary: () => `NVARCHAR(2147483647)`,
     Binary: () => `NVARCHAR(2147483647)`,
     array: () => `NVARCHAR(2147483647) FORMAT JSON`,
+    Composition: () => `NVARCHAR(2147483647) FORMAT JSON`,
     Vector: () => `NVARCHAR(2147483647)`,
     Decimal: () => `DECIMAL`,
 
diff --git a/hana/lib/deep2flat.js b/hana/lib/deep2flat.js
new file mode 100644
index 000000000..1fe9663ee
--- /dev/null
+++ b/hana/lib/deep2flat.js
@@ -0,0 +1,130 @@
+const cds = require('@sap/cds/lib')
+
+const OP = {}
+
+module.exports = function (q) {
+  const kind = q.kind || Object.keys(q)[0]
+  const ret = OP[kind].call(this, q)
+  return ret
+}
+
+OP.INSERT = function (q, path = [], targets = {}) {
+  const name = n => n.replace(/\./g, '_')
+
+  const kind = q.kind || Object.keys(q)[0]
+  const INSERT = q[kind] || q.INSERT || q.UPSERT
+  const { target } = q
+  // Only INSERT.entries get deep logic
+  // if (INSERT.rows) return ''
+  const { compositions } = target
+
+  let into = INSERT.into
+  if (typeof into === 'string') into = { ref: [into] }
+
+  if (path.find(c => c.name === q.target.name)) return ''
+  const isRoot = path.length === 0
+  path.push(q.target)
+  targets[q.target.name] = targets[q.target.name] || { count: 0 }
+  targets[q.target.name].count += 1
+
+  const label = `l${path.length}`
+  const extract = this.cqn2sql(q)
+    .extract
+    .replace('SRC.JSON', ':input')
+    .trim()
+  let sql = ''
+  /*
+  let sql = !isRoot
+    ? ''
+    : `
+DO (IN input NCLOB => ?)
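+-- (commented-out early draft of the generated DO-block header; the live version is assembled in the isRoot branch below)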
+BEGIN
+  DECLARE v_${label}_index INT = 0;
+  DECLARE v_${label}_last_index INT = -1;
+
+  v_${name(q.target.name)} = ${extract};
+`*/
+
+  const needDeep = {}
+  for (const c in compositions) {
+    const t = compositions[c].target
+    if (targets[t] === undefined) {
+      needDeep[t] = true
+      targets[t] = { count: 0 }
+    }
+  }
+
+  // Compute all compositions
+  for (const c in compositions) {
+    const element = compositions[c]
+    const target = cds.model.definitions[element.target] // REVISIT: element._target is the actual reference
+
+    const ins = cds.ql.INSERT([]).into({ ref: [...into.ref, c] })
+    const next = needDeep[target.name] ? OP.INSERT.call(this, ins, path, targets).replace(/\n/g, '\n ') : ''
+    /* TODO: for UPDATE / UPSERT
+    const del = cds.ql.DELETE.from({
+      ref: [...into.ref, {
+        id: c,
+        where: ['not', { list: ObjectKeys(target.keys).map(k => ({ ref: [k] })) }, 'in', { list: [] }]
+      }]
+    })
+    */
+    const cqn2sql = this.cqn2sql(ins)
+    let extract = cqn2sql.extract.trim()
+    targets[target.name].extract = extract
+    targets[target.name].columns = cqn2sql.columns
+
+    const parentMapping = []
+    for (const foreignKey of element._foreignKeys) {
+      const cname = foreignKey.childElement.name
+      const pname = foreignKey.parentElement.name
+      const org = new RegExp(`,${cname} ([^ ]*) PATH '\\$\\.${cname}'`).exec(extract)
+      extract = extract.replace(org[0], '') // TODO: properly quote column name
+      parentMapping.push(`${cname} ${org[1]} PATH '$.${pname}'`)
+    }
+
+    sql = `${sql}
+  WHILE record_count(:v_${name(target.name)}) > 0 DO
+    INSERT INTO ${name(target.name)} (${cqn2sql.columns}) SELECT ${cqn2sql.columns} FROM :v_${name(target.name)};
+    v_${name(target.name)} =
+      WITH SRC AS (SELECT _JSON_ as JSON FROM :v_${name(q.target.name)})
+      ${extract.replace(`'$' COLUMNS(`, `'$$' COLUMNS(${parentMapping}, ${c} NVARCHAR(2147483647) FORMAT JSON PATH '$$.${c}', NESTED PATH '$$.${c}[*]' COLUMNS(`).replace(') ERROR ON ERROR)', ')) ERROR ON ERROR)')}
+      WHERE LENGTH(${c}) > 2;
+  END WHILE;
+`
+  }
+
+  // Remove current target from path
+  path.pop()
+
+  if (isRoot) {
+    const tableValues = Object.keys(targets)
+      .map(t => `v_${name(t)} = ${targets[t].extract.replace('SRC.JSON', q.target.name === t ? ':input' : "'[]'")};`)
+    const finalInserts = [] || Object.keys(targets)
+      .map(t => `INSERT INTO ${name(t)} (${targets[t].columns}) SELECT ${targets[t].columns} FROM :v_${name(t)};`)
+
+    sql = `DO (IN input NCLOB => ?)
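+-- generated deep-insert procedure: one table variable per involved entity, the JSON input parsed once, then flattened level by level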
+BEGIN
+  DECLARE v_changes INT = 0;
+  DECLARE v_${label}_index INT = 0;
+  DECLARE v_${label}_last_index INT = -1;
+
+  ${tableValues.join('\n')}
+
+  SELECT COUNT(*) INTO v_changes FROM :v_${name(q.target.name)};
+${sql}
+
+  --SELECT * FROM :v_${name(q.target.name)};
+  ${finalInserts.join('\n')}
+  SELECT v_changes as "changes" FROM DUMMY;
+END;`
+  } else {
+    sql = `${sql}`
+  }
+
+  return sql
+}
+
+OP.UPDATE = (/*{ UPDATE, target, elements }*/) => {
+  return []
+}
diff --git a/hana/lib/drivers/hana-client.js b/hana/lib/drivers/hana-client.js
index 0a7e3408e..b36bff5d6 100644
--- a/hana/lib/drivers/hana-client.js
+++ b/hana/lib/drivers/hana-client.js
@@ -130,9 +130,11 @@ class HANAClientDriver extends driver {
       return { changes }
     }
 
-    ret.proc = async (data, outParameters) => {
+    ret.proc = async (params, outParameters) => {
+      const { values, streams } = this._extractStreams(params)
       const stmt = await ret._prep
-      const rows = await prom(stmt, 'execQuery')(data)
+      const rows = await prom(stmt, 'execQuery')(values)
+      await this._sendStreams(stmt, streams)
       return this._getResultForProcedure(rows, outParameters, stmt)
     }
 
diff --git a/sqlite/lib/SQLiteService.js b/sqlite/lib/SQLiteService.js
index 899019e3f..32da9aa9b 100644
--- a/sqlite/lib/SQLiteService.js
+++ b/sqlite/lib/SQLiteService.js
@@ -278,7 +278,7 @@ class SQLiteService extends SQLService {
 // }
 
 function _not_unique(err, code) {
-  if (err.message.match(/unique constraint/i))
+  if (err.message?.match(/unique constraint/i))
     return Object.assign(err, {
       originalMessage: err.message, // FIXME: required because of next line
       message: code, // FIXME: misusing message as code
diff --git a/test/scenarios/bookshop/deep-insert-mapped.sql b/test/scenarios/bookshop/deep-insert-mapped.sql
new file mode 100644
index 000000000..78d11afb2
--- /dev/null
+++ b/test/scenarios/bookshop/deep-insert-mapped.sql
@@ -0,0 +1,33 @@
+DO (IN input NCLOB => ?)
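+-- Hand-maintained reference version of the procedure that hana/lib/deep2flat.js generates for sap.capire.bookshop.Genres (used by the experimental branch in genres.test.js)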
+BEGIN
+  DECLARE v_changes INT = 0;
+  DECLARE v_l1_index INT = 0;
+  DECLARE v_l1_last_index INT = -1;
+
+  -- Parse the incoming root data
+  v_sap_capire_bookshop_Genres = SELECT name AS name,descr AS descr,ID AS ID,parent_ID AS parent_ID,_JSON_ AS _JSON_ FROM JSON_TABLE(:input, '$' COLUMNS(name NVARCHAR(1020) PATH '$.name',descr NVARCHAR(4000) PATH '$.descr',ID INT PATH '$.ID',parent_ID INT PATH '$.parent_ID',_JSON_ NVARCHAR(2147483647) FORMAT JSON PATH '$') ERROR ON ERROR);
+
+  -- Take the root-level row count to return as the "changes" result
+  v_changes = record_count(:v_sap_capire_bookshop_Genres);
+
+  -- This is bookshop.Genres and the composition is recursive, so it needs to keep going until no new genres are left
+  WHILE record_count(:v_sap_capire_bookshop_Genres) > 0 DO
+    -- Insert the current contents of "v_sap_capire_bookshop_Genres" as it will be overwritten in this loop
+    INSERT INTO sap_capire_bookshop_Genres (name,descr,ID,parent_ID) SELECT name,descr,ID,parent_ID FROM :v_sap_capire_bookshop_Genres;
+    -- Select all the children with their parent ID propagated (mostly the same as the root data JSON_TABLE, but with parent_ID prefixed)
+    v_sap_capire_bookshop_Genres =
+      WITH SRC AS (SELECT _JSON_ FROM :v_sap_capire_bookshop_Genres)
+      SELECT name AS name,descr AS descr,ID AS ID,parent_ID as parent_ID,_JSON_ AS _JSON_ FROM JSON_TABLE(SRC._JSON_, '$' COLUMNS(parent_ID INT PATH '$.ID', children NVARCHAR(2147483647) FORMAT JSON PATH '$.children', NESTED PATH '$.children[*]' COLUMNS(name NVARCHAR(1020) PATH '$.name',descr NVARCHAR(4000) PATH '$.descr',ID INT PATH '$.ID',_JSON_ NVARCHAR(2147483647) FORMAT JSON PATH '$')) ERROR ON ERROR)
+      WHERE LENGTH(children) > 2; -- Prevents childless parents from showing up again: "JSON_TABLE" behaves like (SELECT * FROM PARENT LEFT JOIN PARENT.CHILDREN), so a parent also shows up when it does not have children
+  END WHILE;
+
+  -- Removed texts as they are not being used currently
+
+  -- Debugging output queries to see intermediate results:
+  -- SELECT * FROM :v_sap_capire_bookshop_Genres;
+  -- INSERT INTO sap_capire_bookshop_Genres (name,descr,ID,parent_ID) SELECT name,descr,ID,parent_ID FROM :v_sap_capire_bookshop_Genres;
+  -- INSERT INTO sap_capire_bookshop_Genres_texts (locale,name,descr,ID) SELECT locale,name,descr,ID FROM :v_sap_capire_bookshop_Genres_texts;
+  -- SELECT * FROM sap_capire_bookshop_Genres;
+
+  SELECT v_changes as "changes" FROM DUMMY;
+END;
\ No newline at end of file
diff --git a/test/scenarios/bookshop/delete.test.js b/test/scenarios/bookshop/delete.test.js
index dc0f67238..2aff3fc50 100644
--- a/test/scenarios/bookshop/delete.test.js
+++ b/test/scenarios/bookshop/delete.test.js
@@ -16,7 +16,7 @@ describe('Bookshop - Delete', () => {
       { ID: 998 },
       {
         ID: 1,
-        toB: {
+        toB: [{
           ID: 12,
           toA: [{ ID: 121 }],
           toC: [
             {
               ID: 13,
               toA: [
                 {
                   ID: 131,
                   toB: [
                     {
                       ID: 132,
                     },
                   ],
                 },
               ],
             },
           ],
-        },
-        toC: {
+        }],
+        toC: [{
           ID: 13,
           toA: [{ ID: 13 }],
-        },
+        }],
       },
     ])
     const del = DELETE.from('sap.capire.bookshop.A').where('ID = 1')
diff --git a/test/scenarios/bookshop/genres.test.js b/test/scenarios/bookshop/genres.test.js
index b8ff03c57..954acf990 100644
--- a/test/scenarios/bookshop/genres.test.js
+++ b/test/scenarios/bookshop/genres.test.js
@@ -1,3 +1,6 @@
+const { Readable } = require('stream')
+const streamConsumer = require('stream/consumers')
+
 const cds = require('../../cds.js')
 const bookshop = require('path').resolve(__dirname, '../../bookshop')
 
@@ -27,8 +30,89 @@ describe('Bookshop - Genres', () => {
     assert.deepEqual(beforeData.data, afterData.data)
   })
 
-  test('Insert Genres', async () => {
-    const body = require('./genres.json')
+  test.only('Insert Genres', async () => {
+    throw new Error(`DON'T MERGE CURRENT STATE IT IS NOT CLEANED UP!!!`)
+
+    const { Genres } = cds.entities('sap.capire.bookshop')
+
+    // Large deep genres generation code
+    const maxID = 100000
+
+    let width = 1
+    let height = 1
+
+    while (width ** height <= maxID) {
+      width++
+      if (width ** height >= maxID) break
+      height++
+    }
+
+    let currentID = 1
+    const makeGenreGenerator = function* (depth = 0) {
+      const ID = currentID++
+      yield `{"ID":${ID},"name":"Genre ${ID}","children":[`
+
+      depth++
+      if (depth <= height) {
+        let sep = ''
+        for (let i = 0; i < width; i++) {
+          yield sep
+          sep = ','
+          for (const chunk of makeGenreGenerator(depth)) {
+            yield chunk
+          }
+        }
+      }
+
+      yield ']}'
+    }
+
+    if (false) {
+      // Start: hard-coded experimental procedure
+      await cds.tx(async db => {
+        const bodyStream = () => Readable.from(makeGenreGenerator(), { objectMode: false })
+        const body = await streamConsumer.text(bodyStream())
+
+        await db.begin()
+
+        await db.exec(`${cds.utils.fs.readFileSync(__dirname + '/deep-insert-mapper.sql')}`)
+        const ps = await db.prepare(`${cds.utils.fs.readFileSync(__dirname + '/deep-insert-mapped.sql')}`)
+        for (let i = 0; i < 0; i++) {
+          await cds.ql.DELETE.from(Genres)
+          const s = performance.now()
+          const res = await ps.proc([body], [{ PARAMETER_NAME: 'ret' }])
+          process.stdout.write(`INSERT MAPPED (rows ${currentID - 1}) ${performance.now() - s}\n`)
+        }
+        const after = await db.exec(`SELECT * FROM sap_capire_bookshop_Genres`)
+        // const proc = await cds.run(, [{ input: body }])
+      })
+    }
+    // Start of an actual test
+
+    const db = await cds.connect.to('db')
+    const bodyStream = () => Readable.from(makeGenreGenerator(), { objectMode: false })
+    const _bodyCache = await streamConsumer[db._deepSQL ? 'text' : 'json'](bodyStream())
+    const body = () => db._deepSQL
+      ? Readable.from((function* () { yield _bodyCache })(), { objectMode: false })
+      : _bodyCache
+
+    for (let i = 0; i < 1000; i++) {
+      await cds.ql.DELETE.from(Genres)//.where('1=1')
+      const s = performance.now()
+      await cds.ql.INSERT(body()).into(Genres)
+      process.stdout.write(`DEEP INSERT (rows: ${currentID - 1}) ${performance.now() - s}\n`)
+    }
+
+    // await cds.ql.INSERT({ ID: 1 }).into(Genres)
+
+    // await cds.ql.UPDATE(Genres).data(body).where(`ID=${1}`)
+    // await cds.ql.UPDATE(Genres).data(body).where(`ID=${1}`)
+
+    // const changes = await cds.ql.INSERT(body).into(Genres)
+    const after = await cds.ql.SELECT.from(Genres)
+    expect(after.length).to.equal(currentID - 1)
+
+    const insertResponse = await POST('/test/Genres', body(), admin)
     expect(insertResponse.status).to.be.eq(201)
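+
+    // For reference, a minimal sketch (hypothetical snippet, not used above) of the
+    // streamed-body pattern this test relies on: a generator yields JSON text chunks
+    // and Readable.from turns them into a byte stream that consumers can collect.
+    //
+    //   const chunks = function* () { yield '{"ID":1,"name":"Genre 1",'; yield '"children":[]}' }
+    //   const stream = Readable.from(chunks(), { objectMode: false })
+    //   const json = JSON.parse(await streamConsumer.text(stream)) // → { ID: 1, name: 'Genre 1', children: [] }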