Add tile sort order to bulk data manager #11638

Merged · 12 commits · Nov 25, 2024
arches/app/etl_modules/branch_excel_importer.py (4 changes: 3 additions & 1 deletion)
@@ -205,8 +205,9 @@ def process_worksheet(self, worksheet, cursor, node_lookup, nodegroup_lookup):
tile_value_json, passes_validation = self.create_tile_value(
cell_values, data_node_lookup, node_lookup, row_details, cursor
)
+ sortorder = 0
cursor.execute(
"""INSERT INTO load_staging (nodegroupid, legacyid, resourceid, tileid, parenttileid, value, loadid, nodegroup_depth, source_description, passes_validation, operation) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""",
"""INSERT INTO load_staging (nodegroupid, legacyid, resourceid, tileid, parenttileid, value, loadid, nodegroup_depth, source_description, passes_validation, operation, sortorder) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""",
(
row_details["nodegroup_id"],
legacyid,
@@ -221,6 +222,7 @@ def process_worksheet(self, worksheet, cursor, node_lookup, nodegroup_lookup):
), # source_description
passes_validation,
operation,
+ sortorder,
),
)
except KeyError:
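The widened INSERTs in this PR assume the load_staging table has a sortorder column. The column addition itself is not visible in this excerpt, so the following Django migration is only an illustrative sketch with placeholder names, not the PR's actual migration:

# Hypothetical sketch only: the INSERT statements in this PR assume load_staging
# has a sortorder column. App label, migration name, and default below are
# placeholders, not taken from this PR.
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("models", "xxxx_previous_migration"),  # placeholder dependency
    ]

    operations = [
        migrations.RunSQL(
            sql="ALTER TABLE load_staging ADD COLUMN sortorder integer DEFAULT 0;",
            reverse_sql="ALTER TABLE load_staging DROP COLUMN sortorder;",
        ),
    ]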
arches/app/etl_modules/bulk_edit_concept.py (4 changes: 2 additions & 2 deletions)
@@ -393,8 +393,8 @@ def stage_data(
try:
sql = (
"""
- INSERT INTO load_staging (value, tileid, nodegroupid, parenttileid, resourceid, loadid, nodegroup_depth, source_description, operation, passes_validation)
- (SELECT tiledata, tileid, nodegroupid, parenttileid, resourceinstanceid, %(load_id)s, 0, 'bulk_edit', 'update', true
+ INSERT INTO load_staging (value, tileid, nodegroupid, parenttileid, resourceid, loadid, nodegroup_depth, source_description, operation, passes_validation, sortorder)
+ (SELECT tiledata, tileid, nodegroupid, parenttileid, resourceinstanceid, %(load_id)s, 0, 'bulk_edit', 'update', true, sortorder
FROM tiles
WHERE nodegroupid in (SELECT nodegroupid FROM nodes WHERE nodeid = %(node_id)s)
AND tiledata -> %(node_id)s ? %(old_id)s
arches/app/etl_modules/import_single_csv.py (6 changes: 4 additions & 2 deletions)
@@ -555,8 +555,9 @@ def populate_staging_table(
nodegroup_depth,
source_description,
operation,
- passes_validation
- ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""",
+ passes_validation,
+ sortorder
+ ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""",
(
nodegroup,
legacyid,
@@ -568,6 +569,7 @@ def populate_staging_table(
csv_file_name,
"insert",
passes_validation,
+ 0,
),
)

arches/app/etl_modules/tile_excel_importer.py (6 changes: 3 additions & 3 deletions)
@@ -81,7 +81,6 @@ def run_load_task_async(self, request):

def create_tile_value(
self,
- cell_values,
data_node_lookup,
node_lookup,
nodegroup_alias,
@@ -176,6 +175,7 @@ def process_worksheet(self, worksheet, cursor, node_lookup, nodegroup_lookup):
raise ValueError(_("All rows must have a valid resource id"))

node_values = cell_values[3:-3]
+ sortorder = cell_values[-3] if cell_values[-3] else 0
try:
row_count += 1
row_details = dict(zip(data_node_lookup[nodegroup_alias], node_values))
@@ -194,7 +194,6 @@ def process_worksheet(self, worksheet, cursor, node_lookup, nodegroup_lookup):
)
legacyid, resourceid = self.set_legacy_id(resourceid)
tile_value_json, passes_validation = self.create_tile_value(
- cell_values,
data_node_lookup,
node_lookup,
nodegroup_alias,
@@ -214,7 +213,7 @@ def process_worksheet(self, worksheet, cursor, node_lookup, nodegroup_lookup):
if TileModel.objects.filter(pk=tileid).exists():
operation = "update"
cursor.execute(
"""INSERT INTO load_staging (nodegroupid, legacyid, resourceid, tileid, parenttileid, value, loadid, nodegroup_depth, source_description, passes_validation, operation) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""",
"""INSERT INTO load_staging (nodegroupid, legacyid, resourceid, tileid, parenttileid, value, loadid, nodegroup_depth, source_description, passes_validation, operation, sortorder) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""",
(
row_details["nodegroup_id"],
legacyid,
@@ -229,6 +228,7 @@ def process_worksheet(self, worksheet, cursor, node_lookup, nodegroup_lookup):
), # source_description
passes_validation,
operation,
+ sortorder,
),
)
except KeyError:
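For context on how the tile Excel importer now obtains the sort order: node values are taken from cell_values[3:-3] and the sort order from cell_values[-3], falling back to 0 when that cell is empty. A minimal sketch of that slicing follows; the overall row layout is inferred from the slices in the diff above, and the contents of the other leading and trailing cells are an assumption here:

# Sketch of the row slicing used by process_worksheet above. The layout
# (three leading identifier cells, three trailing metadata cells, with the
# sort order in the third-from-last cell) is inferred from the diff; the
# meaning of the other leading/trailing cells is an assumption.
def split_row(cell_values):
    node_values = cell_values[3:-3]
    sortorder = cell_values[-3] if cell_values[-3] else 0  # empty cell -> default 0
    return node_values, sortorder


row = ["id-1", "id-2", "id-3", "value a", "value b", 2, None, None]
print(split_row(row))  # (['value a', 'value b'], 2)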