diff --git a/.vscodeignore b/.vscodeignore index 16fb8d7..7c6e527 100644 --- a/.vscodeignore +++ b/.vscodeignore @@ -11,3 +11,4 @@ vsc-extension-quickstart.md **/*.map **/*.ts node_modules/** +scripts/** diff --git a/CHANGELOG.md b/CHANGELOG.md index 010912b..038878c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,26 @@ # Change Log +## Pre-Release (1.3.1) + +- Allow offline editing of SDFGs (adding / deleting elements etc.) +- Add auto-opening SDFG and instrumentation report preference to the extension + settings +- Allow exporting of SDFG transformations to JSON files +- Integrate the new local view for close-up memory reuse analysis +- Improved error reporting from the DaCe daemon +- Various bugfixes and improvements + +## 1.2 + +### 1.2.0 + +- Allow loading of custom transformations +- Enable specializing SDFGs through the SDFG Analysis panel +- Make the built-in minimap toggleable +- Adopt a pre-release system +- Support workspace trust +- Various bugfixes and improvements + ## 1.1 ### 1.1.0 @@ -94,7 +115,7 @@ - Provide interactive instrumentation of SDFGs. - Provide visualization of instrumentation reports on SDFGs. - - If a runtime report is generated, prompt the user to display it ontop of the + - If a runtime report is generated, prompt the user to display it ontop of the currently active SDFG. - Provide running of SDFGs. - Run SDFGs normally, or run with profiling - this runs N times and reports diff --git a/backend/dace_vscode/arith_ops.py b/backend/dace_vscode/arith_ops.py index faf56c7..b50b900 100644 --- a/backend/dace_vscode/arith_ops.py +++ b/backend/dace_vscode/arith_ops.py @@ -10,7 +10,11 @@ from dace import nodes, dtypes import sympy -from dace_vscode.utils import get_uuid, load_sdfg_from_json +from dace_vscode.utils import ( + get_uuid, + load_sdfg_from_json, + get_exception_message, +) def symeval(val, symbols): @@ -211,10 +215,26 @@ def get_arith_ops(sdfg_json): return loaded['error'] sdfg = loaded['sdfg'] - propagation.propagate_memlets_sdfg(sdfg) - - arith_map = {} - create_arith_ops_map(sdfg, arith_map, {}) - return { - 'arithOpsMap': arith_map, - } + try: + propagation.propagate_memlets_sdfg(sdfg) + except Exception as e: + return { + 'error': { + 'message': 'Failed to propagate memlets through SDFG', + 'details': get_exception_message(e), + }, + } + + try: + arith_map = {} + create_arith_ops_map(sdfg, arith_map, {}) + return { + 'arithOpsMap': arith_map, + } + except Exception as e: + return { + 'error': { + 'message': 'Failed to count arithmetic operations', + 'details': get_exception_message(e), + }, + } diff --git a/backend/dace_vscode/editing.py b/backend/dace_vscode/editing.py deleted file mode 100644 index db41fff..0000000 --- a/backend/dace_vscode/editing.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright 2020-2022 ETH Zurich and the DaCe-VSCode authors. -# All rights reserved. 
- -from dace import ( - nodes, SDFG, SDFGState, InterstateEdge, Memlet, dtypes -) -from dace_vscode.utils import ( - load_sdfg_from_json, - find_graph_element_by_uuid, - get_uuid, - disable_save_metadata, - restore_save_metadata, -) -import pydoc - - -def remove_sdfg_elements(sdfg_json, uuids): - from dace.sdfg.graph import Edge - - old_meta = disable_save_metadata() - - loaded = load_sdfg_from_json(sdfg_json) - if loaded['error'] is not None: - return loaded['error'] - sdfg = loaded['sdfg'] - - elements = [] - for uuid in uuids: - elements.append(find_graph_element_by_uuid(sdfg, uuid)) - - for element_ret in elements: - element = element_ret['element'] - parent = element_ret['parent'] - - if parent is not None and element is not None: - if isinstance(element, Edge): - parent.remove_edge(element) - else: - parent.remove_node(element) - else: - return { - 'error': { - 'message': 'Failed to delete element', - 'details': 'Element or parent not found', - }, - } - - new_sdfg = sdfg.to_json() - restore_save_metadata(old_meta) - - return { - 'sdfg': new_sdfg, - } - - -def insert_sdfg_element(sdfg_str, type, parent_uuid, edge_a_uuid): - sdfg_answer = load_sdfg_from_json(sdfg_str) - sdfg = sdfg_answer['sdfg'] - uuid = 'error' - ret = find_graph_element_by_uuid(sdfg, parent_uuid) - parent = ret['element'] - - libname = None - if type is not None and isinstance(type, str): - split_type = type.split('|') - if len(split_type) == 2: - type = split_type[0] - libname = split_type[1] - - if type == 'SDFGState': - if parent is None: - parent = sdfg - elif isinstance(parent, nodes.NestedSDFG): - parent = parent.sdfg - state = parent.add_state() - uuid = [get_uuid(state)] - elif type == 'AccessNode': - arrays = list(parent.parent.arrays.keys()) - if len(arrays) == 0: - parent.parent.add_array('tmp', [1], dtype=dtypes.float64) - arrays = list(parent.parent.arrays.keys()) - node = parent.add_access(arrays[0]) - uuid = [get_uuid(node, parent)] - elif type == 'Map': - map_entry, map_exit = parent.add_map('map', dict(i='0:1')) - uuid = [get_uuid(map_entry, parent), get_uuid(map_exit, parent)] - elif type == 'Consume': - consume_entry, consume_exit = parent.add_consume('consume', ('i', '1')) - uuid = [get_uuid(consume_entry, parent), get_uuid(consume_exit, parent)] - elif type == 'Tasklet': - tasklet = parent.add_tasklet( - name='placeholder', - inputs={'in'}, - outputs={'out'}, - code='') - uuid = [get_uuid(tasklet, parent)] - elif type == 'NestedSDFG': - sub_sdfg = SDFG('nested_sdfg') - sub_sdfg.add_array('in', [1], dtypes.float32) - sub_sdfg.add_array('out', [1], dtypes.float32) - - nsdfg = parent.add_nested_sdfg(sub_sdfg, sdfg, {'in'}, {'out'}) - uuid = [get_uuid(nsdfg, parent)] - elif type == 'LibraryNode': - if libname is None: - return { - 'error': { - 'message': 'Failed to add library node', - 'details': 'Must provide a valid library node type', - }, - } - libnode_class = pydoc.locate(libname) - libnode = libnode_class() - parent.add_node(libnode) - uuid = [get_uuid(libnode, parent)] - elif type == 'Edge': - edge_start_ret = find_graph_element_by_uuid(sdfg, edge_a_uuid) - edge_start = edge_start_ret['element'] - edge_parent = edge_start_ret['parent'] - if edge_start is not None: - if edge_parent is None: - edge_parent = sdfg - - if isinstance(edge_parent, SDFGState): - if not (isinstance(edge_start, nodes.Node) and - isinstance(parent, nodes.Node)): - return { - 'error': { - 'message': 'Failed to add edge', - 'details': 'Must connect two nodes or two states', - }, - } - memlet = Memlet() - 
edge_parent.add_edge(edge_start, None, parent, None, memlet) - elif isinstance(edge_parent, SDFG): - if not (isinstance(edge_start, SDFGState) and - isinstance(parent, SDFGState)): - return { - 'error': { - 'message': 'Failed to add edge', - 'details': 'Must connect two nodes or two states', - }, - } - isedge = InterstateEdge() - edge_parent.add_edge(edge_start, parent, isedge) - uuid = ['NONE'] - else: - raise ValueError('No edge starting point provided') - - old_meta = disable_save_metadata() - new_sdfg_str = sdfg.to_json() - restore_save_metadata(old_meta) - - return { - 'sdfg': new_sdfg_str, - 'uuid': uuid, - } diff --git a/backend/dace_vscode/transformations.py b/backend/dace_vscode/transformations.py index 0c6d90c..517e83f 100644 --- a/backend/dace_vscode/transformations.py +++ b/backend/dace_vscode/transformations.py @@ -35,27 +35,36 @@ def expand_library_node(json_in): except KeyError: sdfg_id, state_id, node_id = None, None, None - if sdfg_id is None: - sdfg.expand_library_nodes() - else: - context_sdfg = sdfg.sdfg_list[sdfg_id] - state = context_sdfg.node(state_id) - node = state.node(node_id) - if isinstance(node, nodes.LibraryNode): - node.expand(context_sdfg, state) + try: + if sdfg_id is None: + sdfg.expand_library_nodes() else: - return { - 'error': { - 'message': 'Failed to expand library node', - 'details': 'The provided node is not a valid library node', - }, - } + context_sdfg = sdfg.sdfg_list[sdfg_id] + state = context_sdfg.node(state_id) + node = state.node(node_id) + if isinstance(node, nodes.LibraryNode): + node.expand(context_sdfg, state) + else: + return { + 'error': { + 'message': 'Failed to expand library node', + 'details': + 'The provided node is not a valid library node', + }, + } - new_sdfg = sdfg.to_json() - utils.restore_save_metadata(old_meta) - return { - 'sdfg': new_sdfg, - } + new_sdfg = sdfg.to_json() + utils.restore_save_metadata(old_meta) + return { + 'sdfg': new_sdfg, + } + except Exception as e: + return { + 'error': { + 'message': 'Failed to expand library node', + 'details': utils.get_exception_message(e), + }, + } def reapply_history_until(sdfg_json, index): @@ -77,9 +86,11 @@ def reapply_history_until(sdfg_json, index): history = sdfg.transformation_hist for i in range(index + 1): - transformation = history[i] - transformation._sdfg = original_sdfg.sdfg_list[transformation.sdfg_id] try: + transformation = history[i] + transformation._sdfg = original_sdfg.sdfg_list[ + transformation.sdfg_id + ] if isinstance(transformation, SubgraphTransformation): transformation._sdfg.append_transformation(transformation) transformation.apply( @@ -92,10 +103,20 @@ def reapply_history_until(sdfg_json, index): except Exception as e: print(traceback.format_exc(), file=sys.stderr) sys.stderr.flush() + hist_nr = i + 1 + hist_nr_string = str(hist_nr) + if (hist_nr - 1) % 10 == 0 and (hist_nr) != 11: + hist_nr_string += 'st' + elif (hist_nr - 2) % 10 == 0 and hist_nr != 12: + hist_nr_string += 'nd' + else: + hist_nr_string += 'th' return { 'error': { - 'message': - 'Failed to play back the transformation history', + 'message': ( + 'Failed to play back the transformation history, ' + + 'failed at ' + hist_nr_string + ' history point' + ), 'details': utils.get_exception_message(e), }, } @@ -152,17 +173,25 @@ def apply_transformation(sdfg_json, transformation_json): def add_custom_transformations(filepaths): - for xf_path in filepaths: - if not xf_path in sys.modules: - xf_module_spec = importlib.util.spec_from_file_location( - xf_path, xf_path - ) - xf_module = 
importlib.util.module_from_spec(xf_module_spec) - sys.modules[xf_path] = xf_module - xf_module_spec.loader.exec_module(xf_module) - return { - 'done': True, - } + try: + for xf_path in filepaths: + if not xf_path in sys.modules: + xf_module_spec = importlib.util.spec_from_file_location( + xf_path, xf_path + ) + xf_module = importlib.util.module_from_spec(xf_module_spec) + sys.modules[xf_path] = xf_module + xf_module_spec.loader.exec_module(xf_module) + return { + 'done': True, + } + except Exception as e: + return { + 'error': { + 'message': 'Failed to load custom transformation(s)', + 'details': utils.get_exception_message(e), + }, + } def get_transformations(sdfg_json, selected_elements, permissive): @@ -178,79 +207,91 @@ def get_transformations(sdfg_json, selected_elements, permissive): return loaded['error'] sdfg = loaded['sdfg'] - optimizer = SDFGOptimizer(sdfg) try: - matches = optimizer.get_pattern_matches(permissive=permissive) - except TypeError: - # Compatibility with versions older than 0.12 - matches = optimizer.get_pattern_matches(strict=not permissive) + optimizer = SDFGOptimizer(sdfg) + try: + matches = optimizer.get_pattern_matches(permissive=permissive) + except TypeError: + # Compatibility with versions older than 0.12 + matches = optimizer.get_pattern_matches(strict=not permissive) - transformations = [] - docstrings = {} - for transformation in matches: - transformations.append(transformation.to_json()) - docstrings[type(transformation).__name__] = transformation.__doc__ + transformations = [] + docstrings = {} + for transformation in matches: + transformations.append(transformation.to_json()) + docstrings[type(transformation).__name__] = transformation.__doc__ - selected_states = [ - utils.sdfg_find_state_from_element(sdfg, n) for n in selected_elements - if n['type'] == 'state' - ] - selected_nodes = [ - utils.sdfg_find_node_from_element(sdfg, n) for n in selected_elements - if n['type'] == 'node' - ] - selected_sdfg_ids = list(set(elem['sdfgId'] for elem in selected_elements)) - selected_sdfg = sdfg - if len(selected_sdfg_ids) > 1: - return { - 'transformations': transformations, - 'docstrings': docstrings, - 'warnings': 'More than one SDFG selected, ignoring subgraph', - } - elif len(selected_sdfg_ids) == 1: - selected_sdfg = sdfg.sdfg_list[selected_sdfg_ids[0]] - - subgraph = None - if len(selected_states) > 0: - subgraph = SubgraphView(selected_sdfg, selected_states) - else: - violated = False - state = None - for node in selected_nodes: - if state is None: - state = node.state - elif state != node.state: - violated = True - break - if not violated and state is not None: - subgraph = SubgraphView(state, selected_nodes) + selected_states = [ + utils.sdfg_find_state_from_element(sdfg, n) + for n in selected_elements + if n['type'] == 'state' + ] + selected_nodes = [ + utils.sdfg_find_node_from_element(sdfg, n) + for n in selected_elements + if n['type'] == 'node' + ] + selected_sdfg_ids = list( + set(elem['sdfgId'] for elem in selected_elements) + ) + selected_sdfg = sdfg + if len(selected_sdfg_ids) > 1: + return { + 'transformations': transformations, + 'docstrings': docstrings, + 'warnings': 'More than one SDFG selected, ignoring subgraph', + } + elif len(selected_sdfg_ids) == 1: + selected_sdfg = sdfg.sdfg_list[selected_sdfg_ids[0]] - if subgraph is not None: - if hasattr(SubgraphTransformation, 'extensions'): - # Compatibility with versions older than 0.12 - extensions = SubgraphTransformation.extensions() + subgraph = None + if len(selected_states) > 0: + 
subgraph = SubgraphView(selected_sdfg, selected_states) else: - extensions = SubgraphTransformation.subclasses_recursive() + violated = False + state = None + for node in selected_nodes: + if state is None: + state = node.state + elif state != node.state: + violated = True + break + if not violated and state is not None: + subgraph = SubgraphView(state, selected_nodes) - for xform in extensions: - # Subgraph transformations are single-state. - if len(selected_states) > 0: - continue - xform_obj = None - try: - xform_obj = xform() - xform_obj.setup_match(subgraph) - except: - # If the above method throws an exception, it might be because - # an older version of dace (<= 0.13.1) is being used - attempt - # to construct subgraph transformations using the old API. - xform_obj = xform(subgraph) - if xform_obj.can_be_applied(selected_sdfg, subgraph): - transformations.append(xform_obj.to_json()) - docstrings[xform.__name__] = xform_obj.__doc__ + if subgraph is not None: + if hasattr(SubgraphTransformation, 'extensions'): + # Compatibility with versions older than 0.12 + extensions = SubgraphTransformation.extensions() + else: + extensions = SubgraphTransformation.subclasses_recursive() - utils.restore_save_metadata(old_meta) - return { - 'transformations': transformations, - 'docstrings': docstrings, - } + for xform in extensions: + # Subgraph transformations are single-state. + if len(selected_states) > 0: + continue + xform_obj = None + try: + xform_obj = xform() + xform_obj.setup_match(subgraph) + except: + # If the above method throws an exception, it might be because + # an older version of dace (<= 0.13.1) is being used - attempt + # to construct subgraph transformations using the old API. + xform_obj = xform(subgraph) + if xform_obj.can_be_applied(selected_sdfg, subgraph): + transformations.append(xform_obj.to_json()) + docstrings[xform.__name__] = xform_obj.__doc__ + + utils.restore_save_metadata(old_meta) + return { + 'transformations': transformations, + 'docstrings': docstrings, + } + except Exception as e: + return { + 'error': { + 'message': 'Failed to load transformations', + 'details': utils.get_exception_message(e), + }, + } diff --git a/backend/run_dace.py b/backend/run_dace.py index 89adf3b..3ebde79 100644 --- a/backend/run_dace.py +++ b/backend/run_dace.py @@ -3,7 +3,6 @@ ##################################################################### # Before importing anything, try to take the ".env" file into account -import json import os import re import sys @@ -54,12 +53,13 @@ load_sdfg_from_file, disable_save_metadata, restore_save_metadata, + get_exception_message, ) -from dace_vscode import transformations, editing, arith_ops +from dace_vscode import transformations, arith_ops meta_dict = {} -def get_property_metdata(force_regenerate=False): +def get_property_metadata(force_regenerate=False): """ Generate a dictionary of class properties and their metadata. 
This iterates over all classes registered as serializable in DaCe's serialization module, checks whether there are properties present @@ -270,15 +270,32 @@ def compile_sdfg(path, suppress_instrumentation=False): return loaded['error'] sdfg = loaded['sdfg'] - if suppress_instrumentation: - _sdfg_remove_instrumentations(sdfg) + try: + if suppress_instrumentation: + _sdfg_remove_instrumentations(sdfg) + except Exception as e: + return { + 'error': { + 'message': ('Failed to remove instrumentation from SDFG ' + + 'for compiling'), + 'details': get_exception_message(e), + }, + } - compiled_sdfg: CompiledSDFG = sdfg.compile() + try: + compiled_sdfg: CompiledSDFG = sdfg.compile() - restore_save_metadata(old_meta) - return { - 'filename': compiled_sdfg.filename, - } + restore_save_metadata(old_meta) + return { + 'filename': compiled_sdfg.filename, + } + except Exception as e: + return { + 'error': { + 'message': 'Failed to compile SDFG', + 'details': get_exception_message(e), + }, + } def specialize_sdfg(path, symbol_map, remove_undef=True): @@ -289,25 +306,34 @@ def specialize_sdfg(path, symbol_map, remove_undef=True): return loaded['error'] sdfg: dace.sdfg.SDFG = loaded['sdfg'] - sdfg.specialize(symbol_map) + try: + cleaned_map = { k: int(v) for k, v in symbol_map.items() } + sdfg.specialize(cleaned_map) - # Remove any constants that are not defined anymore in the symbol map, if - # the remove_undef flag is set. - if remove_undef: - delkeys = set() - for key in sdfg.constants_prop: - if (key not in symbol_map or symbol_map[key] is None or - symbol_map[key] == 0): - delkeys.add(key) - for key in delkeys: - del sdfg.constants_prop[key] + # Remove any constants that are not defined anymore in the symbol map, + # if the remove_undef flag is set. + if remove_undef: + delkeys = set() + for key in sdfg.constants_prop: + if (key not in symbol_map or symbol_map[key] is None or + symbol_map[key] == 0): + delkeys.add(key) + for key in delkeys: + del sdfg.constants_prop[key] - ret_sdfg = sdfg.to_json() + ret_sdfg = sdfg.to_json() - restore_save_metadata(old_meta) - return { - 'sdfg': ret_sdfg, - } + restore_save_metadata(old_meta) + return { + 'sdfg': ret_sdfg, + } + except Exception as e: + return { + 'error': { + 'message': 'Failed to specialize SDFG', + 'details': get_exception_message(e), + }, + } def run_daemon(port): @@ -390,23 +416,9 @@ def _specialize_sdfg(): request_json = request.get_json() return specialize_sdfg(request_json['path'], request_json['symbol_map']) - @daemon.route('/insert_sdfg_element', methods=['POST']) - def _insert_sdfg_element(): - request_json = request.get_json() - return editing.insert_sdfg_element(request_json['sdfg'], - request_json['type'], - request_json['parent'], - request_json['edge_a']) - - @daemon.route('/remove_sdfg_elements', methods=['POST']) - def _remove_sdfg_elements(): - request_json = request.get_json() - return editing.remove_sdfg_elements(request_json['sdfg'], - request_json['uuids']) - @daemon.route('/get_metadata', methods=['GET']) def _get_metadata(): - return get_property_metdata() + return get_property_metadata() daemon.run(port=port) diff --git a/media/components/sdfv/index.html b/media/components/sdfv/index.html index bae6fdf..2fc684c 100644 --- a/media/components/sdfv/index.html +++ b/media/components/sdfv/index.html @@ -58,7 +58,7 @@
-
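
For reference, the recurring pattern this change applies across the backend — wrapping each DaCe operation in a `try`/`except` block and returning a structured `error` object instead of letting the exception escape the daemon — is sketched below. This is an illustrative sketch, not a copy of any one endpoint: `do_backend_work` is a hypothetical stand-in for any of the wrapped operations (memlet propagation, library-node expansion, compilation, specialization), while `load_sdfg_from_json` and `get_exception_message` are the `dace_vscode.utils` helpers imported throughout the diff.

```python
# Minimal sketch of the error-reporting pattern adopted in this diff.
# `do_backend_work` is a hypothetical placeholder; the helpers below are the
# ones the diff imports from dace_vscode.utils.
from dace_vscode.utils import load_sdfg_from_json, get_exception_message


def do_backend_work(sdfg):
    # Hypothetical placeholder for a wrapped operation (e.g. propagation,
    # compilation, specialization).
    return sdfg.to_json()


def handle_request(sdfg_json):
    loaded = load_sdfg_from_json(sdfg_json)
    if loaded['error'] is not None:
        # Pass the loader's structured error straight back to the extension.
        return loaded['error']
    sdfg = loaded['sdfg']

    try:
        return {
            'sdfg': do_backend_work(sdfg),
        }
    except Exception as e:
        # Report the failure in the same { 'error': { 'message', 'details' } }
        # shape used above, so the extension can surface it to the user
        # instead of the daemon crashing.
        return {
            'error': {
                'message': 'Failed to perform the requested operation',
                'details': get_exception_message(e),
            },
        }
```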