From b8022d42e220a41a79bbf6cea9b74e34b9cfc3c0 Mon Sep 17 00:00:00 2001
From: satabol
Date: Sun, 29 Oct 2023 02:15:26 +0300
Subject: [PATCH] fix #5034. Apply Vector Field Node performance proposal solution

---
 nodes/field/vector_field_apply.py |  14 +++
 utils/curve/nurbs.py              |  26 +++++
 utils/field/vector.py             |   5 +-
 utils/surface/algorithms.py       | 160 +++++++++++++++++++++++-------
 4 files changed, 165 insertions(+), 40 deletions(-)

diff --git a/nodes/field/vector_field_apply.py b/nodes/field/vector_field_apply.py
index faba624bc5..fb9d07c041 100644
--- a/nodes/field/vector_field_apply.py
+++ b/nodes/field/vector_field_apply.py
@@ -3,6 +3,7 @@
 import bpy
 from bpy.props import FloatProperty, EnumProperty, BoolProperty, IntProperty, StringProperty
+from datetime import datetime
 
 from sverchok.node_tree import SverchCustomTreeNode
 from sverchok.data_structure import updateNode, zip_long_repeat, repeat_last_for_length, ensure_nesting_level
@@ -54,6 +55,8 @@ def process(self):
         if not any(socket.is_linked for socket in self.outputs):
             return
 
+        t0 = datetime.now()-datetime.now()
+        dt0 = datetime.now()
         vertices_s = self.inputs['Vertices'].sv_get()
         coeffs_s = self.inputs['Coefficient'].sv_get()
         fields_s = self.inputs['Field'].sv_get()
@@ -62,6 +65,8 @@ def process(self):
         vertices_s = ensure_nesting_level(vertices_s, 4)
         coeffs_s = ensure_nesting_level(coeffs_s, 3)
         fields_s = ensure_nesting_level(fields_s, 2, data_types=(SvVectorField,))
+        dt0 = datetime.now()-dt0
+        t0 = t0+dt0
 
         verts_out = []
         for fields, vertices_l, coeffs_l, iterations_l in zip_long_repeat(fields_s, vertices_s, coeffs_s, iterations_s):
@@ -82,9 +87,15 @@ def process(self):
                         vertex = (np.array(vertex) + coeff * vector).tolist()
                     new_verts = [vertex]
                 else:
+                    t0 = datetime.now()-datetime.now()
+                    dt0 = datetime.now()
                     coeffs = repeat_last_for_length(coeffs, len(vertices))
                     vertices = np.array(vertices)
+                    dt0 = datetime.now()-dt0
+                    t0 = t0+dt0
+
                     for i in range(iterations):
+                        dt0 = datetime.now()
                         xs = vertices[:,0]
                         ys = vertices[:,1]
                         zs = vertices[:,2]
@@ -92,7 +103,10 @@ def process(self):
                         new_vectors = np.dstack((new_xs[:], new_ys[:], new_zs[:]))
                         new_vectors = np.array(coeffs)[np.newaxis].T * new_vectors[0]
                         vertices = vertices + new_vectors
+                        dt0 = datetime.now()-dt0
+                        t0 = t0+dt0
                     new_verts = vertices if self.output_numpy else vertices.tolist()
+                print(f'process t0={t0}')
 
                 verts_out.append(new_verts)
 
diff --git a/utils/curve/nurbs.py b/utils/curve/nurbs.py
index 3201fc5505..cceb881295 100644
--- a/utils/curve/nurbs.py
+++ b/utils/curve/nurbs.py
@@ -1152,6 +1152,9 @@ def evaluate(self, t):
             return np.array([0,0,0])
         else:
             return numerator / denominator
+        # numerator, denominator = self.fraction_single_v01(0, t)
+        # res = np.where( denominator.reshape(-1,1)==0.0, np.repeat( np.array( [[0,0,0]] ), denominator.size, axis=0), numerator/denominator[:,np.newaxis] )
+        # return res
 
     def fraction(self, deriv_order, ts):
         n = len(ts)
@@ -1166,6 +1169,18 @@ def fraction(self, deriv_order, ts):
 
         return numerator, denominator[np.newaxis].T
 
+    # def fraction_single_v01(self, deriv_order, t):
+    #     p = self.degree
+    #     k = len(self.control_points)
+    #     ts = np.array([t]) if not hasattr(t, '__len__') else np.array(t)
+    #     ns = np.array([self.basis.derivative(i, p, deriv_order)(ts) for i in range(k)]) # (k,)
+    #     coeffs = ns * self.weights[np.newaxis].T # (k, )
+    #     coeffs_t = coeffs.T
+    #     numerator = np.transpose((np.expand_dims(coeffs, axis=2) * np.expand_dims(self.control_points, axis=1)), (1,0,2) ) # (k, n, 3)
+    #     numerator = numerator.sum(axis=1) # (3,n)
+    #     denominator = coeffs.sum(axis=0) # ()
+    #     return numerator, denominator
+
     def fraction_single(self, deriv_order, t):
         p = self.degree
         k = len(self.control_points)
@@ -1190,6 +1205,17 @@ def evaluate_array(self, ts):
 #        if (denominator == 0).any():
 #            print("Num:", numerator)
 #            print("Denom:", denominator)
+
+        # deriv_order = 0
+        # p = self.degree
+        # k = len(self.control_points)
+        # ts = np.array(ts)
+        # ns = np.array([self.basis.derivative(i, p, deriv_order)(ts) for i in range(k)]) # (k,)
+        # coeffs = ns * self.weights[np.newaxis].T # (k, )
+        # numerator = np.transpose((np.expand_dims(coeffs, axis=2) * np.expand_dims(self.control_points, axis=1)), (1,0,2) ) # (k, n, 3)
+        # numerator = numerator.sum(axis=1) # (3,n)
+        # denominator = coeffs.sum(axis=0) # ()
+
         return nurbs_divide(numerator, denominator)
 
     def tangent(self, t, tangent_delta=None):
diff --git a/utils/field/vector.py b/utils/field/vector.py
index 22fa5e0981..cda073760e 100644
--- a/utils/field/vector.py
+++ b/utils/field/vector.py
@@ -1044,8 +1044,9 @@ def _evaluate(self, vertices):
         if self.only_2D:
             return self.surface.evaluate_array(us, vs)
 
-        surf_vertices = self.surface.evaluate_array(us, vs)
-        spline_normals = self.surface.normal_array(us, vs)
+        #surf_vertices = self.surface.evaluate_array(us, vs)
+        #spline_normals = self.surface.normal_array(us, vs)
+        spline_normals, surf_vertices = self.surface.normal_array_with_source_vertices(us, vs)
         zs = vertices[:,self.orient_axis].flatten()
         zs = zs[np.newaxis].T
         v1 = zs * spline_normals
diff --git a/utils/surface/algorithms.py b/utils/surface/algorithms.py
index 74a2f73b53..d47ac80a64 100644
--- a/utils/surface/algorithms.py
+++ b/utils/surface/algorithms.py
@@ -9,6 +9,8 @@
 from collections import defaultdict
 from mathutils import Matrix, Vector
 
+from datetime import datetime
+
 from sverchok.utils.math import (
         ZERO, FRENET, HOUSEHOLDER, TRACK, DIFF, TRACK_NORMAL,
@@ -159,59 +161,141 @@ def normal(self, u, v):
 #        return np.array(normals)
 
     def normal_array(self, us, vs):
+        result_normals, *_ = self.normal_array_with_source_vertices(us, vs)
+        return result_normals
+
+    def normal_array_with_source_vertices(self, us, vs):
         h = 0.001
-        result = np.empty((len(us), 3))
+        t7=datetime.now()
+        _points = np.empty( (0, 3), dtype=np.float64)
+        _points_u_h = np.empty( (0, 3), dtype=np.float64)
+        _points_v_h = np.empty( (0, 3), dtype=np.float64)
         v_to_u = defaultdict(list)
         v_to_i = defaultdict(list)
         for i, (u, v) in enumerate(zip(us, vs)):
             v_to_u[v].append(u)
             v_to_i[v].append(i)
-        for v, us_by_v in v_to_u.items():
-            us_by_v = np.array(us_by_v)
-            is_by_v = v_to_i[v]
+        v_to_i_flatten = np.hstack(np.array( list(v_to_i.values())).flatten())
+        t7 = datetime.now()-t7
+        t1 = datetime.now()-datetime.now()
+        t2 = datetime.now()-datetime.now()
+        t3 = datetime.now()-datetime.now()
+        t5 = datetime.now()-datetime.now()
+        t6 = datetime.now()-datetime.now()
+        t8 = datetime.now()-datetime.now()
+
+        t4 = datetime.now()
+        list_spline_v = []
+        list_spline_h = []
+        _v = np.array( list(v_to_u.keys()), dtype=np.float64 )
+        for i_spline, v_spline in enumerate(self.v_splines):
+            v_min, v_max = v_spline.get_u_bounds()
+            _vx = (v_max-v_min)*_v+v_min
+            _list_v_i = np.where( _vx+h