fix write_parameters types
Christian-B committed Dec 20, 2023
1 parent a4ff059 commit 5974e8c
Showing 2 changed files with 8 additions and 4 deletions.
First changed file:

@@ -102,7 +102,8 @@ def n_weight_terms(self):
 
     @overrides(AbstractTimingDependence.write_parameters)
     def write_parameters(
-            self, spec, global_weight_scale, synapse_weight_scales):
+            self, spec: DataSpecificationBase, global_weight_scale: float,
+            synapse_weight_scales: NDArray[floating]):
         # TODO: update to write the parameters
         spec.write_value(
             self._my_potentiation_parameter, data_type=DataType.S1615)
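Note on the unchanged spec.write_value(..., data_type=DataType.S1615) call above: S1615 is SpiNNaker's signed 16.15 fixed-point format (1 sign bit, 16 integer bits, 15 fractional bits in one 32-bit word). A minimal sketch of the scaling involved, assuming the usual 2**15 scale factor; the to_s1615 helper below is illustrative only and not part of this commit:

    def to_s1615(value: float) -> int:
        # Scale by 2**15 and round: a rough picture of how a Python float
        # is packed into the 32-bit word written for DataType.S1615
        # (range checks and saturation are omitted in this sketch).
        return int(round(value * (1 << 15)))

    print(to_s1615(1.0))   # 32768
    print(to_s1615(-0.5))  # -16384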
Second changed file:

@@ -1,5 +1,8 @@
+from numpy import floating
+from numpy.typing import NDArray
 from spinn_utilities.overrides import overrides
-from spinn_front_end_common.interface.ds import DataType
+from spinn_front_end_common.interface.ds import (
+    DataSpecificationBase, DataType)
 from spinn_front_end_common.utilities.constants import BYTES_PER_WORD
 from spynnaker.pyNN.models.neuron.plasticity.stdp.weight_dependence import (
     AbstractWeightDependence, AbstractHasAPlusAMinus)

@@ -88,8 +91,8 @@ def get_parameters_sdram_usage_in_bytes(
 
     @overrides(AbstractWeightDependence.write_parameters)
     def write_parameters(
-            self, spec, global_weight_scale, synapse_weight_scales,
-            n_weight_terms):
+            self, spec: DataSpecificationBase, global_weight_scale: float,
+            synapse_weight_scales: NDArray[floating], n_weight_terms: int):
         # TODO: update to write the parameters
         # Loop through each synapse type's weight scale
         for w in synapse_weight_scales:
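The annotations added in this commit use numpy's typing module: NDArray[floating] matches an ndarray of any floating-point dtype (float32, float64, ...). A minimal, self-contained sketch of the annotation in use; scale_all and its values are illustrative and not taken from this repository:

    from numpy import array, floating
    from numpy.typing import NDArray

    def scale_all(weight_scales: NDArray[floating],
                  global_scale: float) -> NDArray[floating]:
        # Accepts float32 and float64 arrays alike, mirroring the
        # synapse_weight_scales parameter typed in this commit.
        return weight_scales * global_scale

    print(scale_all(array([1.5, 2.0]), 0.5))  # [0.75 1.  ]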
