Source code for spynnaker.pyNN.models.neuron.plasticity.stdp.weight_dependence.weight_dependence_additive_triplet
# Copyright (c) 2017 The University of Manchester
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Iterable
from numpy import floating
from numpy.typing import NDArray
from spinn_utilities.overrides import overrides
from spinn_front_end_common.interface.ds import (
DataType, DataSpecificationBase)
from spinn_front_end_common.utilities.constants import BYTES_PER_WORD
from .abstract_has_a_plus_a_minus import AbstractHasAPlusAMinus
from .abstract_weight_dependence import AbstractWeightDependence

# Six words per synapse type
_SPACE_PER_SYNAPSE_TYPE = 6 * BYTES_PER_WORD


class WeightDependenceAdditiveTriplet(
AbstractHasAPlusAMinus, AbstractWeightDependence):
"""
    A triplet-based additive weight dependence STDP rule.
"""
__slots__ = (
"__a3_minus",
"__a3_plus",
"__w_max",
"__w_min")
__PARAM_NAMES = ('w_min', 'w_max', 'A3_plus', 'A3_minus')
default_parameters = {'w_min': 0.0, 'w_max': 1.0, 'A3_plus': 0.01,
'A3_minus': 0.01}
# noinspection PyPep8Naming
def __init__(
self, w_min: float = default_parameters['w_min'],
w_max: float = default_parameters['w_max'],
A3_plus: float = default_parameters['A3_plus'],
A3_minus: float = default_parameters['A3_minus']):
"""
:param float w_min: :math:`w^{min}`
:param float w_max: :math:`w^{max}`
:param float A3_plus: :math:`A_3^+`
:param float A3_minus: :math:`A_3^-`
"""
super().__init__()
self.__w_min = w_min
self.__w_max = w_max
self.__a3_plus = A3_plus
self.__a3_minus = A3_minus
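        # Note: the pair-rule amplitudes A_plus and A_minus are not
        # constructor arguments here; they come from the
        # AbstractHasAPlusAMinus base class rather than this constructor
        # (they are compared via self.A_plus / self.A_minus in is_same_as
        # below).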
@property
def w_min(self) -> float:
"""
:math:`w^{min}`
:rtype: float
"""
return self.__w_min
@property
def w_max(self) -> float:
"""
:math:`w^{max}`
:rtype: float
"""
return self.__w_max
@property
def A3_plus(self) -> float:
"""
:math:`A_3^+`
:rtype: float
"""
# pylint: disable=invalid-name
return self.__a3_plus
@property
def A3_minus(self) -> float:
"""
:math:`A_3^-`
:rtype: float
"""
# pylint: disable=invalid-name
return self.__a3_minus
@overrides(AbstractWeightDependence.is_same_as)
def is_same_as(self, weight_dependence: AbstractWeightDependence) -> bool:
if not isinstance(weight_dependence, WeightDependenceAdditiveTriplet):
return False
return (
(self.__w_min == weight_dependence.w_min) and
(self.__w_max == weight_dependence.w_max) and
(self.A_plus == weight_dependence.A_plus) and
(self.A_minus == weight_dependence.A_minus) and
(self.__a3_plus == weight_dependence.A3_plus) and
(self.__a3_minus == weight_dependence.A3_minus))
@property
def vertex_executable_suffix(self) -> str:
"""
The suffix to be appended to the vertex executable for this rule.
:rtype: str
"""
return "additive"
@overrides(AbstractWeightDependence.get_parameters_sdram_usage_in_bytes)
def get_parameters_sdram_usage_in_bytes(
self, n_synapse_types: int, n_weight_terms: int) -> int:
if n_weight_terms != 2:
            raise NotImplementedError(
                "Triplet additive weight dependence requires exactly two "
                "weight terms")
return _SPACE_PER_SYNAPSE_TYPE * n_synapse_types
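        # Worked example: six words are reserved per synapse type, so with
        # BYTES_PER_WORD = 4 (SpiNNaker is a 32-bit platform) and two
        # synapse types this is 6 * 4 * 2 = 48 bytes.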
@overrides(AbstractWeightDependence.write_parameters)
def write_parameters(
self, spec: DataSpecificationBase, global_weight_scale: float,
synapse_weight_scales: NDArray[floating], n_weight_terms: int):
# Loop through each synapse type
for _ in synapse_weight_scales:
# Scale the weights
spec.write_value(data=self.__w_min * global_weight_scale,
data_type=DataType.S1615)
spec.write_value(data=self.__w_max * global_weight_scale,
data_type=DataType.S1615)
# pylint: disable=wrong-spelling-in-comment
# Based on http://data.andrewdavison.info/docs/PyNN/_modules/pyNN
# /standardmodels/synapses.html
# Pre-multiply A+ and A- by Wmax
spec.write_value(
data=self.A_plus * self.__w_max * global_weight_scale,
data_type=DataType.S1615)
spec.write_value(
data=self.A_minus * self.__w_max * global_weight_scale,
data_type=DataType.S1615)
spec.write_value(
data=self.__a3_plus * self.__w_max * global_weight_scale,
data_type=DataType.S1615)
spec.write_value(
data=self.__a3_minus * self.__w_max * global_weight_scale,
data_type=DataType.S1615)
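            # Each synapse type thus gets six S1615 values, matching
            # _SPACE_PER_SYNAPSE_TYPE above: w_min and w_max (scaled by
            # global_weight_scale), then A_plus, A_minus, A3_plus and
            # A3_minus, each pre-multiplied by w_max and scaled by
            # global_weight_scale.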
@property
def weight_maximum(self) -> float:
"""
The maximum weight that will ever be set in a synapse as a result
of this rule.
:rtype: float
"""
return self.__w_max
@overrides(AbstractWeightDependence.get_parameter_names)
def get_parameter_names(self) -> Iterable[str]:
return self.__PARAM_NAMES
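

# A minimal usage sketch (illustrative only; how this rule is combined with
# a triplet timing dependence inside an STDP mechanism is not defined in
# this module and is assumed here):
#
#     weight_dependence = WeightDependenceAdditiveTriplet(
#         w_min=0.0, w_max=0.1, A3_plus=0.01, A3_minus=0.001)
#     # ... passed as the weight_dependence of an STDPMechanism, together
#     # with a matching triplet timing dependence, when building a plastic
#     # projection.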