
Commit dd50a08

minor changes
AkiRusProd committed Jun 14, 2024
1 parent 8be6a65 commit dd50a08
Showing 3 changed files with 19 additions and 11 deletions.
15 changes: 8 additions & 7 deletions neunet/__init__.py
@@ -32,25 +32,25 @@ def tensor(data, requires_grad=False, dtype=float32, device="cpu"):
    return Tensor(data, requires_grad=requires_grad, dtype=dtype, device=device)


-def ones(*shape, dtype=None, requires_grad=True, device="cpu"):
+def ones(*shape, dtype=None, requires_grad=False, device="cpu"):
    shape = tuple(*shape) if all(isinstance(arg, (list, tuple)) for arg in shape) else shape

    return Tensor(np.ones(shape, dtype=dtype), requires_grad=requires_grad, device=device)


-def zeros(*shape, dtype=None, requires_grad=True, device="cpu"):
+def zeros(*shape, dtype=None, requires_grad=False, device="cpu"):
    shape = tuple(*shape) if all(isinstance(arg, (list, tuple)) for arg in shape) else shape

    return Tensor(np.zeros(shape, dtype=dtype), requires_grad=requires_grad, device=device)


-def rand(*shape, dtype=None, requires_grad=True, device="cpu"):
+def rand(*shape, dtype=None, requires_grad=False, device="cpu"):
    shape = tuple(*shape) if all(isinstance(arg, (list, tuple)) for arg in shape) else shape

    return Tensor(np.random.rand(*shape).astype(dtype), requires_grad=requires_grad, device=device)


-def randn(*shape, dtype=None, requires_grad=True, device="cpu"):
+def randn(*shape, dtype=None, requires_grad=False, device="cpu"):
    shape = tuple(*shape) if all(isinstance(arg, (list, tuple)) for arg in shape) else shape

    return Tensor(
@@ -60,7 +60,7 @@ def randn(*shape, dtype=None, requires_grad=True, device="cpu"):
    )


-def arange(start=0, end=None, step=1, dtype=None, requires_grad=True, device="cpu"):
+def arange(start=0, end=None, step=1, dtype=None, requires_grad=False, device="cpu"):
    if end is None:
        start, end = 0, start
    return Tensor(
@@ -70,11 +70,11 @@ def arange(start=0, end=None, step=1, dtype=None, requires_grad=True, device="cpu"):
    )


-def ones_like(tensor, dtype=None, requires_grad=True, device="cpu"):
+def ones_like(tensor, dtype=None, requires_grad=False, device="cpu"):
    return Tensor(np.ones_like(tensor.data, dtype), requires_grad=requires_grad, device=device)


-def zeros_like(tensor, dtype=None, requires_grad=True, device="cpu"):
+def zeros_like(tensor, dtype=None, requires_grad=False, device="cpu"):
    return Tensor(np.zeros_like(tensor.data, dtype), requires_grad=requires_grad, device=device)


@@ -195,6 +195,7 @@ def flip(x, axis):
    return x.flip(axis=axis)

def where(condition, x, y):
+    x = tensor(x, device=condition.device) if not isinstance(x, Tensor) else x
    return x.where(condition, y)

def equal(x, y):
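Taken together, the __init__.py changes flip every factory function's default from requires_grad=True to requires_grad=False, matching tensor(), and let the functional where() accept a plain scalar or array for x by wrapping it on the condition's device. A minimal usage sketch of the new behavior (import name and values are illustrative, assuming the package imports as neunet):

    import neunet as nnet

    # Factory tensors no longer track gradients unless asked to.
    a = nnet.ones(2, 3)                      # a.requires_grad is now False
    w = nnet.rand(3, 3, requires_grad=True)  # opt in explicitly for trainable weights

    # where() now wraps a non-Tensor first argument on condition.device.
    cond = nnet.tensor([1.0, 0.0, 1.0])
    out = nnet.where(cond, 1.0, nnet.zeros(3))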
13 changes: 10 additions & 3 deletions neunet/autograd.py
@@ -356,7 +356,7 @@ def flip(self, axis: Any) -> 'Tensor':
            device=self.device,
        )

-    def where(self, condition: Union[Any, 'Tensor'], t: Union[Any, 'Tensor']) -> 'Tensor':
+    def where(self, condition: 'Tensor', t: Union[Any, 'Tensor']) -> 'Tensor':
        condition = self.tensor(condition)
        t = self.tensor(t)
@@ -586,8 +586,15 @@ def __getitem__(
"getitem",
requires_grad=self.requires_grad,
device=self.device,
dtype=self.dtype
)

def __setitem__(self, key, value: Union[Any, 'Tensor']):
if self.requires_grad:
raise RuntimeError("Cannot assign values to a tensor with requires_grad=True")
value = self.tensor(value)
self.data[key] = value.data

def __array__(self, dtype: Any=None) -> np.ndarray:
return self.data.astype(dtype, copy=False)
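Two behavioral notes on this hunk: __getitem__ now forwards dtype=self.dtype, so slices keep the parent tensor's dtype, and the new __setitem__ permits in-place assignment only on tensors that do not require grad, since an untracked write would invalidate the recorded graph. A small sketch of the resulting behavior (illustrative values, same assumed neunet import):

    import neunet as nnet

    buf = nnet.zeros(4)              # requires_grad=False under the new defaults
    buf[1:3] = [1.0, 2.0]            # allowed: value is wrapped via self.tensor(value)

    w = nnet.ones(4, requires_grad=True)
    w[0] = 5.0                       # raises RuntimeError: assignment is blocked
                                     # on tensors with requires_grad=True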

@@ -809,8 +816,8 @@ def backward(
            self.args[0].backward(self.xp.flip(grad, axis=self.args[1]))

elif self.op == "where":
-            self.args[0].backward(grad * self.xp.where(self.args[1].data, grad, self.xp.zeros_like(grad)))
-            self.args[2].backward(grad * self.xp.where(self.args[1].data, self.xp.zeros_like(grad), grad))
+            self.args[0].backward(self.xp.where(self.args[1].data, grad, self.xp.zeros_like(grad)))
+            self.args[2].backward(self.xp.where(self.args[1].data, self.xp.zeros_like(grad), grad))

elif self.op == "neg":
self.args[0].backward(-grad)
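For reference, the backward rule for out = where(cond, x, y) routes the upstream gradient to x where the condition holds and to y elsewhere; multiplying by grad again, as the deleted lines did, would square the gradient. A standalone numpy sketch of the rule (toy values, independent of the library):

    import numpy as np

    cond = np.array([True, False, True])
    grad = np.array([0.1, 0.2, 0.3])                # upstream gradient of out

    dx = np.where(cond, grad, np.zeros_like(grad))  # [0.1, 0.0, 0.3]
    dy = np.where(cond, np.zeros_like(grad), grad)  # [0.0, 0.2, 0.0]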
2 changes: 1 addition & 1 deletion neunet/nn/layers/dropout.py
@@ -23,7 +23,7 @@ def forward(self, X: Tensor) -> Tensor:

        if self.training:
            self.mask = (
-                X.xp.random.binomial(1, 1 - self.p, size=X.data.shape, dtype=X.data.dtype)
+                X.xp.random.binomial(1, 1 - self.p, size=X.data.shape).astype(X.data.dtype)
                * self.scale
            )
        else:
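The dropout fix works around numpy's random.binomial, which takes no dtype keyword (the old call failed on the CPU path); sampling first and then casting with .astype keeps the mask in the input's dtype. A minimal numpy sketch of the inverted-dropout mask this layer builds (p is illustrative, and scale is assumed to be the usual 1/(1-p) factor):

    import numpy as np

    p = 0.5                           # drop probability
    scale = 1.0 / (1.0 - p)           # assumed inverted-dropout scaling
    x = np.random.rand(2, 3).astype(np.float32)

    # binomial() has no dtype kwarg, hence the .astype() after sampling
    mask = np.random.binomial(1, 1 - p, size=x.shape).astype(x.dtype) * scale
    out = x * mask                    # eval mode returns x unchanged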
