Commit

Merge pull request #70 from vyomakesh09/master
[mod]
kyegomez authored Dec 28, 2023
2 parents d5ff72b + ca2a9ee commit eb827d8
Showing 6 changed files with 50 additions and 41 deletions.
19 changes: 19 additions & 0 deletions scripts/delpycache.py
@@ -0,0 +1,19 @@
+import os
+import shutil
+import sys
+
+
+def delete_pycache(directory):
+    for root, dirs, files in os.walk(directory):
+        if "__pycache__" in dirs:
+            shutil.rmtree(os.path.join(root, "__pycache__"))
+
+
+if __name__ == "__main__":
+    if len(sys.argv) != 2:
+        print("Usage: python delete_pycache.py <directory>")
+        sys.exit(1)
+
+    directory = sys.argv[1]
+    delete_pycache(directory)
+    print(f"__pycache__ directories deleted in {directory}")
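For reference, a minimal usage sketch of the new helper (not part of the commit; the import assumes the scripts/ directory is on sys.path, e.g. when running from inside that directory):

# Hypothetical usage sketch, not part of this commit.
# Assumes scripts/ is on sys.path so the module is importable.
from delpycache import delete_pycache

# Recursively remove every __pycache__ directory under the current directory.
delete_pycache(".")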
Empty file added tests/__init__.py
Empty file.
7 changes: 0 additions & 7 deletions tests/models/test_navit.py
@@ -1,7 +1,6 @@
 import pytest
 import torch
 from zeta.models import NaViT
-from torch.nn.modules.module import ModuleAttributeError
 from torch.nn import Sequential


@@ -72,10 +71,4 @@ def test_token_dropout(neural_network_template):
     assert callable(model.calc_token_dropout)


-# Test if exceptions are thrown when they should be
-def test_exceptions(neural_network_template):
-    with pytest.raises(ModuleAttributeError):
-        _ = neural_network_template.non_existent_attribute
-
-
# add your test cases here..
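Note: ModuleAttributeError was a short-lived AttributeError subclass in torch.nn.modules.module and is not exported by current PyTorch releases, which is presumably why both the import and the test that relied on it were dropped rather than updated. If equivalent coverage were wanted, a hypothetical sketch (not part of this commit) reusing the file's existing neural_network_template fixture could target the plain AttributeError instead:

# Hypothetical replacement test, not in this PR: on current PyTorch, accessing
# a missing attribute on an nn.Module raises a plain AttributeError.
def test_missing_attribute_raises(neural_network_template):
    with pytest.raises(AttributeError):
        _ = neural_network_template.non_existent_attribute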
8 changes: 4 additions & 4 deletions tests/nn/modules/test_linearactivation.py
@@ -13,14 +13,14 @@ def test_LinearActivation_init():
     "input_tensor", [(torch.tensor([1, 2, 3])), (torch.tensor([-1, 0, 1]))]
 )
 def test_LinearActivation_forward(input_tensor):
-    """Test if the forward method of LinearActivation class retruns the same input tensor."""
+    """Test if the forward method of LinearActivation class returns the same input tensor."""
     act = LinearActivation()
     assert torch.equal(act.forward(input_tensor), input_tensor)


-@pytest.mark.parametrize("input_tensor", [(torch.tensor([1, 2, "a"]))])
-def test_LinearActivation_forward_error(input_tensor):
+def test_LinearActivation_forward_error():
     """Test if the forward method of LinearActivation class raises an error when input tensor is not valid."""
     act = LinearActivation()
     with pytest.raises(TypeError):
-        act.forward(input_tensor)
+        invalid_input = [1, 2, "a"]
+        act.forward(torch.tensor(invalid_input))
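The parametrized version built torch.tensor([1, 2, "a"]) while pytest was collecting parameters, so the failure happened before the test ran; the new version constructs the invalid tensor inside pytest.raises so the expected TypeError is caught by the test itself. A standalone sketch of that expectation (hypothetical, mirroring the updated test rather than any zeta internals):

# Hypothetical sketch: building a tensor from mixed int/str data raises the
# TypeError that the updated test expects to see inside pytest.raises.
import pytest
import torch


def test_invalid_tensor_data_raises():
    with pytest.raises(TypeError):
        torch.tensor([1, 2, "a"])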
3 changes: 2 additions & 1 deletion tests/structs/test_transformer.py
@@ -1,6 +1,7 @@
 import pytest
 import torch
-from zeta.structs import Transformer, AttentionLayers
+from zeta.structs import Transformer
+from zeta.structs.transformer import AttentionLayers

 # assuming that you are testing the Transformer class

54 changes: 25 additions & 29 deletions zeta/nn/modules/test_dense_connect.py
@@ -1,40 +1,36 @@
 import torch
 import torch.nn as nn
-import unittest
+
+import pytest
 from zeta.nn.modules.dense_connect import DenseBlock


-class DenseBlockTestCase(unittest.TestCase):
-    def setUp(self):
-        self.submodule = nn.Linear(10, 5)
-        self.dense_block = DenseBlock(self.submodule)
+@pytest.fixture
+def dense_block():
+    submodule = nn.Linear(10, 5)
+    return DenseBlock(submodule)


-    def test_forward(self):
-        x = torch.randn(32, 10)
-        output = self.dense_block(x)
+def test_forward(dense_block):
+    x = torch.randn(32, 10)
+    output = dense_block(x)

-        self.assertEqual(output.shape, (32, 15))  # Check output shape
-        self.assertTrue(
-            torch.allclose(output[:, :10], x)
-        )  # Check if input is preserved
-        self.assertTrue(
-            torch.allclose(output[:, 10:], self.submodule(x))
-        )  # Check submodule output
+    assert output.shape == (32, 15)  # Check output shape
+    assert torch.allclose(output[:, :10], x)  # Check if input is preserved
+    assert torch.allclose(
+        output[:, 10:], dense_block.submodule(x)
+    )  # Check submodule output

-    def test_initialization(self):
-        self.assertEqual(
-            self.dense_block.submodule, self.submodule
-        )  # Check submodule assignment
-
-    def test_docstrings(self):
-        self.assertIsNotNone(
-            DenseBlock.__init__.__doc__
-        )  # Check if __init__ has a docstring
-        self.assertIsNotNone(
-            DenseBlock.forward.__doc__
-        )  # Check if forward has a docstring
+def test_initialization(dense_block):
+    assert isinstance(dense_block.submodule, nn.Linear)  # Check submodule type
+    assert dense_block.submodule.in_features == 10  # Check input features
+    assert dense_block.submodule.out_features == 5  # Check output features


-if __name__ == "__main__":
-    unittest.main()
+def test_docstrings():
+    assert (
+        DenseBlock.__init__.__doc__ is not None
+    )  # Check if __init__ has a docstring
+    assert (
+        DenseBlock.forward.__doc__ is not None
+    )  # Check if forward has a docstring
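The rewritten assertions pin down the behaviour DenseBlock is expected to have: concatenate the input with the submodule's output along the last dimension, so a Linear(10, 5) submodule maps a (32, 10) batch to a (32, 15) result. A minimal stand-in with that contract, inferred from the tests and not taken from the zeta implementation:

import torch
import torch.nn as nn


class DenseBlockSketch(nn.Module):
    """Toy stand-in matching the contract the tests check; not zeta's DenseBlock."""

    def __init__(self, submodule):
        super().__init__()
        self.submodule = submodule

    def forward(self, x):
        # Output is [input, submodule(input)] along the feature dimension.
        return torch.cat([x, self.submodule(x)], dim=-1)


block = DenseBlockSketch(nn.Linear(10, 5))
print(block(torch.randn(32, 10)).shape)  # torch.Size([32, 15])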
