diff --git a/alphafold3/model.py b/alphafold3/model.py
index 69b07e6..acb1a2e 100644
--- a/alphafold3/model.py
+++ b/alphafold3/model.py
@@ -69,7 +69,7 @@ def __init__(
         )

         # Norm
-        self.norm = nn.Layernorm(dim)
+        self.norm = nn.LayerNorm(dim)

     def forward(
         self,
diff --git a/alphafold3/template_embedder.py b/alphafold3/template_embedder.py
index 0752818..8db3e11 100644
--- a/alphafold3/template_embedder.py
+++ b/alphafold3/template_embedder.py
@@ -5,19 +5,19 @@ class TemplateEmbedder(nn.Module):
     def __init__(
-        self,
+        self,
         dim: int = None,
-        depth: int = 2,
+        depth: int = 2,
         seq_len: int = None,
         heads: int = 64,
         dim_head: int = 64,
         attn_dropout: float = 0.0,
         ff_dropout: float = 0.0,
         global_column_attn: bool = False,
-        c: int = 64,
+        c: int = 64,
         Ntemplates: int = 1,
         *args,
-        **kwargs
+        **kwargs,
     ):
         super(TemplateEmbedder, self).__init__()
         # Define layers used in the embedding
@@ -34,7 +34,7 @@ def __init__(
             ff_dropout=ff_dropout,
             depth=depth,
             *args,
-            **kwargs
+            **kwargs,
         )
         self.relu = nn.ReLU()
         self.final_linear = nn.Linear(c, c, bias=False)
diff --git a/pyproject.toml b/pyproject.toml
index 7d4c7fc..c41e627 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 [tool.poetry]
 name = "alphafold3"
-version = "0.0.5"
+version = "0.0.8"
 description = "Paper - Pytorch"
 license = "MIT"
 authors = ["Kye Gomez "]
diff --git a/tests/test_template_embedder.py b/tests/test_template_embedder.py
new file mode 100644
index 0000000..e69de29