-
-
Notifications
You must be signed in to change notification settings - Fork 48
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Kye Gomez
authored and
Kye Gomez
committed
Jul 23, 2024
1 parent
4ff5d90
commit 295c4f1
Showing
3 changed files
with
58 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,38 @@ | ||
import torch | ||
from transformers import T5Tokenizer, T5EncoderModel | ||
from loguru import logger | ||
|
||
|
||
class PretrainedT5Embedder:
    """Mean-pooled sentence embeddings from a pre-trained T5 encoder.

    The tokenizer and encoder weights are loaded once at construction time;
    each call to :meth:`run` then embeds a single piece of text.
    """

    def __init__(self, model_name: str = "t5-small", *args, **kwargs):
        """
        Load the tokenizer and encoder for the given T5 checkpoint.

        Args:
            model_name (str): The name of the pre-trained T5 model to use.
            *args: Extra positional arguments forwarded to
                ``T5EncoderModel.from_pretrained``.
            **kwargs: Extra keyword arguments forwarded to
                ``T5EncoderModel.from_pretrained``.
        """
        logger.info(
            f"Initializing the T5 tokenizer and model with {model_name}."
        )
        self.tokenizer = T5Tokenizer.from_pretrained(model_name)
        self.model = T5EncoderModel.from_pretrained(
            model_name, *args, **kwargs
        )

    def run(self, text: str, *args, **kwargs) -> torch.Tensor:
        """
        Embed *text* with the T5 encoder.

        Args:
            text (str): The input text to be embedded.

        Returns:
            torch.Tensor: Mean-pooled last hidden states, one vector per
            input sequence.
        """
        logger.info(f"Encoding the text: {text}")
        tokenized = self.tokenizer(
            text, return_tensors="pt", padding=True, truncation=True
        )
        # Inference only — no gradients needed for the forward pass.
        with torch.no_grad():
            encoder_out = self.model(**tokenized)
        # Average over the sequence dimension to get a single embedding
        # vector per input.
        pooled = encoder_out.last_hidden_state.mean(dim=1)
        logger.info("Text successfully embedded.")
        return pooled
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,18 @@ | ||
import torch | ||
import torch.nn as nn | ||
|
||
|
||
class Snake(nn.Module):
    """Snake activation: ``x + (1 / alpha) * sin(alpha * x) ** 2``.

    The frequency ``alpha`` is registered as a trainable ``nn.Parameter``,
    so it is updated by the optimizer alongside the model weights.
    """

    def __init__(self, alpha: float = 1.0):
        """
        Args:
            alpha (float): Initial value of the trainable frequency
                parameter.
        """
        super().__init__()
        self.alpha = nn.Parameter(torch.tensor(alpha))

    def forward(self, x):
        """Apply the snake non-linearity element-wise to ``x``."""
        scaled = self.alpha * x
        return x + (1 / self.alpha) * torch.sin(scaled) ** 2
|
||
|
||
# # Example usage | ||
# snake = Snake() | ||
# x = torch.randn(10, 100, 100) # Example input tensor | ||
# output = snake(x) | ||
# print(output) |