Commit
Merge pull request #368 from gkumbhat/update_torch_2_3_1
📦 Update torch to 2.3.1
gkumbhat authored Jul 17, 2024
2 parents 8f71845 + b4e5108 · commit 363c425
Showing 3 changed files with 8 additions and 8 deletions.
2 changes: 0 additions & 2 deletions caikit_nlp/toolkit/torch_run.py
@@ -24,7 +24,6 @@
 
 # Third Party
 from torch import cuda
-from torch.distributed.elastic.multiprocessing.api import Std
 from torch.distributed.launcher.api import LaunchConfig
 import torch.distributed as dist
 
@@ -100,6 +99,5 @@ def get_torch_elastic_launch_config(
         rdzv_backend="static",
         rdzv_endpoint=f"{master_addr}:{master_port}",
         rdzv_configs=rdzv_configs,
-        tee=Std.ALL,
         max_restarts=max_restarts,
     )
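For context, here is a minimal sketch of building an elastic LaunchConfig once the `tee=Std.ALL` keyword is dropped, relying on the launcher's default stdout/stderr handling. The helper name and concrete values are illustrative assumptions, not the repository's actual `get_torch_elastic_launch_config` implementation.

```python
# Hypothetical, simplified helper (values are illustrative) showing a
# LaunchConfig built without the removed `tee=Std.ALL` keyword.
from torch.distributed.launcher.api import LaunchConfig


def make_launch_config(
    master_addr: str, master_port: int, nproc_per_node: int = 1
) -> LaunchConfig:
    return LaunchConfig(
        min_nodes=1,
        max_nodes=1,
        nproc_per_node=nproc_per_node,
        rdzv_backend="static",
        rdzv_endpoint=f"{master_addr}:{master_port}",
        rdzv_configs={"rank": 0},  # static rendezvous expects the node rank
        max_restarts=0,
    )
```

Such a config would typically be passed to `torch.distributed.launcher.api.elastic_launch` along with the training entrypoint.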
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -29,7 +29,7 @@ dependencies = [
     "scipy>=1.8.1",
     "sentence-transformers>=2.3.1,<2.4.0",
     "tokenizers>=0.13.3",
-    "torch>=2.2.2,<2.3.0",
+    "torch>=2.3.1,<2.4.0",
     "tqdm>=4.65.0",
     "transformers>=4.32.0",
     "peft==0.6.0",
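As a quick sanity check of the new bound, the range `>=2.3.1,<2.4.0` can be evaluated with the `packaging` library; this snippet is an illustration, not part of the project:

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

torch_spec = SpecifierSet(">=2.3.1,<2.4.0")
assert Version("2.3.1") in torch_spec       # new minimum is allowed
assert Version("2.2.2") not in torch_spec   # previously pinned version is excluded
assert Version("2.4.0") not in torch_spec   # upper bound stays exclusive
```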
12 changes: 7 additions & 5 deletions tests/modules/text_embedding/test_embedding.py
@@ -1018,7 +1018,7 @@ def test_encoding_order(loaded_model: EmbeddingModule, truncate_input_tokens):
 
     # test order by comparing value of individual embeddings in sequence
     for i, e in enumerate(separate_vectors):
-        assert np.allclose(e, combined_vectors[i])
+        assert np.allclose(e, combined_vectors[i], rtol=1e-03, atol=1e-05)
 
     # test expected failure case by reordering
     shifted_separate_vectors = separate_vectors[1:] + [separate_vectors[0]]
@@ -1029,7 +1029,7 @@ def test_encoding_order(loaded_model: EmbeddingModule, truncate_input_tokens):
             not approx(e) == combined_vectors[i]
         ), "expected altered order to not match combined vectors"
         assert not np.allclose(
-            e, combined_vectors[i]
+            e, combined_vectors[i], rtol=1e-05, atol=1e-08
        ), "expected altered order to not match combined"
 
 
@@ -1105,7 +1105,9 @@ def test_same_same(loaded_model: EmbeddingModule, truncate_input_tokens):
         assert np.allclose(e, combined_vectors[i])
 
     # Next ensuring that the two identical sentences yield identical results (and 3rd does not)
-    assert np.array_equal(combined_vectors[0], combined_vectors[1])
+    assert np.allclose(combined_vectors[0], combined_vectors[1], rtol=1e-05, atol=1e-08)
     assert not np.array_equal(combined_vectors[1], combined_vectors[2])
-    assert np.array_equal(separate_vectors[0], separate_vectors[1])
-    assert not np.array_equal(separate_vectors[1], separate_vectors[2])
+    assert np.allclose(separate_vectors[0], separate_vectors[1], rtol=1e-05, atol=1e-08)
+    assert not np.allclose(
+        separate_vectors[1], separate_vectors[2], rtol=1e-05, atol=1e-08
+    )
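The test changes above replace exact `np.array_equal` checks with `np.allclose` and explicit tolerances. As a standalone illustration (not part of the test suite), `np.allclose(a, b, rtol=..., atol=...)` passes when `|a - b| <= atol + rtol * |b|` holds element-wise:

```python
import numpy as np

a = np.array([1.000001, 2.0])
b = np.array([1.0, 2.0])

# Element-wise check: |a - b| <= atol + rtol * |b|
assert np.allclose(a, b, rtol=1e-05, atol=1e-08)      # tiny numeric drift passes
assert not np.allclose(a, b, rtol=1e-07, atol=1e-09)  # tighter tolerances reject it
assert not np.array_equal(a, b)                       # exact equality is stricter
```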
