Skip to content

Commit

Permalink
fix: always compress evaluation keys
Browse files — browse the repository at this point in the history
  • Loading branch information
andrei-stoian-zama committed Jun 13, 2024
1 parent 3703f73 commit d866668
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 0 deletions.
2 changes: 2 additions & 0 deletions conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,7 @@ def default_configuration():
fhe_simulation=False,
fhe_execution=True,
compress_input_ciphertexts=os.environ.get("USE_INPUT_COMPRESSION", "1") == "1",
compress_eval_keys=True,
)


Expand All @@ -173,6 +174,7 @@ def simulation_configuration():
fhe_simulation=True,
fhe_execution=False,
compress_input_ciphertexts=os.environ.get("USE_INPUT_COMPRESSION", "1") == "1",
compress_eval_keys=True,
)


Expand Down
4 changes: 4 additions & 0 deletions docker/release_resources/sanity_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,8 @@ def ml_check(args, keyring_dir_as_str):
enable_unsafe_features=True,
use_insecure_key_cache=is_fast,
insecure_key_cache_location=keyring_dir_as_str,
compress_input_ciphertexts=True,
compress_eval_keys=True,
)

# We first compile the model with some data, here the training set
Expand Down Expand Up @@ -120,6 +122,8 @@ def function_to_compile(x):
enable_unsafe_features=is_fast,
use_insecure_key_cache=is_fast,
insecure_key_cache_location=keyring_dir_as_str,
compress_input_ciphertexts=True,
compress_eval_keys=True,
)

print("Compiling...")
Expand Down
2 changes: 2 additions & 0 deletions tests/torch/test_hybrid_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@ def run_hybrid_llm_test(
# Multi-parameter strategy is used in order to speed-up the FHE executions
configuration = Configuration(
single_precision=False,
compress_input_ciphertexts=True,
compress_eval_keys=True,
)

# Create a hybrid model
Expand Down

0 comments on commit d866668

Please sign in to comment.