Skip to content

Commit

Permalink
chore: updating the CP version
Browse files Browse the repository at this point in the history
  • Loading branch information
bcm-at-zama authored Sep 28, 2023
1 parent da85d43 commit 274f0e4
Show file tree
Hide file tree
Showing 6 changed files with 57 additions and 107 deletions.
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ OPEN_PR="true"
# If one wants to force the installation of a given rc version
# /!\ WARNING /!\: This version should NEVER be a wildcard as it might create some
# issues when trying to run it in the future.
CP_VERSION_SPEC_FOR_RC="concrete-python==2.2.0"
CP_VERSION_SPEC_FOR_RC="concrete-python==2.4.0rc1"

# If one wants to use the last RC version
# CP_VERSION_SPEC_FOR_RC="$$(poetry run python \
Expand Down
2 changes: 1 addition & 1 deletion deps_licenses/licenses_linux_user.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ certifi, 2023.7.22, Mozilla Public License 2.0 (MPL 2.0)
charset-normalizer, 3.2.0, MIT License
click, 8.1.7, BSD License
coloredlogs, 15.0.1, MIT License
concrete-python, 2.2.0, BSD-3-Clause
concrete-python, 2.4.0rc1, BSD-3-Clause
dependencies, 2.0.1, BSD License
dill, 0.3.7, BSD License
exceptiongroup, 1.1.3, MIT License
Expand Down
2 changes: 1 addition & 1 deletion deps_licenses/licenses_linux_user.txt.md5
Original file line number Diff line number Diff line change
@@ -1 +1 @@
88fe2fa1ad3c3d2c74d44e85dd4d9662
60ba8097c49614bc52a447b05062107f
3 changes: 0 additions & 3 deletions docs/advanced_examples/ConvolutionalNeuralNetwork.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -525,9 +525,6 @@
"metadata": {
"execution": {
"timeout": 10800
},
"language_info": {
"name": "python"
}
},
"nbformat": 4,
Expand Down
34 changes: 0 additions & 34 deletions src/concrete/ml/common/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,6 @@
import numpy
import onnx
import torch
from concrete.fhe import ParameterSelectionStrategy
from concrete.fhe.compilation.configuration import Configuration
from concrete.fhe.dtypes import Integer
from sklearn.base import is_classifier, is_regressor

Expand Down Expand Up @@ -584,35 +582,3 @@ def all_values_are_of_dtype(*values: Any, dtypes: Union[str, List[str]]) -> bool
supported_dtypes[dtype] = supported_dtype

return all(_is_of_dtype(value, supported_dtypes) for value in values)


# Remove this function once Concrete Python fixes the multi-parameter bug with KNN
# circuits
# FIXME: https://github.com/zama-ai/concrete-ml-internal/issues/3978
def force_mono_parameter_in_configuration(configuration: Optional[Configuration], **kwargs):
    """Force a compilation configuration to use the mono-parameter strategy.

    If the given Configuration instance is None, a new instance is built using the
    mono-parameter strategy together with the additional keyword arguments. Otherwise, the
    given instance is updated in place.

    Args:
        configuration (Optional[Configuration]): The configuration to consider.
        **kwargs: Additional parameters to use for instantiating a new Configuration
            instance, if configuration is None.

    Returns:
        configuration (Configuration): A configuration with mono-parameter strategy.
    """
    assert "parameter_selection_strategy" not in kwargs, (
        "Please do not provide a parameter_selection_strategy parameter as it will be set to MONO."
    )

    if configuration is not None:
        # Mutate the provided configuration in place and hand it back
        configuration.parameter_selection_strategy = ParameterSelectionStrategy.MONO
        return configuration

    # No configuration was given: build a fresh one with the mono-parameter strategy
    return Configuration(
        parameter_selection_strategy=ParameterSelectionStrategy.MONO, **kwargs
    )
121 changes: 54 additions & 67 deletions src/concrete/ml/sklearn/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from typing import Any, Callable, Dict, List, Optional, Set, TextIO, Type, Union

import brevitas.nn as qnn
import concrete.fhe as cnp
import numpy
import onnx
import sklearn
Expand All @@ -37,7 +38,6 @@
USE_OLD_VL,
FheMode,
check_there_is_no_p_error_options_in_configuration,
force_mono_parameter_in_configuration,
generate_proxy_function,
manage_parameters_for_pbs_errors,
)
Expand Down Expand Up @@ -1999,56 +1999,59 @@ def scatter1d(x, v, indices):
# d: Length of the bitonic sequence
d = p

for bq in range(ln2n - 1, t - 1, -1):
q = 2**bq
# Determine the range of indexes to be compared
range_i = numpy.array(
[i for i in range(0, n - d) if i & p == r and comparisons[i] < k]
)
if len(range_i) == 0:
# Edge case, for k=1
continue

# Select 2 bitonic sequences `a` and `b` of length `d`
# a = x[range_i]: first bitonic sequence
# a_i = idx[range_i]: Indexes of a_i elements in the original x
a = gather1d(x, range_i)
# a_i = gather1d(idx, range_i)
# b = x[range_i + d]: Second bitonic sequence
# b_i = idx[range_i + d]: Indexes of b_i elements in the original x
b = gather1d(x, range_i + d)
# b_i = gather1d(idx, range_i + d)

labels_a = gather1d(labels, range_i) #
labels_b = gather1d(labels, range_i + d) # idx[range_i + d]

# Select max(a, b)
diff = a - b
max_x = a + numpy.maximum(0, b - a)

# Swap if a > b
# x[range_i] = max_x(a, b): First bitonic sequence gets min(a, b)
x = scatter1d(x, a + b - max_x, range_i)
# x[range_i + d] = min(a, b): Second bitonic sequence gets max(a, b)
x = scatter1d(x, max_x, range_i + d)

# Max index selection
is_a_greater_than_b = diff <= 0

# Update labels array according to the max items
max_labels = labels_a + (labels_b - labels_a) * is_a_greater_than_b
labels = scatter1d(labels, labels_a + labels_b - max_labels, range_i)
labels = scatter1d(labels, max_labels, range_i + d)

# Update
comparisons[range_i + d] = comparisons[range_i + d] + 1
d = q - p
r = p

# Return only the topk indexes
topk_labels = fhe_array(labels[:k])

return topk_labels
with cnp.tag(f"top_k_t_{t}"):
for bq in range(ln2n - 1, t - 1, -1):
q = 2**bq
# Determine the range of indexes to be compared
range_i = numpy.array(
[i for i in range(0, n - d) if i & p == r and comparisons[i] < k]
)
if len(range_i) == 0:
# Edge case, for k=1
continue

# Select 2 bitonic sequences `a` and `b` of length `d`
# a = x[range_i]: first bitonic sequence
# a_i = idx[range_i]: Indexes of a_i elements in the original x
a = gather1d(x, range_i)
# a_i = gather1d(idx, range_i)
# b = x[range_i + d]: Second bitonic sequence
# b_i = idx[range_i + d]: Indexes of b_i elements in the original x
b = gather1d(x, range_i + d)
# b_i = gather1d(idx, range_i + d)

labels_a = gather1d(labels, range_i) #
labels_b = gather1d(labels, range_i + d) # idx[range_i + d]

with cnp.tag("max"):
# Select max(a, b)
diff = a - b
max_x = a + numpy.maximum(0, b - a)

# Swap if a > b
# x[range_i] = max_x(a, b): First bitonic sequence gets min(a, b)
x = scatter1d(x, a + b - max_x, range_i)
# x[range_i + d] = min(a, b): Second bitonic sequence gets max(a, b)
x = scatter1d(x, max_x, range_i + d)

with cnp.tag("sign"):
# Max index selection
is_a_greater_than_b = diff <= 0

# Update labels array according to the max items
with cnp.tag("label_swap"):
max_labels = labels_a + (labels_b - labels_a) * is_a_greater_than_b

with cnp.tag("label_set"):
labels = scatter1d(labels, labels_a + labels_b - max_labels, range_i)
labels = scatter1d(labels, max_labels, range_i + d)

# Update
comparisons[range_i + d] = comparisons[range_i + d] + 1
d = q - p
r = p

return labels[0 : self.n_neighbors]

            # 1. Pairwise euclidean distance
distance_matrix = pairwise_euclidean_distance(q_X)
Expand All @@ -2062,23 +2065,7 @@ def scatter1d(x, v, indices):

return numpy.expand_dims(topk_labels, axis=0)

# KNN works only for MONO in the latest concrete Python version
# FIXME: https://github.com/zama-ai/concrete-ml-internal/issues/3978
def compile(self, *args, **kwargs) -> Circuit:
    """Compile the model, forcing the mono-parameter selection strategy.

    KNN circuits only work with the MONO parameter selection strategy in the latest
    Concrete Python version, so any Configuration reaching the base compile call is
    forced to it here.
    FIXME: https://github.com/zama-ai/concrete-ml-internal/issues/3978

    Args:
        *args: Positional arguments forwarded to BaseEstimator.compile; if a
            Configuration is given as the second positional argument, it is forced to
            the mono-parameter strategy.
        **kwargs: Keyword arguments forwarded to BaseEstimator.compile; the
            "configuration" entry (created if absent) is forced to the mono-parameter
            strategy.

    Returns:
        Circuit: The compiled FHE circuit.
    """
    if len(args) >= 2:
        # A configuration was passed positionally: force its strategy to MONO
        args_as_list = list(args)
        args_as_list[1] = force_mono_parameter_in_configuration(args_as_list[1])
        args = tuple(args_as_list)
    else:
        # Otherwise, force (or create) the configuration found in the keyword arguments
        kwargs["configuration"] = force_mono_parameter_in_configuration(
            kwargs.get("configuration", None)
        )

    return BaseEstimator.compile(self, *args, **kwargs)

def post_processing(self, y_preds: numpy.ndarray) -> numpy.ndarray:
Expand Down

0 comments on commit 274f0e4

Please sign in to comment.