chore: fix typo
Fix some typos.
fd0r committed Sep 28, 2023
1 parent 274f0e4 commit 5db5c0f
Showing 5 changed files with 7 additions and 7 deletions.
4 changes: 2 additions & 2 deletions docs/advanced_examples/GLMComparison.ipynb
@@ -97,13 +97,13 @@
"# Getting the original data-set containing the risk features\n",
"# Link: https://www.openml.org/d/41214\n",
"risks_data, _ = fetch_openml(\n",
- " data_id=41214, as_frame=True, cache=True, data_home=\"~/.cache/sklean\", return_X_y=True\n",
+ " data_id=41214, as_frame=True, cache=True, data_home=\"~/.cache/sklearn\", return_X_y=True\n",
")\n",
"\n",
"# Getting the data set containing claims amount\n",
"# Link: https://www.openml.org/d/41215\n",
"claims_data, _ = fetch_openml(\n",
- " data_id=41215, as_frame=True, cache=True, data_home=\"~/.cache/sklean\", return_X_y=True\n",
+ " data_id=41215, as_frame=True, cache=True, data_home=\"~/.cache/sklearn\", return_X_y=True\n",
")"
]
},
2 changes: 1 addition & 1 deletion docs/advanced_examples/PoissonRegression.ipynb
@@ -86,7 +86,7 @@
"outputs": [],
"source": [
"df, _ = fetch_openml(\n",
- " data_id=41214, as_frame=True, cache=True, data_home=\"~/.cache/sklean\", return_X_y=True\n",
+ " data_id=41214, as_frame=True, cache=True, data_home=\"~/.cache/sklearn\", return_X_y=True\n",
")\n",
"df = df.head(50000)"
]
4 changes: 2 additions & 2 deletions docs/advanced_examples/XGBRegressor.ipynb
@@ -1135,11 +1135,11 @@
"source": [
"# Evaluation\n",
"\n",
- "r2_score_sklean = metrics.r2_score(y_test, y_preds_XGBoost)\n",
+ "r2_score_sklearn = metrics.r2_score(y_test, y_preds_XGBoost)\n",
"r2_score_clear_concrete = metrics.r2_score(y_test, y_preds_clear)\n",
"r2_score_fhe_concrete = metrics.r2_score(y_test, y_preds_fhe)\n",
"\n",
- "print(f\"R2_score with XGBoost: {r2_score_sklean:.4f}\")\n",
+ "print(f\"R2_score with XGBoost: {r2_score_sklearn:.4f}\")\n",
"print(f\"R2_score without FHE : {r2_score_clear_concrete:.4f}\")\n",
"print(f\"R2_score with FHE : {r2_score_fhe_concrete:.4f}\")"
]
2 changes: 1 addition & 1 deletion use_case_examples/federated_learning/federated_utils.py
@@ -228,7 +228,7 @@ def get_model_parameters(model: LogisticRegression) -> LogRegParams:


def set_model_params(model: LogisticRegression, params: LogRegParams) -> LogisticRegression:
-     """Sets the parameters of a sklean LogisticRegression model."""
+     """Sets the parameters of a sklearn LogisticRegression model."""
    model.coef_ = params[0]
    if model.fit_intercept:
        model.intercept_ = params[1]
2 changes: 1 addition & 1 deletion use_case_examples/federated_learning/load_to_cml.py
@@ -17,7 +17,7 @@
with path_to_model.open("rb") as file:
    sklearn_model = pickle.load(file)

- # Compile model without data since the server doesn't have acces to it
+ # Compile model without data since the server doesn't have access to it
# Indeed in this scenario the users have the data but the server doesn't.
# We then have to compile the model using random input sampled with the same
# low and high bounds as the real data, in this context [0, 255].
