From 390c511c7d689f5acc522367fda1a5185e8a04cc Mon Sep 17 00:00:00 2001 From: JiayuSuPKU Date: Tue, 19 Sep 2023 15:33:26 -0400 Subject: [PATCH] Bug fix --- simulation/002_zone2counts.py | 2 +- smoother/losses.py | 2 +- smoother/simulation/utils.py | 4 ++-- smoother/utils.py | 11 +++++++++-- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/simulation/002_zone2counts.py b/simulation/002_zone2counts.py index fb020db..06443a2 100644 --- a/simulation/002_zone2counts.py +++ b/simulation/002_zone2counts.py @@ -33,7 +33,7 @@ def main(): type = str, default = None, help = 'Output directory. Default: None to use the same directory as the input.') - prs.add_argument('-oa','--output-anndata', action=argparse.BooleanOptionalAction, default=True, + prs.add_argument('-oa','--output-anndata', action='store_true', help = 'Whether to output entire anndata objects for synthetic spatial data ' \ 'and paired single cell data. Default: True.' ) diff --git a/smoother/losses.py b/smoother/losses.py index a0bef71..f5a47ac 100644 --- a/smoother/losses.py +++ b/smoother/losses.py @@ -308,7 +308,7 @@ def calc_corr_decay_stats(self, coords, min_k = 0, max_k = 50, cov_ind = 0, retu try: cov = torch.cholesky_inverse(torch.linalg.cholesky(self.inv_cov[cov_ind].to_dense())) except RuntimeError as exc: - raise RuntimeError(f"The current loss ({self.prior}, l={self.rho}) " + raise RuntimeError(f"The current loss ({self.prior}, rho={self.rho}) " "contains an improper spatial covariance structure.
" "Please use a different spatial prior or scale " "if you intend to calculate covariance decay.") from exc diff --git a/smoother/simulation/utils.py b/smoother/simulation/utils.py index 3558276..5427ddf 100644 --- a/smoother/simulation/utils.py +++ b/smoother/simulation/utils.py @@ -11,7 +11,7 @@ from scipy.sparse import csr_matrix,lil_matrix import anndata from tqdm import tqdm -from smoother.weights import _coordinate_to_weights_knn +from smoother.weights import coordinate_to_weights_knn_dense def sample_cell_indices(generation_snrna, annot_label, cell_count_df, cell_capture_eff_df): @@ -66,7 +66,7 @@ def smooth_by_neighbors(shared_prop, locations2cells_matrix, coords, n_experimen smoothed cell index matrix (n_spots x n_cells). ''' # compute adjacency matrix - swm = _coordinate_to_weights_knn(coords, k=4, row_scale=True) * shared_prop + swm = coordinate_to_weights_knn_dense(coords, k=4, row_scale=True) * shared_prop swm = swm.fill_diagonal_(1 - shared_prop) # compute smoothed cell index matrix diff --git a/smoother/utils.py b/smoother/utils.py index 6ef2b87..81f7138 100644 --- a/smoother/utils.py +++ b/smoother/utils.py @@ -41,12 +41,19 @@ def normalize_minmax(x, min_zero = True, return_scale = False): return x_norm -def _pca(features, dim): - """Dimension reduction by PCA.""" +def _z_score(features): + """Z-score standardization.""" # standardize and remove constant features + # features.shape: num_feature x num_spot features_var = features.std(1) # feature level variance features_scaled = (features - features.mean(1, keepdim=True)) / features_var[:, None] features_scaled = features_scaled[features_var > 0,:] + return features_scaled + +def _pca(features, dim): + """Dimension reduction by PCA.""" + # standardize and remove constant features + features_scaled = _z_score(features) # run pca torch.manual_seed(0) # for repeatability, fix the random seed for pca_lowrank