Skip to content

Commit

Permalink
concise usage to not overcrowd readme [skip ci]
Browse files Browse the repository at this point in the history
  • Loading branch information
raamana committed Aug 31, 2017
1 parent cfd617e commit 83e12ec
Showing 1 changed file with 8 additions and 58 deletions.
66 changes: 8 additions & 58 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,73 +37,20 @@ This package computes single-subject networks, hence you may need loop over samp
A rough example of usage can be:

```python
from hiwenet import extract as hiwenet
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score
import numpy as np
import nibabel
import os

# ----------------------------------------------------
# toy examples - modify for your application/need
my_project = '/data/myproject'
subject_list = ['a1', 'b2', 'c3', 'd4']
subject_labels = [1, 1, -1, -1]

num_subjects = len(subject_list)
# number of features (imaging vertex-wise cortical thickness values over the whole brain)
feature_dimensionality = 1000
num_ROIs = 50
# number of unique ROI pairs = strictly-upper-triangular entries of an
# num_ROIs x num_ROIs matrix. Use integer division: np.empty requires integer
# dimensions, and the shape must be passed as a single tuple (the original
# np.empty(num_subjects, num_ROIs*(num_ROIs-1)/2.0) passed a float as dtype
# and would raise a TypeError).
num_edges = num_ROIs * (num_ROIs - 1) // 2
edge_weights = np.empty((num_subjects, num_edges))

atlas = 'fsaverage'
# ----------------------------------------------------

def get_parcellation(atlas, parcel_param):
    """Placeholder to insert your own function to return parcellation in reference space."""
    file_name = 'parcellation_param{}.mgh'.format(parcel_param)
    parc_path = os.path.join(atlas, file_name)
    # read the parcellation from the reference (fsaverage) surface space
    return nibabel.freesurfer.io.read_geometry(parc_path)


groups = get_parcellation(atlas, feature_dimensionality)

out_folder = os.path.join(my_project, 'hiwenet')

# choose a method from one among the three groups (metrics, semi-metrics and similarity functions)
metrics = ['manhattan', 'minowski', 'euclidean',
           'noelle_2', 'noelle_4', 'noelle_5']

semi_metric_list = ['kullback_leibler', 'cosine_1', 'jensen_shannon',
                    'chi_square', 'chebyshev', 'chebyshev_neg',
                    'histogram_intersection_1', 'relative_deviation',
                    'relative_bin_deviation', 'noelle_1', 'noelle_3',
                    'correlate_1']

similarity_func = ['correlate', 'cosine', 'cosine_2',
                   'cosine_alt', 'fidelity_based']



def get_features(subject_id):
    """Placeholder to insert your own function to read subject-wise features."""
    # assumes features live under <my_project>/base_features/<subject_id>/features.txt
    path_to_features = os.path.join(my_project, 'base_features',
                                    subject_id, 'features.txt')
    return np.loadtxt(path_to_features)


def upper_tri_vec(matrix):
    """Returns the vectorized values of upper triangular part of a matrix"""
    # indices strictly above the main diagonal (k=1 excludes the diagonal)
    rows, cols = np.triu_indices(matrix.shape[0], k=1, m=matrix.shape[1])
    return matrix[rows, cols]

# ------------------------------------------------------------------------------------
# see docs/example_thickness_hiwenet.py for a concrete example
# ------------------------------------------------------------------------------------

# compute a single-subject network for each subject and store its
# vectorized upper-triangular edge weights as one row of edge_weights.
for ss, subject in enumerate(subject_list):
    features = get_features(subject)
    # hiwenet is the `extract` function (imported as an alias); pass e.g.
    # weight_method=metrics[ii] to choose among the other available metrics.
    # NOTE: the diff artifact that assigned edge_weights_subject twice (the
    # second via hiwenet.extract, invalid under the function alias) is removed.
    edge_weights_subject = hiwenet(features, groups)
    edge_weights[ss, :] = upper_tri_vec(edge_weights_subject)

    out_file = os.path.join(out_folder, 'hiwenet_{}.txt'.format(subject))
Expand All @@ -120,6 +67,9 @@ scores = cross_val_score(rf, edge_weights, subject_labels)

```

A longer version of this example, with full details, is available [here](docs/example_thickness_hiwenet.py).


## Citation

If you found it useful for your research, please cite it as:
Expand Down

0 comments on commit 83e12ec

Please sign in to comment.