File: permutation_test.py

# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
import numpy as np
from nipy.neurospin.group.permutation_test import permutation_test_onesample


# Get group data
f = np.load('data/offset_002.npz')
data, vardata, xyz = f['mat'], f['var'], f['xyz']

# Create one-sample permutation test instance 
ptest = permutation_test_onesample(data, xyz, stat_id='wilcoxon')

# Cluster definition: (threshold, diameter)
# Note that a list of definitions can be passed to ptest.calibrate
cluster_def = (ptest.height_threshold(0.01), None)
print(cluster_def)
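# Illustrative sketch (not part of the original script): as noted above,
# several cluster definitions can be passed to calibrate at once, e.g. two
# height thresholds with no diameter constraint:
#   cluster_defs = [(ptest.height_threshold(0.01), None),
#                   (ptest.height_threshold(0.001), None)]
#   voxel_res, cluster_res, region_res = ptest.calibrate(nperms=100,
#                                                        clusters=cluster_defs)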

# Multiple calibration
# For accurate p-values, omit nperms (the default is 1e4),
# although the run will then take longer.
voxel_res, cluster_res, region_res = ptest.calibrate(nperms=100, clusters=[cluster_def])

# Simulated Zmax values for FWER correction
simu_zmax = ptest.zscore(voxel_res['perm_maxT_values'])
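# Illustrative sketch (not part of the original script): the permutation
# distribution of Zmax gives a family-wise error controlling height
# threshold; the 0.05 level (95th percentile) is an assumed, illustrative choice.
zmax_fwer_threshold = np.percentile(simu_zmax, 95)
print(zmax_fwer_threshold)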

# Output clusters
# cluster_res is a list with one entry per cluster definition passed to calibrate
clusters = cluster_res[0]
sizes = clusters['size_values'] 
clusters_Pcorr = clusters['size_Corr_p_values'] 
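# Illustrative follow-up (not part of the original script): report only
# the cluster sizes that survive FWER correction at an assumed 0.05 level.
surviving_sizes = np.asarray(sizes)[np.asarray(clusters_Pcorr) < 0.05]
print(surviving_sizes)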

# Simulated cluster sizes
simu_s = clusters['perm_size_values']
simu_smax = clusters['perm_maxsize_values']
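# Illustrative cross-check (not part of the original script): an empirical
# FWER-corrected p-value for each observed cluster is the fraction of
# permutations whose maximal cluster size is at least as large; this should
# roughly agree with size_Corr_p_values above.
empirical_pcorr = np.array([np.mean(simu_smax >= s) for s in sizes])
print(empirical_pcorr)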