File: script_first_level_bigrams.py

Package: nipy 0.1.2+20100526-2 (Debian squeeze, main); file: 125 lines, 4,610 bytes

# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Script that performs the first-level analysis of a dataset of the FIAC.
Last updated by B. Thirion

Author : Lise Favre, Bertrand Thirion, 2008-2009
"""
import os

from numpy import arange

from nipy.externals.configobj import ConfigObj
from nipy.neurospin.utils.mask import compute_mask_files
from nipy.neurospin.glm_files_layout import glm_tools, contrast_tools

# -----------------------------------------------------------
# --------- Set the paths -----------------------------------
# -----------------------------------------------------------

DBPath = "/volatile/thirion/db/word_reading"
Subjects = ["sbt0801651598-0002-00001-000160-01"]#, 
Acquisitions = ["default_acquisition"]
Sessions = ["session_01","session_02","session_03","session_04","session_05"]
model_id = "stimed"
fmri_wc = "asession_*_*.nii"
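# The data are expected in a BrainVISA-like hierarchy,
# DBPath/<subject>/fMRI/<acquisition>/; fmri_wc is the wildcard used to
# locate the fMRI images of each session.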

# ---------------------------------------------------------
# -------- General Information ----------------------------
# ---------------------------------------------------------

tr = 2.4
nb_frames = 154
frametimes = arange(nb_frames) * tr
Conditions = [ 'trial_%04d' %i for i in range(230) ]
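# tr is the inter-scan repetition time (seconds), frametimes the acquisition
# time of each of the nb_frames scans, and Conditions the trial-wise
# condition names (trial_0000 ... trial_0229) entered in the model.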

# ---------------------------------------------------------
# ------ First level analysis parameters ---------------------
# ---------------------------------------------------------

#---------- Masking parameters 

infTh = 0.75
supTh = 0.95
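# infTh/supTh: lower/upper thresholds passed to compute_mask_files below
# (presumably fractions of the image intensity histogram).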

#---------- Design Matrix

hrf_model = "Canonical"
drift_model = "Cosine"
hfcut = 128
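# Canonical HRF, cosine drift basis; hfcut is the high-pass cut-off period
# of the drift model, in seconds.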

#-------------- GLM options
# Possible choices : "Kalman_AR1", "Kalman", "Ordinary Least Squares"
fit_algo = "Ordinary Least Squares"#"Kalman_AR1"
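# "Ordinary Least Squares" assumes independent noise; the "Kalman_AR1"
# option additionally fits an AR(1) noise model.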

#-------------- Contrast Options
# Possible choices : "Contrast Name" or "Contrast Number"
save_mode = "Contrast Name"

# ------------------------------------------------------------------
# Launching Pipeline on all subjects, all acquisitions, all sessions 
# -------------------------------------------------------------------

# fixme : everything below should be data-independent, i.e. a standard user
# should not need to edit it; all dataset-specific paths are fully determined
# at this point.

# Treat sequentially all subjects & acquisitions
for s in Subjects:
    print "Subject : %s" % s
    
    for a in Acquisitions:
        # step 1. set all the paths
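        # generate_all_brainvisa_paths builds a dictionary of input/output
        # paths ('misc', 'mask', 'paradigm', 'dmtx', 'fmri', 'glm_dump',
        # 'glm_config', 'contrast_file', 'contrasts'), keyed by session
        # where relevant.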
        basePath = os.sep.join((DBPath, s, "fMRI", a))
        paths = glm_tools.generate_all_brainvisa_paths( basePath, Sessions, 
                                                        fmri_wc, model_id)  
          
        misc = ConfigObj(paths['misc'])
        misc["sessions"] = Sessions
        misc["tasks"] = Conditions
        misc["mask_url"] = paths['mask']
        misc.write()

        # step 2. Create one design matrix for each session
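        # design_matrix reads the paradigm of the session, convolves the
        # conditions with the chosen HRF, adds the drift terms sampled at
        # frametimes, and stores the result under paths['dmtx'][sess].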
        design_matrices = {}
        for sess in Sessions:
            design_matrices[sess] = glm_tools.design_matrix(
                paths['misc'], paths['dmtx'][sess], sess, paths['paradigm'],
                frametimes, hrf_model=hrf_model, drift_model=drift_model,
                hfcut=hfcut, model=model_id)
                        
        # step 3. Compute the Mask
        # fixme : it should be possible to provide a pre-computed mask
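        # The mask is built from the first fMRI image of one of the sessions
        # and written to paths['mask'].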
        print "Computing the Mask"
        mask_array = compute_mask_files( paths['fmri'].values()[0][0], 
                                         paths['mask'], True, infTh, supTh)
                                      
        # step 4. Creating functional contrasts
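        # ContrastList derives a default set of contrasts from the
        # sessions/conditions recorded in the misc file; save_dic writes this
        # dictionary to paths['contrast_file'] and returns it.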
        print "Creating Contrasts"
        clist = contrast_tools.ContrastList(misc=ConfigObj(paths['misc']),
                                            model=model_id)
        contrast = clist.save_dic(paths['contrast_file'])
        CompletePaths = glm_tools.generate_brainvisa_ouput_paths( 
                        paths["contrasts"],  contrast)

        # step 5. Fit the GLM for each session
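        # glm_fit masks the session data, fits the GLM with fit_algo, and
        # saves the fitted model under paths['glm_dump'][sess] and
        # paths['glm_config'][sess].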
        glms = {}
        for sess in Sessions:
            print "Fitting GLM for session : %s" % sess
            glms[sess] = glm_tools.glm_fit(
                paths['fmri'][sess], design_matrices[sess],
                paths['glm_dump'][sess], paths['glm_config'][sess],
                fit_algo, paths['mask'])
            
        # step 6. Compute contrasts
        print "Computing contrasts"
        # the 'effect_of_interest' contrast is too memory-hungry to compute, so drop it
        #contrast.pop('effect_of_interest')
        contrast["contrast"].remove('effect_of_interest')
        glm_tools.compute_contrasts(
            contrast, misc, CompletePaths, glms, model=model_id)