#!/usr/bin/env python3
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Get two images from the web (one mask image and one spmT image) and put them in
the nipy user dir - usually therefore at ``~/.nipy/tests/data``.
Author : Bertrand Thirion, 2009
"""
import os
import tarfile

try:
    from urllib2 import urlopen   # Python 2
except ImportError:
    from urllib.request import urlopen   # Python 3

from nibabel.data import get_nipy_user_dir

NIPY_DIR = get_nipy_user_dir()
DATA_DIR = os.path.join(NIPY_DIR, 'tests', 'data')
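

# The download stanzas in the two functions below all follow the same
# pattern, so it is factored into a small helper. ``_fetch_file`` is an
# editorial addition for clarity, not a function from the original script
# or from nipy's public API.
def _fetch_file(url, filename, target, msg=None):
    """Download ``url``/``filename`` to the local path ``target``.

    Does nothing if ``target`` already exists; prints ``msg`` (when given)
    before a download starts. Remote paths always use forward slashes, so
    they are joined with '/' rather than ``os.path.join``, which would
    produce backslashes on Windows.
    """
    if os.path.exists(target):
        return
    if msg is not None:
        print(msg)
    fp = urlopen('%s/%s' % (url, filename))
    try:
        with open(target, 'wb') as local_file:
            local_file.write(fp.read())
    finally:
        fp.close()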

def get_second_level_dataset():
    """ Lightweight dataset for multi-subject analysis
    """
    # define several paths
    url = 'ftp://ftp.cea.fr/pub/dsv/madic/download/nipy'
    mask_image = os.path.join(DATA_DIR, 'mask.nii.gz')
    input_image = os.path.join(DATA_DIR, 'spmT_0029.nii.gz')
    group_data = os.path.join(DATA_DIR, 'group_t_images.tar.gz')

    # if needed, create DATA_DIR
    if not os.path.exists(DATA_DIR):
        os.makedirs(DATA_DIR)
    assert os.path.exists(DATA_DIR)

    # download the files if necessary
    _fetch_file(url, 'mask.nii.gz', mask_image)
    _fetch_file(url, 'spmT_0029.nii.gz', input_image)
    _fetch_file(url, 'group_t_images.tar.gz', group_data)

    # extract group_data into DATA_DIR, then drop the tarball
    tar = tarfile.open(group_data)
    tar.extractall(DATA_DIR)
    tar.close()
    os.remove(group_data)
    return DATA_DIR

def get_first_level_dataset():
    """ Heavier dataset (~30 MB) for first-level analysis
    """
    # define several paths
    url = 'ftp://ftp.cea.fr/pub/dsv/madic/download/nipy'
    raw_fmri = os.path.join(DATA_DIR, 's12069_swaloc1_corr.nii.gz')
    paradigm = os.path.join(DATA_DIR, 'localizer_paradigm.csv')

    # if needed, create DATA_DIR
    if not os.path.exists(DATA_DIR):
        os.makedirs(DATA_DIR)
    assert os.path.exists(DATA_DIR)

    # download the paradigm file and the raw fmri image if necessary
    _fetch_file(url, 'localizer_paradigm.csv', paradigm,
                msg='Downloading paradigm file, this may take time')
    _fetch_file(url, 's12069_swaloc1_corr.nii.gz', raw_fmri,
                msg='Downloading fmri image, this may take time')
    return DATA_DIR
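
# Usage sketch (assumption: this file is importable under the name
# ``get_data_light``; adjust the import to match where it actually lives):
#
#     from get_data_light import get_second_level_dataset
#     data_dir = get_second_level_dataset()
#     # data_dir now holds mask.nii.gz, spmT_0029.nii.gz and the
#     # extracted group t-images
#
# get_first_level_dataset() works the same way but fetches the heavier
# (~30 MB) raw fMRI image and its paradigm file.
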
if __name__ == '__main__':
    get_second_level_dataset()