File: LIGO_bigdog.py

"""
Fetch the LIGO BigDog time-domain dataset
"""
import os
from io import BytesIO
from gzip import GzipFile
import numpy as np

from . import get_data_home
from .tools import download_with_progress_bar

DATA_URL_LARGE = ('https://github.com/astroML/astroML-data/raw/main/datasets/'
                  'hoft.968653908-968655956.H1.dat.gz')
LOCAL_FILE_LARGE = 'LIGO_large.npy'

DATA_URL = 'http://www.ligo.org/science/GW100916/HLV-strain.txt'
LOCAL_FILE = 'LIGO_bigdog.npy'


def fetch_LIGO_large(data_home=None, download_if_missing=True):
    """Loader for LIGO large dataset

    Parameters
    ----------
    data_home : optional, default=None
        Specify another download and cache folder for the datasets. By default
        all astroML data is stored in '~/astroML_data'.

    download_if_missing : optional, default=True
        If False, raise an IOError if the data is not locally available
        instead of trying to download the data from the source site.

    Returns
    -------
    data : ndarray
        Approximately 2000 seconds of amplitude data from the LIGO Hanford
        detector.
    dt : float
        The time spacing between measurements, in seconds.
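
    Examples
    --------
    A minimal usage sketch; the first call downloads the data, later calls
    reuse the copy cached under ``data_home``:

    >>> from astroML.datasets import fetch_LIGO_large
    >>> data, dt = fetch_LIGO_large()  # doctest: +IGNORE_OUTPUT +REMOTE_DATA
    >>> print(dt)  # doctest: +REMOTE_DATA
    0.000244140625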
    """
    data_home = get_data_home(data_home)

    local_file = os.path.join(data_home, LOCAL_FILE_LARGE)

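    # Use the cached local copy if the data has already been downloaded.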
    if os.path.exists(local_file):
        data = np.load(local_file)

    else:
        if not download_if_missing:
            raise IOError('data not present on disk. '
                          'set download_if_missing=True to download')

        print("downloading LIGO bigdog data from %s to %s"
              % (DATA_URL_LARGE, local_file))

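        # Download the gzipped text data into memory, decompress it, and
        # cache the parsed array as a binary .npy file for faster reloads.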
        zipped_buf = download_with_progress_bar(DATA_URL_LARGE,
                                                return_buffer=True)
        gzf = GzipFile(fileobj=zipped_buf, mode='rb')
        print("uncompressing file...")
        extracted_buf = BytesIO(gzf.read())
        data = np.loadtxt(extracted_buf)
        np.save(local_file, data)

    return data, 1. / 4096  # time spacing dt: data are sampled at 4096 Hz


def fetch_LIGO_bigdog(data_home=None, download_if_missing=True):
    """Loader for LIGO bigdog event

    Parameters
    ----------
    data_home : optional, default=None
        Specify another download and cache folder for the datasets. By default
        all astroML data is stored in '~/astroML_data'.

    download_if_missing : optional, default=True
        If False, raise an IOError if the data is not locally available
        instead of trying to download the data from the source site.

    Returns
    -------
    data : record array
        The data contains 10 seconds of measurements from the Hanford,
        Livingston, and Virgo sites, along with the time of each measurement.

    Examples
    --------
    >>> from astroML.datasets import fetch_LIGO_bigdog
    >>> data = fetch_LIGO_bigdog()  # doctest: +IGNORE_OUTPUT +REMOTE_DATA
    >>> print(data.dtype.names)  # doctest: +REMOTE_DATA
    ('t', 'Hanford', 'Livingston', 'Virgo')
    >>> print(data['t'][:3])  # doctest: +REMOTE_DATA
    [  0.00000000e+00   6.10400000e-05   1.22070000e-04]
    >>> print(data['Hanford'][:3])  # doctest: +REMOTE_DATA
    [  1.26329846e-17   1.26846778e-17   1.19187381e-17]
    """
    data_home = get_data_home(data_home)

    local_file = os.path.join(data_home, LOCAL_FILE)

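    # Reuse the locally cached copy if it has already been downloaded.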
    if os.path.exists(local_file):
        data = np.load(local_file)

    else:
        if not download_if_missing:
            raise IOError('data not present on disk. '
                          'set download_if_missing=True to download')

        print("downloading LIGO bigdog data from %s to %s"
              % (DATA_URL, local_file))

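        # Download the whitespace-delimited text file, parse it into a
        # record array, and cache it as a binary .npy file.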
        buffer = download_with_progress_bar(DATA_URL, return_buffer=True)
        data = np.loadtxt(buffer, skiprows=2,
                          dtype=[('t', 'f8'),
                                 ('Hanford', 'f8'),
                                 ('Livingston', 'f8'),
                                 ('Virgo', 'f8')])
        np.save(local_file, data)

    return data
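

if __name__ == "__main__":
    # Illustrative usage sketch (not part of the public API): summarize both
    # datasets.  This assumes the module lives at astroML.datasets.LIGO_bigdog,
    # so run it with ``python -m astroML.datasets.LIGO_bigdog`` to keep the
    # relative imports above working; the first run downloads the data.
    data, dt = fetch_LIGO_large()
    print("LIGO large: %d samples, dt = %.3e s, duration = %.1f s"
          % (len(data), dt, len(data) * dt))

    bigdog = fetch_LIGO_bigdog()
    print("LIGO bigdog columns: %s" % (bigdog.dtype.names,))
    print("LIGO bigdog duration: %.2f s" % (bigdog['t'][-1] - bigdog['t'][0]))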