File: calibrations.py

Package: xarray-safe-rcm 2024.11.0-1
import posixpath

import numpy as np
import xarray as xr
from tlz.dicttoolz import itemmap, merge_with, valfilter, valmap
from tlz.functoolz import compose_left, curry, flip
from tlz.itertoolz import first

from safe_rcm.product.dicttoolz import keysplit
from safe_rcm.product.reader import execute
from safe_rcm.product.transformers import extract_dataset
from safe_rcm.xml import read_xml


def move_attrs_to_coords(ds, names):
    """Promote the attributes named in `names` from `ds.attrs` to scalar coordinates."""
    coords, attrs = keysplit(lambda k: k in names, ds.attrs)

    new = ds.copy()
    new.attrs = attrs

    return new.assign_coords(coords)
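
# Hedged usage sketch (not part of the original module; the dataset below is
# illustrative only): the listed attributes become scalar coordinates, while
# the remaining attributes stay attached to the dataset.
#
#   >>> ds = xr.Dataset(
#   ...     {"gains": ("coefficients", [1.0, 2.0])},
#   ...     attrs={"sarCalibrationType": "Beta Nought", "comment": "kept"},
#   ... )
#   >>> moved = move_attrs_to_coords(ds, ["sarCalibrationType"])
#   >>> moved.coords["sarCalibrationType"].item()
#   'Beta Nought'
#   >>> moved.attrs
#   {'comment': 'kept'}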


def pad_common(dss):
    """Pad each dataset with NaN so all datasets share the same dimension sizes."""

    def compute_padding(item, maximum):
        key, value = item

        return key, (0, maximum[key] - value)

    sizes = [dict(ds.sizes) for ds in dss]
    # maximum size of each dimension across all datasets
    maximum_sizes = valmap(max, merge_with(list, *sizes))

    # per-dataset pad widths, padding only at the end of each dimension
    pad_widths = [itemmap(flip(compute_padding, maximum_sizes), _) for _ in sizes]

    return [
        ds.pad(padding, mode="constant", constant_values=np.nan)
        for ds, padding in zip(dss, pad_widths)
    ]
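
# Hedged usage sketch (illustrative datasets, not from the original module):
# datasets of unequal length are padded with NaN to a common size so that they
# can later be combined along a shared dimension.
#
#   >>> short = xr.Dataset({"a": ("coefficients", [1.0, 2.0])})
#   >>> longer = xr.Dataset({"a": ("coefficients", [1.0, 2.0, 3.0])})
#   >>> [ds.sizes["coefficients"] for ds in pad_common([short, longer])]
#   [3, 3]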


def _read_level(mapping):
    # Convert a decoded noise-level XML node into a dataset: rename the single
    # dimension longer than one element to "coefficients", demote existing
    # coordinates to plain variables, and promote the identifying metadata
    # attributes to coordinates.
    return (
        extract_dataset(mapping)
        .pipe(
            lambda ds: ds.swap_dims(
                {first(valfilter(lambda v: v > 1, ds.sizes)): "coefficients"}
            )
        )
        .pipe(lambda ds: ds.reset_coords())
        .pipe(
            move_attrs_to_coords,
            ["sarCalibrationType", "pixelFirstNoiseValue", "stepSize"],
        )
    )
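
# Hedged illustration of the dimension swap above (toy dataset, not RCM data):
# whichever dimension has more than one element gets renamed to
# "coefficients".
#
#   >>> ds = xr.Dataset({"noiseLevelValues": ("values", [0.1, 0.2, 0.3])})
#   >>> swapped = ds.swap_dims(
#   ...     {first(valfilter(lambda v: v > 1, ds.sizes)): "coefficients"}
#   ... )
#   >>> dict(swapped.sizes)
#   {'coefficients': 3}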


def read_noise_level_file(mapper, path):
    """Read a single noise-level XML file into a dict of combined datasets."""
    # Each entry of `layout` selects a section of the decoded XML and
    # describes the pipeline that turns it into one dataset; the per-beam and
    # azimuth sections are padded to a common size before combining.
    layout = {
        "/referenceNoiseLevel": {
            "path": "/referenceNoiseLevel",
            "f": compose_left(
                curry(map, _read_level),
                curry(map, lambda ds: ds.expand_dims("sarCalibrationType")),
                list,
                curry(xr.combine_by_coords, combine_attrs="drop_conflicts"),
            ),
        },
        "/perBeamReferenceNoiseLevel": {
            "path": "/perBeamReferenceNoiseLevel",
            "f": compose_left(
                curry(map, _read_level),
                curry(map, lambda ds: ds.expand_dims("sarCalibrationType")),
                list,
                pad_common,
                curry(xr.combine_by_coords, combine_attrs="drop_conflicts"),
            ),
        },
        "/azimuthNoiseLevelScaling": {
            "path": "/azimuthNoiseLevelScaling",
            "f": compose_left(
                curry(map, _read_level),
                list,
                pad_common,
                curry(xr.combine_by_coords, combine_attrs="drop_conflicts"),
            ),
        },
    }

    decoded = read_xml(mapper, path)

    # apply each section's pipeline to the decoded document
    converted = valmap(lambda x: execute(**x)(decoded), layout)

    return converted
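
# Hedged usage sketch ("mapper" is assumed to be an fsspec-style mapping over
# an extracted RCM product; the URL and file name below are illustrative
# only):
#
#   >>> import fsspec
#   >>> mapper = fsspec.get_mapper("file:///path/to/product.safe")
#   >>> levels = read_noise_level_file(mapper, "calibration/noiseLevels_VV.xml")
#   >>> sorted(levels)
#   ['/azimuthNoiseLevelScaling', '/perBeamReferenceNoiseLevel', '/referenceNoiseLevel']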


def read_noise_levels(mapper, root, fnames):
    """Read all per-polarization noise files and stack them along a "pole" dimension."""
    fnames = fnames.data.tolist()
    paths = [posixpath.join(root, name) for name in fnames]

    # polarization codes, taken from file names of the form
    # "noiseLevels_<pole>.xml"
    poles = [path.removesuffix(".xml").split("_")[1] for path in paths]
    trees = [read_noise_level_file(mapper, path) for path in paths]
    # group the per-file datasets by section, then concatenate each group
    # along a new "pole" dimension labelled with the polarization codes
    merged = merge_with(list, *trees)
    combined = valmap(
        compose_left(
            curry(xr.concat, dim="pole", combine_attrs="no_conflicts"),
            lambda x: x.assign_coords(pole=poles),
        ),
        merged,
    )

    return xr.DataTree.from_dict(combined)
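
# Hedged usage sketch ("fnames" is assumed to be an xarray DataArray of file
# names, hence the ".data.tolist()" above; names and paths are illustrative
# only, "mapper" as in the previous sketch):
#
#   >>> fnames = xr.DataArray(["noiseLevels_HH.xml", "noiseLevels_HV.xml"])
#   >>> tree = read_noise_levels(mapper, "calibration", fnames)
#   >>> tree["referenceNoiseLevel"].ds.coords["pole"].values.tolist()
#   ['HH', 'HV']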