File: volumetric_analysis.py

"""
.. _volumetric_analysis_example:

Volumetric Analysis
~~~~~~~~~~~~~~~~~~~


Calculate mass properties such as the volume or area of datasets.
"""

# sphinx_gallery_thumbnail_number = 4
from __future__ import annotations

import numpy as np

from pyvista import examples

# %%
# Computing mass properties such as the volume or area of datasets in PyVista
# is quite easy using the :func:`pyvista.DataObjectFilters.compute_cell_sizes`
# filter and the :attr:`pyvista.DataSet.volume` property on all PyVista meshes.
#
# Let's get started with a simple gridded mesh:

# Load a simple example mesh
dataset = examples.load_uniform()
dataset.set_active_scalars('Spatial Cell Data')

# %%
# We can then calculate the volume of every cell in the mesh using the
# ``.compute_cell_sizes`` filter, which by default adds arrays for the volume
# and area to the cell data of the mesh.

# Compute volumes and areas
sized = dataset.compute_cell_sizes()

# Grab volumes for all cells in the mesh
cell_volumes = sized.cell_data['Volume']

# %%
# We can also compute the total volume of the mesh using the ``.volume``
# property:

# Compute the total volume of the mesh
volume = dataset.volume
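
# %%
# As a quick sanity check, the per-cell volumes computed above should sum to
# the total mesh volume, since both are derived from the same cell geometry:
print(f'Sum of cell volumes: {cell_volumes.sum()}')
print(f'Total mesh volume:   {volume}')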

# %%
# But what if we threshold a dataset and are left with two separate
# volumetric bodies in a single mesh? Take this for example:


threshed = dataset.threshold_percent([0.15, 0.50], invert=True)
threshed.plot(show_grid=True, cpos=[-2, 5, 3])

# %%
# We could then assign a classification array to distinguish the two bodies,
# compute the cell sizes, and extract the volume of each body. Note that there
# is a simpler implementation of this below in :ref:`split_vol`.

# Create a classifying array to ID each body
rng = dataset.get_data_range()
cval = ((rng[1] - rng[0]) * 0.20) + rng[0]
classifier = threshed.cell_data['Spatial Cell Data'] > cval

# Compute cell volumes
sizes = threshed.compute_cell_sizes()
volumes = sizes.cell_data['Volume']

# Split volumes based on classifier and get the volumes
idx = np.argwhere(classifier)
hvol = np.sum(volumes[idx])
idx = np.argwhere(~classifier)
lvol = np.sum(volumes[idx])

print(f'Low grade volume: {lvol}')
print(f'High grade volume: {hvol}')
print(f'Original volume: {dataset.volume}')
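
# %%
# The two classified volumes together should account for the full volume of
# the thresholded mesh, since the classifier assigns every remaining cell to
# one body or the other:
print(f'Thresholded volume: {threshed.volume}')
print(f'Sum of body volumes: {hvol + lvol}')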

# %%
# Or better yet, you could extract the largest connected volume from your
# dataset directly by passing ``'largest'`` to the ``connectivity`` filter and
# specifying the scalar range of interest.

# Grab the largest connected volume within a scalar range
scalar_range = [0, 77]  # Range corresponding to bottom 15% of values
largest = threshed.connectivity('largest', scalar_range=scalar_range)

# Get volume as numeric value
large_volume = largest.volume
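
# Print the extracted volume so it can be compared with the values above
print(f'Largest connected volume: {large_volume}')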

# Display it
largest.plot(show_grid=True, cpos=[-2, 5, 3])


# %%
# -----
#
# .. _split_vol:
#
# Splitting Volumes
# +++++++++++++++++
#
# What if instead we wanted to split all of the connected bodies or volumes in
# a dataset like the one above? We could use the
# :func:`pyvista.DataSetFilters.split_bodies` filter to extract each connected
# volume in the dataset into its own block of a :class:`pyvista.MultiBlock`
# dataset. For example, let's split the thresholded volume from the example
# above:

# Load a simple example mesh
dataset = examples.load_uniform()
dataset.set_active_scalars('Spatial Cell Data')
threshed = dataset.threshold_percent([0.15, 0.50], invert=True)

bodies = threshed.split_bodies()

for i, body in enumerate(bodies):
    print(f'Body {i} volume: {body.volume:.3f}')
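
# %%
# Since the bodies partition the thresholded mesh, their volumes should sum
# to the thresholded volume computed earlier:
print(f'Sum of body volumes: {sum(body.volume for body in bodies):.3f}')
print(f'Thresholded volume:  {threshed.volume:.3f}')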

# %%


bodies.plot(show_grid=True, multi_colors=True, cpos=[-2, 5, 3])


# %%
# -----
#
# A Real Dataset
# ++++++++++++++
#
# Here is a realistic training dataset of fluvial channels in the subsurface.
# We will threshold the channels from the dataset, then separate each
# significantly large body and compute its volume.
#
# Load up the data and threshold the channels:

data = examples.load_channels()
channels = data.threshold([0.9, 1.1])
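
# %%
# Report the total channel volume before splitting, for reference:
print(f'Total channel volume: {channels.volume:.3f}')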

# %%
# Now extract all the different bodies and compute their volumes:

bodies = channels.split_bodies()
# Now remove all bodies with a small volume
for key in bodies.keys():
    b = bodies[key]
    vol = b.volume
    if vol < 1000.0:
        del bodies[key]
        continue
    # Now let's add a volume array to each remaining block
    b.cell_data['TOTAL VOLUME'] = np.full(b.n_cells, vol)
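
# %%
# Report how many bodies passed the volume cutoff:
print(f'Number of significant bodies: {len(bodies)}')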

# %%
# Print out the volumes for each body:

for i, body in enumerate(bodies):
    print(f'Body {i:02d} volume: {body.volume:.3f}')

# %%
# And visualize all the different volumes:

bodies.plot(scalars='TOTAL VOLUME', cmap='viridis', show_grid=True)
# %%
# .. tags:: filter