extern crate rand;
extern crate half;

use std::io::{BufReader};
use std::fs::File;
use exr::block::reader::ChunksReader;

// exr imports
extern crate exr;
/// Collects the average pixel value for each channel.
/// Does not load the whole image into memory at once: only processes the image block by block.
/// On my machine, this program analyzes a 3GB file while only allocating 1.1MB.
fn main() {
    use exr::prelude::*;

    let file = BufReader::new(
        File::open("3GB.exr")
            .expect("run example `7_write_raw_blocks` to generate this image file")
    );
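    // buffering the file is worthwhile here, since the block reader performs many small reads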

    // -- the following structs will hold the collected data from the image --

    /// Collect averages for each layer in the image
    #[derive(Debug)]
    struct Layer {
        #[allow(unused)] // note: is used in Debug impl
        layer_name: Option<Text>,
        data_window: IntegerBounds,

        /// Collect one average float per channel in the layer
        channels: Vec<Channel>,
    }

    /// A single channel in the layer, holds a single average value
    #[derive(Debug)]
    struct Channel {
        #[allow(unused)] // note: is used in Debug impl
        channel_name: Text,
        sample_type: SampleType, // f32, u32, or f16
        average: f32,
    }

    let start_time = ::std::time::Instant::now();

    // -- read the file, summing up the average pixel values --

    // start reading the file, extracting the meta data of the image
    let reader = exr::block::read(file, true).unwrap();
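    // the `true` passed above is the pedantic flag, asking the library to validate the file more strictly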

    // print progress only if it advances more than 1%
    let mut current_progress_percentage = 0;

    // create the empty data structure that will collect the analyzed results,
    // based on the extracted meta data of the file
    let mut averages = reader.headers().iter()

        // create a layer for each header in the file
        .map(|header| Layer {
            layer_name: header.own_attributes.layer_name.clone(),
            data_window: header.data_window(),

            // create an averaging channel for each channel in the file
            channels: header.channels.list.iter()
                .map(|channel| Channel {
                    channel_name: channel.name.clone(),
                    sample_type: channel.sample_type,
                    average: 0.0
                })
                .collect()
        })
        .collect::<Vec<_>>();
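    // the nesting mirrors the file structure (one entry per header, one per channel),
    // so the block callback below can index it directly via `line.location`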

    // create a reader that loads only relevant chunks from the file, and also prints something on progress
    let reader = reader

        // do not worry about multi-resolution levels or deep data
        .filter_chunks(true, |meta_data, tile, block| {
            let header = &meta_data.headers[block.layer];
            !header.deep && tile.is_largest_resolution_level()
        }).unwrap()

        .on_progress(|progress|{
            let new_progress = (progress * 100.0) as usize;
            if new_progress != current_progress_percentage {
                current_progress_percentage = new_progress;
                println!("progress: {}%", current_progress_percentage)
            }
        });
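    // the progress value is a fraction between 0 and 1; the closure above converts it
    // to whole percentage steps so each percentage is printed at most once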

    // read all pixel blocks from the image, decompressing in parallel
    reader.decompress_parallel(true, |meta_data, block|{
        let header = &meta_data.headers[block.index.layer];

        // collect all pixel values from the pixel block
        for line in block.lines(&header.channels) {
            let layer = &mut averages[line.location.layer];
            let channel = &mut layer.channels[line.location.channel];
            let channel_sample_count = layer.data_window.size.area() as f32;
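
            // each sample contributes `value / total_pixel_count`, so the sum
            // converges to the channel mean without a second pass over the data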
            // now sum the average based on the values in this line section of pixels
            match channel.sample_type {
                SampleType::F16 => for value in line.read_samples::<f16>() {
                    channel.average += value?.to_f32() / channel_sample_count;
                },

                SampleType::F32 => for value in line.read_samples::<f32>() {
                    channel.average += value? / channel_sample_count;
                },

                SampleType::U32 => for value in line.read_samples::<u32>() {
                    channel.average += (value? as f32) / channel_sample_count;
                },
            }
        }

        Ok(())
    }).unwrap();
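    // note: if parallel decoding is not wanted, the `ChunksReader` trait should also
    // offer a sequential counterpart (`decompress_sequential`); check the exr block
    // reader documentation for its exact signature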
println!("average values: {:#?}", averages);
// warning: highly unscientific benchmarks ahead!
println!("\nprocessed file in {:?}s", start_time.elapsed().as_secs_f32());
}