File: hdf_sd.t

#!/usr/bin/perl -w
#
# t/hdf_sd.t
#
# Tests the SD interface to the HDF library.
#
# 29 March 2006
# Judd Taylor, USF IMaRS
#
use strict;
use PDL;
use Test::More;

BEGIN
{
    eval( " use PDL::IO::HDF; " );
    if( $@ )
    {
        plan skip_all => "PDL::IO::HDF module not available.";
    }  
    else
    {
        plan tests => 32;
    }
}

use ExtUtils::testlib;

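# tapprox(): true when every element of two piddles agrees to within 1e-5.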
sub tapprox
{
    my $pa = shift;
    my $pb = shift;
    my $d = abs($pa - $pb);
    return all($d < 1.0e-5);
}

use PDL::Config;
my $tmpdir = $PDL::Config{TEMPDIR};

my $testfile = "$tmpdir/sdtest.hdf";

use PDL::IO::HDF::SD;

### Creating and writing to an HDF file
    
#Create an HDF file (a leading '-' on the filename opens it for writing)
my $SDobj = PDL::IO::HDF::SD->new( "-$testfile" );
        
#Define some data
my $data = sequence(short, 500, 5);

# TEST 1:
#Put the data into the file as the 'myData' dataset,
#naming its dimensions 'dim1' and 'dim2'
ok( $SDobj->SDput("myData", $data , ['dim1','dim2']), 'SDput()' );

# TEST 2:
#Set some local attributes on 'myData'
#Set the fill value to 0
ok( $SDobj->SDsetfillvalue("myData", 0), 'SDsetfillvalue()' );

# TEST 3:
#Set the valid range from 0 to 2000
ok( $SDobj->SDsetrange("myData", [0, 2000]), 'SDsetrange()' );

# TEST 4:
#Set the default calibration for 'myData' (scale factor = 1, all other calibration terms = 0)
ok( $SDobj->SDsetcal("myData"), 'SDsetcal()' );

# TEST 5:
#Set a global text attribute
ok( $SDobj->SDsettextattr('This is a global text test!!', "myGText" ), 'SDsettextattr() (global)' );

# TEST 6:
#Set a local text attribute on 'myData'
ok( $SDobj->SDsettextattr('This is a local text test!!', "myLText", "myData" ), 'SDsettextattr() (local)' );

# TEST 7:
#Set a global value attribute (any piddle value can be stored)
ok( $SDobj->SDsetvalueattr( PDL::short( 20 ), "myGValue"), 'SDsetvalueattr() (global)' );

# TEST 8:
#Set a local value attribute (any piddle value can be stored)
ok( $SDobj->SDsetvalueattr( PDL::long( [20, 15, 36] ), "myLValues", "myData" ), 'SDsetvalueattr() (local)' );

#Close the file
$SDobj->close;

# TEST 9:
# Test Hishdf:
ok( PDL::IO::HDF::SD::Hishdf( $testfile ), 'Hishdf()' );
    
### Reading from a HDF file

#Open the HDF file in read-only mode
my $SDobj2 = PDL::IO::HDF::SD->new( $testfile );

# TEST 10:
#Get a list of all datasets
my @dataset_list = $SDobj2->SDgetvariablenames();
ok( $#dataset_list+1, 'SDgetvariablenames()' );

# TEST 11:
#Get a list of all global attribute names
my @globattr_list = $SDobj2->SDgetattributenames();
ok( $#globattr_list+1, 'SDgetattributenames() (global)' );

# TEST 12:
#Get a list of local attribute names for a dataset
my @locattr_list = $SDobj2->SDgetattributenames( "myData" );
#print "\@locattr_list = " . join(", ", @locattr_list ) . "\n";
ok( $#locattr_list+1, 'SDgetattributenames() (local)' );

# TEST 13:
#Get the value of a local attribute of a dataset
my $value = $SDobj2->SDgetattribute( "myLText", "myData" );
ok( defined($value), 'SDgetattribute() (local)' );

# TEST 14:
#Read the entire 'myData' dataset
$data = $SDobj2->SDget("myData");
ok( $data->nelem() > 0, 'SDget()' );
#print "info : ".$data->info."\n";

# TEST 15:
#Get the scale factor of 'myData'
my $res = $SDobj2->SDgetscalefactor("myData");
ok( defined($res), 'SDgetscalefactor()' );

# TEST 16:
#Get the fill value
#The fill value corresponds to the BAD value in PDL
$res = $SDobj2->SDgetfillvalue("myData");
ok( defined($res), 'SDgetfillvalue()' );

# TEST 17:
#Get the valid range of the data
my @range = $SDobj2->SDgetrange("myData");
ok( $#range+1, 'SDgetrange()' );

#print Data::Dumper->Dump([$SDobj2],[qw(SDobj2)]);
 
#Now you can do what you want with your data
$SDobj2->close;

#
# These are from the old sdcompress.t test file:
#
undef($data);
my $HDFobj = PDL::IO::HDF::SD->new("-$testfile");

# TEST 18:
#Define some data
$data = ones( short, 5000, 5);
#Put the data into the file as the 'myData' dataset,
#naming its dimensions 'dim1' and 'dim2'
ok( $HDFobj->SDput("myData", $data , ['dim1','dim2']), 'SDput()' );

# TEST 19:
# Compress the SD dataset
# No longer necessary with chunking on by default:
#$res = $HDFobj->SDsetcompress("myData", 5);
ok( 1, 'Compress SD dataset (obsolete)' );

# TEST 20:
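# Re-write the dataset and read it back to confirm the round trip: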
$HDFobj->SDput("myData", $data , ['dim1','dim2']);
$data = $HDFobj->SDget("myData");
ok( $data->nelem(), 'SDget()' );

$HDFobj->close();

#
# These tests are from the old 11sdchunk.t test file:
#
my $hdf = PDL::IO::HDF::SD->new( "-$testfile" );

# TEST 21:
# Make sure chunking is on by default:
ok( $hdf->Chunking(), 'Chunking()' );

# Turn off chunking:
$hdf->Chunking(0);

# TEST 22:
# Make sure it's really off:
ok( !$hdf->Chunking(), 'Chunking(0)' );

# Write out a normal dataset:
my $dataset = sequence( byte, 10, 10 );
$res = $hdf->SDput( "NO_CHUNK", $dataset );

# TEST 23:
# Make sure we can write unchunked SDs:
ok( $res, 'SDput() (unchunked)' );

$hdf->close();
undef($hdf);

# TEST 24 & 25:
# Make sure we can read it properly:
$hdf = PDL::IO::HDF::SD->new( $testfile );

my $dataset_test = $hdf->SDget( "NO_CHUNK" );
my $good = ($dataset_test->nelem() > 0) ? 1 : 0;
ok( $good, 'SDget() (unchunked)' );
my $do_skip = $good ? '' : 'Skip if failed previous test!';
SKIP: {
    skip( "Previous test failed!", 1 ) if $do_skip;
    ok( tapprox( $dataset, $dataset_test ), 'comparing datasets written out and read in (unchunked)' );
}

$hdf->close();
undef($hdf);
unlink( $testfile );

# Reopen to write out the chunked portion:
$hdf = PDL::IO::HDF::SD->new( "-$testfile" );

my $dataset2d = sequence( long, 200, 200 );

# TEST 26:
# Make sure the chunked write works:
$res = $hdf->SDput( "CHUNK_2D", $dataset2d );
ok( $res, 'SDput() (chunked, 2D)' );

# TEST 27:
# Make sure it works with more than 2 dims:
my $dataset3d = sequence( long, 200, 200, 10 );
$res = $hdf->SDput( "CHUNK_3D", $dataset3d );
ok( $res, 'SDput() (chunked, 3D)');

$hdf->close();
undef($hdf);

# Verify the datasets we just wrote:
$hdf = PDL::IO::HDF::SD->new( $testfile );

# TEST 28 & 29:
my $dataset2d_test = $hdf->SDget( "CHUNK_2D" );
$good = $dataset2d_test->nelem() > 0;
ok( $good, 'SDget() (chunked, 2D)' );
$do_skip = $good ? '' : 'Skip if failed previous test!';
SKIP: {
    skip( "Previous test failed!", 1 ) if $do_skip;
    ok( tapprox( $dataset2d, $dataset2d_test ), 'comparing datasets written out and read in (chunked, 2D)' );
}

# TEST 30 & 31:
my $dataset3d_test = $hdf->SDget( "CHUNK_3D" );
$good = $dataset3d_test->nelem() > 0;
ok( $good, 'SDget() (chunked, 3D)' );
$do_skip = $good ? '' : 'Skip if failed previous test!';
SKIP: {
    skip( "Previous test failed!", 1 ) if $do_skip;
    ok( tapprox( $dataset3d, $dataset3d_test ), 'comparing datasets written out and read in (chunked, 3D)' );
}

$hdf->close();
undef($hdf);

#
# These tests are from the old 07hdfdump.t test file:
#
my $H = PDL::IO::HDF->new( $testfile );
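# The generic PDL::IO::HDF object exposes the SD and VData interfaces
# through its {SD} and {VS} entries.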

print ">>Global attributes :\n";
foreach my $attr ( $$H{SD}->SDgetattributenames() )
{
    my $curattr = $$H{SD}->SDgetattribute( $attr );
    print "\t$attr = \n";
    foreach ( split("\n", $curattr) )
        { print "\t\t$_\n"; }
}
    
print ">>Datasets :\n";
foreach my $dataset ( $$H{SD}->SDgetvariablenames() )
{
    print "\t$dataset\n";
    print "\t\tdimensions : \n";	
    my @dimname = $$H{SD}->SDgetdimname( $dataset );
    my @dimsize = $$H{SD}->SDgetdimsize( $dataset );
    my @dimsizeU = $$H{SD}->SDgetunlimiteddimsize( $dataset );
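    # A dimension size of 0 marks an unlimited dimension; in that case report
    # the current size returned by SDgetunlimiteddimsize() instead.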
    foreach ( my $i = 0; $i <= $#dimsize; $i++ )
    {
        print "\t\t\t$dataset:$dimname[$i] = " 
            . ( (!$dimsize[$i]) ? "$dimsizeU[$i] (UNLIMITED)\n" : "$dimsize[$i]\n");
    }
        
    print "\t\tlocal attributes : \n";
    foreach my $locattr ( $$H{SD}->SDgetattributenames( $dataset ) )
    {
        my $curattr = $$H{SD}->SDgetattribute( $locattr, $dataset );
        print "\t\t\t$dataset:$locattr = $curattr\n";
    }
}
    
print ">>VData :\n";
foreach my $Vname ( $$H{VS}->VSgetnames() )
{
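    # Only dump Vdatas that hold real data, not attribute storage.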
    if( !$$H{VS}->VSisattr($Vname) )
    {
        print "\t$Vname\n";
        foreach my $Vfieldname ( $$H{VS}->VSgetfieldnames( $Vname ) )
        {
            my $val = $$H{VS}->VSread( $Vname, $Vfieldname);
            print "\t\t$Vfieldname " 
                . ( ( $val->nelem > 10 ) ? ": too many values!\n" : "= $val\n" );
                
            print "**** $val\n"
                if( $Vfieldname eq 'attach_flag' );
        }
    }
}

# TEST 32:
$H->close();
ok( 1, 'close()' );


# Remove the testfile:
unlink( $testfile );

exit(0);