File: run_get_hdf4_files.sh

#!/bin/sh

# This shell script gets some sample HDF4 files from the netCDF FTP site
# for testing, then runs the tst_interops3 program on the downloaded
# files to check that HDF4 reading works.

# Ed Hartnett

if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh
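# test_common.sh (part of the netCDF test suite) is expected to define
# shared test variables such as ${execdir}, which is used below to
# locate the compiled test program.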

# Uncomment for quiet wget operation.
#Q=-q

# Get one gzipped file from the FTP site, retrying several times.
# Usage: getfile <basename>  (fetches <basename>.gz into the current dir)
getfile() {
   FTPFILE="ftp://ftp.unidata.ucar.edu/pub/netcdf/sample_data/hdf4/$1.gz"

   for try in 1 2 3 4 ; do # try up to 4 times

     # Try wget first (-c resumes a partial download); fall back to curl.
     if wget -c $Q --passive-ftp "$FTPFILE" || curl -O "$FTPFILE" ; then
       return 0 # got it
     fi
     echo "download failed: try $try"
     sleep 5 # seconds
   done
   return 1 # did not get it
}

set -e
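# From this point on, any unguarded command failure aborts the script.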
echo ""
echo "Getting HDF4 sample files from Unidata FTP site..."

file_list="AMSR_E_L2_Rain_V10_200905312326_A.hdf AMSR_E_L3_DailyLand_V06_20020619.hdf \
    MYD29.A2009152.0000.005.2009153124331.hdf MYD29.A2002185.0000.005.2007160150627.hdf \
    MOD29.A2000055.0005.005.2006267200024.hdf"
echo "Getting HDF4 test files $file_list"

for f1 in $file_list
do
    if ! test -f "$f1"; then
        if getfile "$f1" ; then
            gunzip "$f1.gz"
        else
            echo "Could not ftp $f1.gz"
            exit 1
        fi
    fi
done


echo ""
echo "Running test program to check HDF4 sample files..."
${execdir}/tst_interops3

echo "SUCCESS!!!"

exit 0