File: largefilesupport.sh (rdfind 1.4.1-1)

#!/bin/sh
#This makes sure we can handle files bigger than 2^31-1 bytes

set -e
. "$(dirname "$0")/common_funcs.sh"

reset_teststate

#create a large file, sparse.
filesizem1=2147483647 #size, in bytes. This is no problem.
filesize=$(($filesizem1+1)) #size, in bytes. This is a problematic value.
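#note: $filesizem1 (2^31-1) is the largest value that fits in a signed
#32-bit integer, so a file of $filesize bytes (2 GiB) needs 64-bit file
#offsets (large file support) to be handled correctly.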

#below, dd is used and the file is appended to afterwards, to avoid problems
#on Hurd, which currently (20130619) cannot take $filesize as an argument to
#dd without complaining and erroring out.
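#(alternative sketch, assuming GNU coreutils is available: running
#"truncate -s $filesize sparse-file1" after the echo below would grow the
#file to the same sparse size in one step)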

#make two files which differ at the first byte, so that
#rdfind returns fast after comparing the initial part.
echo "a">sparse-file1
echo "b">sparse-file2
dd if=/dev/null of=sparse-file1 bs=1 seek=$filesizem1 count=1
dd if=/dev/null of=sparse-file2 bs=1 seek=$filesizem1 count=1
head -c1 /dev/zero >>sparse-file1
head -c1 /dev/zero >>sparse-file2
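#each file should now be exactly $filesize bytes: the echo wrote a differing
#first byte, dd extended the file to $filesizem1 bytes (sparse), and the
#final head appended one more byte.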
#let the filesystem settle
sync

#now run rdfind on the local files. Move them to a subdirectory first,
#to prevent rdfind from reading its own results file or rdfind.out.
mkdir subdir
mv sparse-file* subdir
$rdfind subdir  2>&1 |tee rdfind.out
dbgecho "rdfind ran ok."

#make sure rdfind.out contains the right size
grep -q "^Total size is $((filesize*2)) bytes" rdfind.out

#make sure no duplicates were found, i.e. nothing could be reduced
grep -q "^It seems like you have 0 files that are not unique$" rdfind.out