File: test.56

package info (click to toggle)
dupd 1.7.3-1
  • links: PTS
  • area: main
  • in suites: forky, sid
  • size: 12,688 kB
  • sloc: ansic: 8,381; sh: 879; makefile: 121; perl: 58
file content (28 lines) | stat: -rwxr-xr-x 745 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
#!/usr/bin/env bash

# dupd regression test 56: scan files6 with --uniques and verify, via the
# database directly, that unique files are recorded exactly once.
# Helper functions/vars (DUPD_CMD, checkrv, check_nreport, tdone) come
# from the sourced 'common' file.

source common

DESC="scan(files6): uniques w/larger files, some differ early"

# Unpack the test fixture into files6/. Guard the cd so a missing
# directory doesn't cause extraction in the wrong place.
cd files6 || exit 1
gunzip -c files.tar.gz | tar xf -
cd ..

# Run the scan; checkrv fails the test on a non-zero exit status.
# NOTE: $DUPD_CMD is intentionally unquoted so it may carry extra args.
$DUPD_CMD scan --path "$(pwd)/files6" --uniques > /dev/null
checkrv $?

# Test for a bug where unique files were identified more than once
# when belonging to a size set larger than three (thus forcing the full hash)
# if some of the files could've been discarded early in round 1.
# There is no way to detect this via normal dupd interfaces (that is, all
# report output is correct) so the only way to check is in database directly.

# Dump the files table, strip the absolute path prefix up through 'files6'
# so the report is location-independent, and compare to the expected output.
echo "select * from files;" | sqlite3 ~/.dupd_sqlite | sort | sed -e 's/.*files6//' > nreport

check_nreport output.56

# Clean up the extracted fixture files (guarded cd, same reason as above).
cd files6 || exit 1
rm -f 1 2 3 4 5
cd ..

tdone