File: 021_multi_url_update

package info (click to toggle)
fsvs 1.2.7-1
  • links: PTS, VCS
  • area: main
  • in suites: bullseye, buster, sid, stretch
  • size: 2,964 kB
  • ctags: 1,464
  • sloc: ansic: 16,650; sh: 5,885; perl: 783; makefile: 338; python: 90
file content (116 lines) | stat: -rwxr-xr-x 2,478 bytes parent folder | download | duplicates (3)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
#!/bin/bash

# How many parallel working copies are used
DATA_WCs=4
# Which working copy is used for updating
UP_WC=`expr $DATA_WCs + 1`
# Which working copy gets the data per rsync
CMP_WC=`expr $UP_WC + 1`

set -e

$PREPARE_CLEAN WC_COUNT=$CMP_WC > /dev/null
$INCLUDE_FUNCS

logfile=$LOGDIR/021.multiurl.log

# Populate each data working copy: create its repository URL, commit
# entries private to that URL plus entries in a "common" directory that
# exists in every URL (so the update has to merge them).
for i in $(seq 1 $DATA_WCs)
do
	cd "$WCBASE$i"

	tu=$REPURL/$i
	svn mkdir "$tu" -m "$i"
	printf '%s\n' "$tu" | $BINq urls load
	mkdir dir-$i common common/sdir-$i

	# Unique random content so a wrong-URL file is caught by the compare.
	echo $RANDOM | tee bfile-$i dir-$i/dfile-$i
	echo $RANDOM | tee common/cfile-$i common/sdir-$i/csfile-$i
	# Same path in every URL; URL priority decides which copy wins.
	echo "Overlay $i" > overlayed

	$BINq ci -m "ci$i"
done

cd "$TESTBASE"


# In the first run we do an update, the others do changes.
# Check which URL each working-copy entry is attributed to.
#
# Arguments: pairs of (path, expected URL number); any number of pairs.
# Reads:     $BINdflt, $REPURL, $logfile, $ERROR (test framework env).
# Writes:    $logfile - fsvs status output with one URL per entry.
#
# For each pair a line ending in "$REPURL/<expected>/<path>" must exist
# in the status output; otherwise the entry's actual line is printed
# for diagnosis and $ERROR aborts the test.
function CheckURL
{
	$BINdflt st -o verbose=none,url > "$logfile"

	while [[ $# -ne 0 ]]
	do
		path="$1"
		exp="$2"
		shift 2
		if ! grep "$REPURL/$exp/$path"'$' "$logfile"
		then
			# Show the URL the entry really belongs to; "|| true" keeps a
			# completely missing path from tripping set -e before $ERROR.
			grep "/$path"'$' "$logfile" || true
			$ERROR "Expected $path to be in URL $exp"
		fi
	done
}

# The update working copy; exported so helper programs see it too.
export _WC=$WCBASE$UP_WC
# make -C $TEST_PROG_DIR prepare_wc > /dev/null
cd $_WC

# Run one update per possible highest-priority URL, rotating which data
# WC wins, and verify each entry comes from the expected URL.
# A nice side-effect is that URL 4 has the highest priority afterwards.
for prio_has in `seq 1 $DATA_WCs`
do
	$INFO "Going with prio_has=$prio_has"

	# Start from an empty WC and drop the stored entry list ("dir" spool
	# file) so the update is performed from scratch each round.
	rm -rf ./*
	rm -f `$PATH2SPOOL $_WC dir`

	# Construct the URL list and build the compare-directory
	parm=--delete
	true | $BINq urls load
	for i in `seq 1 $DATA_WCs`
	do
# rotate the highest-priority URL
	  nr=`perl -e 'print 1+(shift()-1+shift()-1) % shift()' $prio_has $i $DATA_WCs`
		$BINq urls N:u$nr,P:$i,$REPURL/$nr

		# Build the expected tree by layering the data WCs in priority
		# order: the first rsync may delete, later ones only add entries
		# that don't exist yet (--ignore-existing), mirroring URL priority.
		# We need to give the checksum parameter, so that rsync isn't misled by 
		# the equal mtimes.
		rsync -a $parm $WCBASE$nr/ $WCBASE$CMP_WC/ -c -c
		parm=--ignore-existing
	done

	# Multi-URL update, then compare against the rsync-built expectation.
	$BINdflt up > $logfile
	$COMPARE -d $WCBASE$UP_WC/ $WCBASE$CMP_WC/

	# Spot-check URL attribution: private entries stay with their URL,
	# shared entries ("common", "overlayed") go to the priority winner.
	CheckURL dir-1 1 dir-3 3 common $prio_has common/sdir-2 2 common/cfile-3 3
done

$SUCCESS "Priorities are taken into account."


# Test what happens to entries in common directories, if such a directory 
# gets removed.
# URL 2's whole tree is committed away; the update WC must afterwards
# show the common directory as merged from the remaining URLs only.
without=2
cd $WCBASE$without
rm -rf ./*
$BINq ci -m remove $WCBASE$without 

# Rebuild the expected tree like above, but skip the emptied URL.
# The URL numbers come from "urls dump" ("%n" gives e.g. "u3"; cut
# strips the leading "u"), so the list reflects the current priorities.
parm=--delete
cd $_WC
for i in `$BINdflt urls dump "%n\n" | cut -c2-`
do
	echo "Sync $i"
	# We need to give the checksum parameter, so that rsync isn't misled by 
	# the equal mtimes.
	if [[ $i -ne $without ]]
	then
		rsync -a $parm $WCBASE$i/ $WCBASE$CMP_WC/ -c -c
	fi
	parm=--ignore-existing
done

# Update and verify the WC matches the expectation built without URL 2.
$BINdflt up $_WC > $logfile 
$COMPARE -d $_WC/ $WCBASE$CMP_WC/

$SUCCESS "Multi-url update test passed."