File: hardlinktest.py

package info (click to toggle)
rdiff-backup 2.2.6-3
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid, trixie
  • size: 4,640 kB
  • sloc: python: 24,129; javascript: 9,512; sh: 1,230; ansic: 580; makefile: 36
file content (431 lines) | stat: -rw-r--r-- 19,240 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
import os
import time
import unittest

from commontest import (
    abs_test_dir, abs_output_dir, old_test_dir, re_init_rpath_dir,
    compare_recursive, BackupRestoreSeries, InternalBackup, InternalRestore,
    MakeOutputDir, reset_hardlink_dicts, xcopytree
)
import commontest as comtst
import fileset

from rdiff_backup import Globals, Hardlink, rpath, selection
from rdiffbackup.meta import stdattr


class HardlinkTest(unittest.TestCase):
    """Test cases for Hard links"""
    # NOTE: these two statements run at class-definition time, i.e. the
    # output directory is (re)initialized as a side effect of importing
    # this module, before any test method executes.
    outputrp = rpath.RPath(Globals.local_connection, abs_output_dir)
    re_init_rpath_dir(outputrp)

    # Pre-built fixture directories from the old test data tree.
    # dir1copy is presumably hardlink-identical to dir1, while dir2
    # links the same file names differently (see testEquality) —
    # confirm against the testfiles archive.
    hlinks_dir = os.path.join(old_test_dir, b"hardlinks")
    hlinks_dir1 = os.path.join(hlinks_dir, b"dir1")
    hlinks_dir1copy = os.path.join(hlinks_dir, b"dir1copy")
    hlinks_dir2 = os.path.join(hlinks_dir, b"dir2")
    hlinks_dir3 = os.path.join(hlinks_dir, b"dir3")
    hlinks_rp1 = rpath.RPath(Globals.local_connection, hlinks_dir1)
    hlinks_rp1copy = rpath.RPath(Globals.local_connection, hlinks_dir1copy)
    hlinks_rp2 = rpath.RPath(Globals.local_connection, hlinks_dir2)
    hlinks_rp3 = rpath.RPath(Globals.local_connection, hlinks_dir3)
    # Known test payload and its SHA-1, used to validate hashes stored
    # in the mirror metadata.
    hello_str = "Hello, world!"
    hello_str_hash = "943a702d06f34599aee1f8da8ef9f7296031d699"

    def testEquality(self):
        """Test rorp_eq function in conjunction with compare_recursive"""
        # dir1 and its copy must compare equal with default settings
        self.assertTrue(compare_recursive(self.hlinks_rp1, self.hlinks_rp1copy))
        # dir1 vs dir2: equal when hardlink structure is ignored...
        self.assertTrue(compare_recursive(self.hlinks_rp1,
                                          self.hlinks_rp2,
                                          compare_hardlinks=None))
        # ...but different once hardlink grouping is taken into account
        self.assertFalse(compare_recursive(
            self.hlinks_rp1, self.hlinks_rp2, compare_hardlinks=1))

    def testBuildingDict(self):
        """See if the partial inode dictionary is correct"""
        Globals.preserve_hardlinks = 1
        reset_hardlink_dicts()
        # Feed every selected path into the Hardlink inode index
        for dsrp in selection.Select(self.hlinks_rp3).get_select_iter():
            Hardlink.add_rorp(dsrp)

        # dir3 is expected to contain exactly 3 distinct multi-linked inodes
        self.assertEqual(len(list(Hardlink._inode_index.keys())), 3)

    def testCompletedDict(self):
        """See if the hardlink dictionaries are built correctly"""
        # Adding then immediately deleting each rorp must leave the
        # index empty once every link of every inode has been seen.
        reset_hardlink_dicts()
        for dsrp in selection.Select(self.hlinks_rp1).get_select_iter():
            Hardlink.add_rorp(dsrp)
            Hardlink.del_rorp(dsrp)
        self.assertEqual(Hardlink._inode_index, {})

        # Same invariant for the differently-linked dir2
        reset_hardlink_dicts()
        for dsrp in selection.Select(self.hlinks_rp2).get_select_iter():
            Hardlink.add_rorp(dsrp)
            Hardlink.del_rorp(dsrp)
        self.assertEqual(Hardlink._inode_index, {})

    def testSeries(self):
        """Test hardlink system by backing up and restoring a few dirs"""
        dirlist = [
            self.hlinks_dir1, self.hlinks_dir2, self.hlinks_dir3,
            os.path.join(old_test_dir, b'various_file_types')
        ]
        # Run the series twice: once with default remote/local settings
        # (None), once forcing both ends (1, 1); hardlinks compared both times
        BackupRestoreSeries(None, None, dirlist, compare_hardlinks=1)
        BackupRestoreSeries(1, 1, dirlist, compare_hardlinks=1)

    def testInnerRestore(self):
        """Restore part of a dir, see if hard links preserved"""
        MakeOutputDir()
        output = rpath.RPath(Globals.local_connection, abs_output_dir)
        hlout1_dir = os.path.join(abs_test_dir, b"out_hardlink1")
        hlout2_dir = os.path.join(abs_test_dir, b"out_hardlink2")

        # Now set up directories out_hardlink1 and out_hardlink2
        hlout1 = rpath.RPath(Globals.local_connection, hlout1_dir)
        if hlout1.lstat():
            hlout1.delete()
        hlout1.mkdir()
        hlout1_sub = hlout1.append("subdir")
        hlout1_sub.mkdir()
        hl1_1 = hlout1_sub.append("hardlink1")
        hl1_2 = hlout1_sub.append("hardlink2")
        hl1_3 = hlout1_sub.append("hardlink3")
        hl1_4 = hlout1_sub.append("hardlink4")
        # 1 and 2 are hard linked, as are 3 and 4
        hl1_1.touch()
        hl1_2.hardlink(hl1_1.path)
        hl1_3.touch()
        hl1_4.hardlink(hl1_3.path)

        # Second source tree: same file names, but the link pairing is
        # rearranged so the second backup records a changed link layout.
        hlout2 = rpath.RPath(Globals.local_connection, hlout2_dir)
        if hlout2.lstat():
            hlout2.delete()
        xcopytree(hlout1_dir, hlout2_dir)
        hlout2_sub = hlout2.append("subdir")
        hl2_1 = hlout2_sub.append("hardlink1")
        hl2_2 = hlout2_sub.append("hardlink2")
        hl2_3 = hlout2_sub.append("hardlink3")
        hl2_4 = hlout2_sub.append("hardlink4")
        # Now 2 and 3 are hard linked, also 1 and 4
        rpath.copy_with_attribs(hl1_1, hl2_1)
        rpath.copy_with_attribs(hl1_2, hl2_2)
        hl2_3.delete()
        hl2_3.hardlink(hl2_2.path)
        hl2_4.delete()
        hl2_4.hardlink(hl2_1.path)
        # Keep the subdir's own attributes identical between the trees
        rpath.copy_attribs(hlout1_sub, hlout2_sub)

        # Now try backing up twice, making sure hard links are preserved
        InternalBackup(1, 1, hlout1.path, output.path)
        out_subdir = output.append("subdir")
        self.assertEqual(out_subdir.append("hardlink1").getinode(),
                         out_subdir.append("hardlink2").getinode())
        self.assertEqual(out_subdir.append("hardlink3").getinode(),
                         out_subdir.append("hardlink4").getinode())
        self.assertNotEqual(out_subdir.append("hardlink1").getinode(),
                            out_subdir.append("hardlink3").getinode())

        # Sleep so the second backup gets a strictly later timestamp
        time.sleep(1)
        InternalBackup(1, 1, hlout2.path, output.path)
        out_subdir.setdata()
        # Mirror must now reflect the rearranged pairing (1-4 and 2-3)
        self.assertEqual(out_subdir.append("hardlink1").getinode(),
                         out_subdir.append("hardlink4").getinode())
        self.assertEqual(out_subdir.append("hardlink2").getinode(),
                         out_subdir.append("hardlink3").getinode())
        self.assertNotEqual(out_subdir.append("hardlink1").getinode(),
                            out_subdir.append("hardlink2").getinode())

        # Now try restoring, still checking hard links.
        sub_dir = os.path.join(abs_output_dir, b"subdir")
        out2_dir = os.path.join(abs_test_dir, b"out2")
        out2 = rpath.RPath(Globals.local_connection, out2_dir)
        hlout1 = out2.append("hardlink1")
        hlout2 = out2.append("hardlink2")
        hlout3 = out2.append("hardlink3")
        hlout4 = out2.append("hardlink4")

        # Restore as of time 1: should reproduce the first layout (1-2, 3-4)
        if out2.lstat():
            out2.delete()
        InternalRestore(1, 1, sub_dir, out2_dir, 1)
        out2.setdata()
        for rp in [hlout1, hlout2, hlout3, hlout4]:
            rp.setdata()
        self.assertEqual(hlout1.getinode(), hlout2.getinode())
        self.assertEqual(hlout3.getinode(), hlout4.getinode())
        self.assertNotEqual(hlout1.getinode(), hlout3.getinode())

        # Restore as of "now": should reproduce the second layout (1-4, 2-3)
        if out2.lstat():
            out2.delete()
        InternalRestore(1, 1, sub_dir, out2_dir, int(time.time()))
        out2.setdata()
        for rp in [hlout1, hlout2, hlout3, hlout4]:
            rp.setdata()
        self.assertEqual(hlout1.getinode(), hlout4.getinode())
        self.assertEqual(hlout2.getinode(), hlout3.getinode())
        self.assertNotEqual(hlout1.getinode(), hlout2.getinode())

    def extract_metadata(self, metadata_rp):
        """Return lists of hashes and hardlink counts in the metadata_rp"""
        hashes = []
        link_counts = []
        # open() needs to know whether the increment file is compressed
        comp = metadata_rp.isinccompressed()
        extractor = stdattr.AttrExtractor(metadata_rp.open("r", comp))
        for rorp in extractor.iterate():
            link_counts.append(rorp.getnumlinks())
            # Not every entry carries a sha1 (dirs, non-first links);
            # record None so positions line up with link_counts
            if rorp.has_sha1():
                hashes.append(rorp.get_sha1())
            else:
                hashes.append(None)
        return (hashes, link_counts)

    def test_adding_hardlinks(self):
        """Test the addition of a new hardlinked file.

        This test is directed at some previously buggy code that 1) failed to
        keep the correct number of hardlinks in the mirror metadata, and 2)
        failed to restore hardlinked files so that they are linked the same as
        when they were backed up. One of the conditions that triggered these
        bugs included adding a new hardlinked file somewhere in the middle of a
        list of previously linked files.  The bug was originally reported here:
        https://savannah.nongnu.org/bugs/?26848
        """

        # Setup initial backup
        MakeOutputDir()
        output = rpath.RPath(Globals.local_connection, abs_output_dir)
        hlsrc_dir = os.path.join(abs_test_dir, b"src_hardlink")

        hlsrc = rpath.RPath(Globals.local_connection, hlsrc_dir)
        if hlsrc.lstat():
            hlsrc.delete()
        hlsrc.mkdir()
        hlsrc_sub = hlsrc.append("subdir")
        hlsrc_sub.mkdir()
        # hardlink1 and hardlink3 are linked; the gap at "hardlink2" is
        # filled later to hit the middle-of-series case described above
        hl_file1 = hlsrc_sub.append("hardlink1")
        hl_file1.write_string(self.hello_str)
        hl_file3 = hlsrc_sub.append("hardlink3")
        hl_file3.hardlink(hl_file1.path)

        # Back up at fixed time 10000 so restores can address it later
        InternalBackup(1, 1, hlsrc.path, output.path, 10000)
        out_subdir = output.append("subdir")
        self.assertEqual(out_subdir.append("hardlink1").getinode(),
                         out_subdir.append("hardlink3").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        meta_prefix = rpath.RPath(
            Globals.local_connection,
            os.path.join(abs_output_dir, b"rdiff-backup-data",
                         b"mirror_metadata"))
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 1)
        metadata_rp = incs[0]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink3
        expected_hashes = [None, None, self.hello_str_hash, None]
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 2, 2]
        self.assertEqual(expected_link_counts, link_counts)

        # Create a new hardlinked file between "hardlink1" and "hardlink3" and perform another backup
        hl_file2 = hlsrc_sub.append("hardlink2")
        hl_file2.hardlink(hl_file1.path)

        InternalBackup(1, 1, hlsrc.path, output.path, 20000)
        self.assertEqual(out_subdir.append("hardlink1").getinode(),
                         out_subdir.append("hardlink2").getinode())
        self.assertEqual(out_subdir.append("hardlink1").getinode(),
                         out_subdir.append("hardlink3").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 2)
        # get_incfiles_list() order is not relied upon: pick the snapshot
        # (current mirror metadata) rather than the diff increment
        if incs[0].getinctype() == b'snapshot':
            metadata_rp = incs[0]
        else:
            metadata_rp = incs[1]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir/, ./subdir/hardlink1, ./subdir/hardlink2, ./subdir/hardlink3
        expected_hashes = [None, None, self.hello_str_hash, None, None]
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 3, 3, 3]
        # The following assertion would fail as a result of bugs that are now fixed
        self.assertEqual(expected_link_counts, link_counts)

        # Now try restoring, still checking hard links.
        sub_path = os.path.join(abs_output_dir, b"subdir")
        restore_path = os.path.join(abs_test_dir, b"hl_restore")
        restore_dir = rpath.RPath(Globals.local_connection, restore_path)
        hlrestore_file1 = restore_dir.append("hardlink1")
        hlrestore_file2 = restore_dir.append("hardlink2")
        hlrestore_file3 = restore_dir.append("hardlink3")

        # Restore the first backup (time 10000): only files 1 and 3 exist
        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 10000)
        for rp in [hlrestore_file1, hlrestore_file3]:
            rp.setdata()
        self.assertEqual(hlrestore_file1.getinode(), hlrestore_file3.getinode())

        # Restore the second backup (time 20000): all three must share an inode
        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 20000)
        for rp in [hlrestore_file1, hlrestore_file2, hlrestore_file3]:
            rp.setdata()
        self.assertEqual(hlrestore_file1.getinode(), hlrestore_file2.getinode())
        # The following assertion would fail as a result of bugs that are now fixed
        self.assertEqual(hlrestore_file1.getinode(), hlrestore_file3.getinode())

    def test_moving_hardlinks(self):
        """Test moving the first hardlinked file in a series to later place in the series.

        This test is directed at some previously buggy code that failed to
        always keep a sha1 hash in the metadata for the first (and only the
        first) file among a series of linked files. The condition that
        triggered this bug involved removing the first file from a list of
        linked files, while also adding a new file at some later position in
        the list. The total number of hardlinked files in the list remains
        unchanged.  None of the files had a sha1 hash saved in its metadata.
        The bug was originally reported here:
        https://savannah.nongnu.org/bugs/?26848
        """

        # Setup initial backup
        MakeOutputDir()
        output = rpath.RPath(Globals.local_connection, abs_output_dir)
        hlsrc_dir = os.path.join(abs_test_dir, b"src_hardlink")

        hlsrc = rpath.RPath(Globals.local_connection, hlsrc_dir)
        if hlsrc.lstat():
            hlsrc.delete()
        hlsrc.mkdir()
        hlsrc_sub = hlsrc.append("subdir")
        hlsrc_sub.mkdir()
        hl_file1 = hlsrc_sub.append("hardlink1")
        hl_file1.write_string(self.hello_str)
        hl_file2 = hlsrc_sub.append("hardlink2")
        hl_file2.hardlink(hl_file1.path)

        InternalBackup(1, 1, hlsrc.path, output.path, 10000)
        out_subdir = output.append("subdir")
        self.assertEqual(out_subdir.append("hardlink1").getinode(),
                         out_subdir.append("hardlink2").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        meta_prefix = rpath.RPath(
            Globals.local_connection,
            os.path.join(abs_output_dir, b"rdiff-backup-data",
                         b"mirror_metadata"))
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 1)
        metadata_rp = incs[0]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink2
        expected_hashes = [None, None, self.hello_str_hash, None]
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 2, 2]
        self.assertEqual(expected_link_counts, link_counts)

        # Move the first hardlinked file to be last
        hl_file3 = hlsrc_sub.append("hardlink3")
        rpath.rename(hl_file1, hl_file3)

        InternalBackup(1, 1, hlsrc.path, output.path, 20000)
        self.assertEqual(out_subdir.append("hardlink2").getinode(),
                         out_subdir.append("hardlink3").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 2)
        # pick the snapshot increment regardless of listing order
        if incs[0].getinctype() == b'snapshot':
            metadata_rp = incs[0]
        else:
            metadata_rp = incs[1]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir/, ./subdir/hardlink2, ./subdir/hardlink3
        expected_hashes = [None, None, self.hello_str_hash, None]
        # The following assertion would fail as a result of bugs that are now fixed
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 2, 2]
        self.assertEqual(expected_link_counts, link_counts)

        # Now try restoring, still checking hard links.
        sub_path = os.path.join(abs_output_dir, b"subdir")
        restore_path = os.path.join(abs_test_dir, b"hl_restore")
        restore_dir = rpath.RPath(Globals.local_connection, restore_path)
        hlrestore_file1 = restore_dir.append("hardlink1")
        hlrestore_file2 = restore_dir.append("hardlink2")
        hlrestore_file3 = restore_dir.append("hardlink3")

        # Restore the first backup: hardlink1 and hardlink2 are linked
        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 10000)
        for rp in [hlrestore_file1, hlrestore_file2]:
            rp.setdata()
        self.assertEqual(hlrestore_file1.getinode(), hlrestore_file2.getinode())

        # Restore the second backup: hardlink2 and hardlink3 are linked
        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 20000)
        for rp in [hlrestore_file2, hlrestore_file3]:
            rp.setdata()
        self.assertEqual(hlrestore_file2.getinode(), hlrestore_file3.getinode())


class BackupUnchangedHardlinksTest(unittest.TestCase):
    """
    Test that rdiff-backup doesn't moan about moving hardlinks over same inode
    """

    def setUp(self):
        """Create two identical hardlinked source filesets and clean targets.

        from1 and from2 have the same layout: fileA and linkA share one
        inode, fileB is an unrelated canary file.
        """
        self.base_dir = os.path.join(comtst.abs_test_dir,
                                     b"hardlink_unchanged")
        self.from1_struct = {
            "from1": {"contents": {
                "fileA": {"content": "initial", "inode": "fileA"},
                "linkA": {"inode": "fileA"},
                "fileB": {},  # just as canary
            }}
        }
        self.from1_path = os.path.join(self.base_dir, b"from1")
        self.from2_struct = {
            "from2": {"contents": {
                "fileA": {"content": "initial", "inode": "fileA"},
                "linkA": {"inode": "fileA"},
                "fileB": {},  # just as canary
            }}
        }
        self.from2_path = os.path.join(self.base_dir, b"from2")
        fileset.create_fileset(self.base_dir, self.from1_struct)
        fileset.create_fileset(self.base_dir, self.from2_struct)
        # make sure target directories from previous runs are gone
        fileset.remove_fileset(self.base_dir, {"bak": {"type": "dir"}})
        fileset.remove_fileset(self.base_dir, {"to1": {"type": "dir"}})
        fileset.remove_fileset(self.base_dir, {"to2": {"type": "dir"}})
        self.bak_path = os.path.join(self.base_dir, b"bak")
        self.to1_path = os.path.join(self.base_dir, b"to1")
        self.to2_path = os.path.join(self.base_dir, b"to2")
        # tearDown only cleans up when this has been flipped to True
        self.success = False

    def test_backup_unchanged_hardlinks(self):
        """Back up twice; the 2nd run must not warn about renaming over
        the same inode even though the hardlinked content is unchanged."""
        # we backup twice to the same backup repository at different times
        self.assertEqual(comtst.rdiff_backup_action(
            False, False, self.from1_path, self.bak_path,
            ("--api-version", "201", "--current-time", "10000"),
            b"backup", ()), 0)
        self.assertNotIn(
            b"Attempt to rename over same inode:",
            comtst.rdiff_backup_action(
                False, True, self.from2_path, self.bak_path,
                ("--api-version", "201", "--current-time", "20000"),
                b"backup", (), return_stderr=True))
        # Mark the test as successful so tearDown cleans up; previously
        # this flag was never set and the cleanup branch was unreachable.
        self.success = True

    def tearDown(self):
        # we clean-up only if the test was successful
        if self.success:
            fileset.remove_fileset(self.base_dir, self.from1_struct)
            fileset.remove_fileset(self.base_dir, self.from2_struct)
            fileset.remove_fileset(self.base_dir, {"bak": {"type": "dir"}})
            fileset.remove_fileset(self.base_dir, {"to1": {"type": "dir"}})
            fileset.remove_fileset(self.base_dir, {"to2": {"type": "dir"}})


if __name__ == "__main__":
    # Discover and run all test cases in this module when executed directly.
    unittest.main()