File: remove-duplicates

package info
recoverjpeg 2.6.3-7
  • area: main
  • in suites: forky, sid
  • size: 232 kB
  • sloc: ansic: 403; cpp: 129; sh: 51; python: 22; makefile: 17
file content (37 lines) | stat: -rwxr-xr-x 820 bytes
#! /usr/bin/python3
#
# Usage: remove-duplicates
#
# Remove duplicates of the same file in the current directory: files are
# grouped by size, files of equal size are compared byte by byte, and
# every copy after the first one found is deleted.
#

import os

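# Compare two files and remove the copy if their contents are identical.
# Files that cannot be read (or have already been removed) are skipped.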
def check_duplicate(orig, copy):
    try:
        # Read both files as bytes so binary content (e.g. JPEG data)
        # compares correctly.
        with open(orig, 'rb') as f1, open(copy, 'rb') as f2:
            if f1.read() == f2.read():
                print("Removing %s which is a copy of %s" % (copy, orig))
                os.unlink(copy)
    except OSError:
        pass

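# Group regular files in the current directory by size: only files of
# equal size can be identical, so this limits the expensive comparison.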
def aggregate():
    d = {}
    for f in os.listdir('.'):
        if not os.path.isfile(f):
            continue
        s = os.stat(f).st_size
        d.setdefault(s, []).append(f)
    return d

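# For each size group, compare every file against the files that follow it
# in the group and delete the identical ones.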
def remove_duplicates(d):
    for v in d.values():
        while v:
            # Keep the first file and check the remaining same-sized
            # files against it.
            orig = v.pop(0)
            for c in v:
                check_duplicate(orig, c)

if __name__ == '__main__':
    remove_duplicates(aggregate())
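
The script compares every pair of same-sized files byte by byte, so a directory with many equal-sized files is read repeatedly. A minimal sketch of a hash-based variant, assuming SHA-256 from Python's hashlib is an acceptable identity check (the function name below is illustrative and not part of recoverjpeg):

import hashlib
import os

def remove_duplicates_by_hash(directory='.'):
    # Map each content digest to the first file seen with it; later files
    # with the same digest are removed as duplicates.
    seen = {}
    for name in sorted(os.listdir(directory)):
        path = os.path.join(directory, name)
        if not os.path.isfile(path):
            continue
        with open(path, 'rb') as f:
            digest = hashlib.sha256(f.read()).hexdigest()
        if digest in seen:
            print("Removing %s which is a copy of %s" % (path, seen[digest]))
            os.unlink(path)
        else:
            seen[digest] = path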