#!/usr/bin/python3
#
# Find duplicate files and either report them or hardlink them together.
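#
# Usage: dupfiles [--make-hardlinks] [--progress] DIR...
#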
# Copyright 2010  Lars Wirzenius
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.


import hashlib
import optparse
import os
import sys
import time


class ProgressReporter(object):
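    """Write throttled, self-erasing progress messages to stderr."""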

    def __init__(self, do_report):
        self.written = ''
        self.when = 0
        self.do_report = do_report
        
    def write(self, msg):
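        """Show msg on stderr, erasing the previously shown message.

        Writes are throttled to at most one per second.
        """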
        if self.do_report and time.time() - self.when >= 1:
            sys.stdout.flush()
            sys.stderr.write('\b \b' * len(self.written))
            msg = msg[:79] # FIXME: use real screen width
            sys.stderr.write(msg)
            sys.stderr.flush()
            self.written = msg
            self.when = time.time()

    def finished(self):
        # Reset the throttle timestamp so the final clearing write always
        # happens, even less than a second after the previous message.
        self.when = 0
        self.write('')


class DuplicateFileFinder(object):
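    """Find duplicate files by grouping by size, then comparing checksums."""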

    def __init__(self, progress):
        self.by_size = dict()
        self.progress = progress
        
    def collect(self, root):
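        """Walk the tree under root and group candidate files by size."""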
        for dirname, subdirs, filenames in os.walk(root):
            self.progress.write(dirname)
            subdirs.sort()
            pathnames = [os.path.join(dirname, f) for f in filenames]
            for pathname in pathnames:
                if not os.path.isfile(pathname):
                    continue # skip dangling symlinks, devices, etc.
                st = os.stat(pathname)
                if st.st_size in self.by_size:
                    inodes, candidates = self.by_size[st.st_size]
                    # Hardlinks to an already recorded inode are the same
                    # file, not duplicates, so record each inode only once.
                    if (st.st_dev, st.st_ino) not in inodes:
                        inodes.add((st.st_dev, st.st_ino))
                        candidates.add(pathname)
                else:
                    self.by_size[st.st_size] = (
                        set([(st.st_dev, st.st_ino)]), set([pathname]))
        self.progress.finished()

    def duplicates(self):
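        """Yield sets of pathnames whose contents are identical."""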
        for _, pathnames in self.by_size.values():
            if len(pathnames) < 2:
                continue # a unique size cannot have duplicates
            by_checksum = dict()
            for pathname in pathnames:
                checksum = self.file_checksum(pathname)
                if checksum not in by_checksum:
                    by_checksum[checksum] = set()
                by_checksum[checksum].add(pathname)
            for names in by_checksum.values():
                if len(names) > 1:
                    yield names

    def file_checksum(self, pathname):
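        """Return the MD5 digest of the file's entire contents.

        Note: this reads the whole file into memory at once.
        """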
        with open(pathname, 'rb') as f:
            return hashlib.md5(f.read()).digest()


def make_hardlinks(duplicates):
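    """Replace all but one of the given duplicates with hardlinks to it."""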
    canonical = duplicates.pop()
    for pathname in duplicates:
        # Link under a temporary name and rename into place, so the
        # original file is not lost if os.link fails (for example,
        # when the files live on different filesystems).
        tempname = pathname + '.dupfiles-tmp'
        os.link(canonical, tempname)
        os.rename(tempname, pathname)


def report(duplicates):
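    """Print one group of duplicate pathnames to stdout, one per line."""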
    sys.stdout.write('\n'.join(sorted(duplicates)))
    sys.stdout.write('\n\n')


def main():
    parser = optparse.OptionParser(usage='%prog [options] DIR...')
    parser.add_option('--make-hardlinks', action='store_true',
                      help='hardlink duplicate files to each other')
    parser.add_option('--progress', action='store_true',
                      help='report progress')

    opts, args = parser.parse_args()

    progress = ProgressReporter(opts.progress)
    dupfinder = DuplicateFileFinder(progress)
    for dirname in sorted(args):
        dupfinder.collect(dirname)
    for duplicates in dupfinder.duplicates():
        if opts.make_hardlinks:
            make_hardlinks(duplicates)
        else:
            report(duplicates)


if __name__ == '__main__':
    main()