#!/usr/bin/python3
#
# Find duplicate files and print each group of duplicates.
# Copyright 2010  Lars Wirzenius
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.


import hashlib
import os
import sys


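# Strategy: group files by size first, using only cheap stat() calls,
# then checksum files within a size group, since files of different
# sizes cannot have identical contents.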
class DuplicateFileFinder:

    def __init__(self):
        self.by_size = dict()  # file size -> set of pathnames
        self.seen = set()      # (st_dev, st_ino) pairs already collected

    def collect(self, root):
        for dirname, subdirs, filenames in os.walk(root):
            for filename in filenames:
                pathname = os.path.join(dirname, filename)
                stat = os.stat(pathname)
                # Record each inode only once, so that hard links to the
                # same file are never reported as duplicates of each other.
                key = (stat.st_dev, stat.st_ino)
                if key in self.seen:
                    continue
                self.seen.add(key)
                self.by_size.setdefault(stat.st_size, set()).add(pathname)

    def duplicates(self):
        for pathnames in self.by_size.values():
            # A file with a unique size cannot have a duplicate.
            if len(pathnames) < 2:
                continue
            by_checksum = dict()
            for pathname in pathnames:
                checksum = self.file_checksum(pathname)
                by_checksum.setdefault(checksum, set()).add(pathname)
            for names in by_checksum.values():
                if len(names) > 1:
                    yield names

    def file_checksum(self, pathname):
        # Open explicitly and close via the context manager; the Python 2
        # file() builtin no longer exists.
        with open(pathname, 'rb') as f:
            return hashlib.md5(f.read()).digest()


def main():
    dupfinder = DuplicateFileFinder()
    for dirname in sys.argv[1:]:
        dupfinder.collect(dirname)
    for duplicates in dupfinder.duplicates():
        # Print each group as a block of pathnames, one per line,
        # sorted for stable output, with a blank line between groups.
        print('\n'.join(sorted(duplicates)))
        print()


if __name__ == '__main__':
    main()
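
# A minimal usage sketch, assuming the script is saved as "dupfiles" and
# made executable (the directory names are hypothetical):
#
#     ./dupfiles ~/photos /mnt/backup
#
# Each group of identical files is printed as a block of pathnames,
# with a blank line between groups.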