#!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import print_function
import os, sys, hashlib
class diskwalk(object):
    """Recursively collect every file path beneath a root directory."""

    def __init__(self, path):
        # Root directory that paths() will walk.
        self.path = path

    def paths(self):
        """Return a list of full paths for every file under self.path.

        Directories themselves are not included, only files; the walk
        order follows os.walk (top-down).
        """
        # Comprehension replaces the original append loop (same result).
        return [
            os.path.join(dirpath, name)
            for dirpath, _dirnames, filenames in os.walk(self.path)
            for name in filenames
        ]
def create_checksum(path):
    """Return the MD5 digest (raw bytes) of the file at *path*.

    The file is read in 8 KiB chunks so arbitrarily large files can be
    hashed without loading them fully into memory.

    Raises whatever open()/read() raises (e.g. IOError/OSError).
    """
    checksum = hashlib.md5()
    # ``with`` guarantees the handle is closed even if read() raises,
    # fixing the leak in the original open()/close() pair.  The chunk
    # variable also no longer shadows the ``buffer`` builtin.
    with open(path, 'rb') as fp:
        chunk = fp.read(8192)
        while chunk:
            checksum.update(chunk)
            chunk = fp.read(8192)
    return checksum.digest()
def findDupes(path):
    """Find duplicate files under *path*.

    Two files are considered duplicates when both their size and their
    MD5 digest match.  The first file seen with a given signature is
    printed and remembered; every later match is recorded as a duplicate.

    Returns a dict mapping each duplicate's path to the path of the
    first-seen file with the same (size, checksum) signature.
    """
    seen = {}        # (size, md5 digest) -> first path observed
    duplicates = {}  # duplicate path -> original path
    for candidate in diskwalk(path).paths():
        signature = (os.path.getsize(candidate), create_checksum(candidate))
        original = seen.get(signature)
        if original is None:
            # First time this signature appears: remember and report it.
            seen[signature] = candidate
            print(candidate)
        else:
            duplicates[candidate] = original
    return duplicates
class deletefile(object):
    """Wrap a single file path with delete / dry-run / interactive ops."""

    def __init__(self, file_name):
        # Path of the file this instance manages.
        self.file_name = file_name

    def dryrun(self):
        """Report what would be deleted without touching the disk."""
        print("Dry Run: %s [NOT DELETED]" % self.file_name)

    def delete(self):
        """Announce, then remove the file from disk."""
        print("Deleting %s" % self.file_name)
        os.remove(self.file_name)

    def interactive(self):
        # NOTE(review): despite its name, this method never prompts the
        # user — it deletes immediately and then reports.  The printed
        # message is Chinese for "Deleting file: %s".
        os.remove(self.file_name)
        print("删除文件: %s" % self.file_name)
        return
def main():
    """Find duplicate files under a directory and delete each duplicate.

    The directory may be supplied as the first command-line argument;
    when absent, the historical hard-coded default path is used so
    existing invocations keep working.
    """
    # Generalization: allow the target directory via argv instead of
    # only the hard-coded Windows path.
    if len(sys.argv) > 1:
        directory_to_check = sys.argv[1]
    else:
        directory_to_check = r"D:\小说合集\小夜监管所B1qq群\202301"
    duplicate_file = findDupes(directory_to_check)
    for each_file in duplicate_file:
        # NOTE(review): interactive() deletes without prompting the user.
        delete = deletefile(each_file)
        delete.interactive()

if __name__ == '__main__':
    main()
# Purpose: remove duplicate files within a folder (file deduplication).
# (The two trailing lines were pasted blog text — Chinese title and a
# "latest recommended article published 2023-10-24 09:36:42" footer —
# which were not valid Python; preserved here as comments.)