import hashlib
import os
def getFileMd5(filename):
    """Return the hex MD5 digest of *filename*.

    Reads the file in 8096-byte chunks so arbitrarily large files can be
    hashed without loading them into memory. Returns ``None`` (implicitly)
    when *filename* is not a regular file.
    """
    if not os.path.isfile(filename):
        return
    digest = hashlib.md5()
    with open(filename, 'rb') as stream:
        # iter() with a sentinel yields chunks until read() returns b''.
        for chunk in iter(lambda: stream.read(8096), b''):
            digest.update(chunk)
    return digest.hexdigest()
def print_progress_bar(description, iteration_counter, total_items, progress_bar_length=20):
    """Render a single-line text progress bar on stdout.

    Rewrites the current terminal line (via ``\\r``) on each call; when
    ``iteration_counter`` reaches ``total_items`` a final newline is printed
    to finish the bar.

    :param description: label printed before the bar
    :param iteration_counter: number of items completed so far
    :param total_items: total number of items to process
    :param progress_bar_length: width of the bar in characters (default 20)
    """
    import sys
    # Guard against ZeroDivisionError: an empty job counts as fully complete.
    percent = float(iteration_counter) / total_items if total_items else 1.0
    hashes = '>' * int(round(percent * progress_bar_length))
    spaces = ' ' * (progress_bar_length - len(hashes))
    sys.stdout.write("\r{0}: [{1}] {2}%".format(description, hashes + spaces, int(round(percent * 100))))
    sys.stdout.flush()
    if total_items == iteration_counter:
        print("\r")
def listdirs(f, i=1, dirs=None):
    """Recursively list all entries under directory *f* as an indented tree.

    Each entry is recorded as ``'-' * depth + name``; the depth prefix grows
    by 2 dashes per nesting level, so the returned flat list reads like a
    tree listing.

    :param f: directory path to walk
    :param i: current dash-prefix width (callers normally leave the default)
    :param dirs: accumulator list shared across recursive calls
    :return: list of prefixed entry names, in ``os.listdir`` order
    """
    if dirs is None:
        dirs = []
    for entry in os.listdir(f):
        tmp_path = os.path.join(f, entry)
        # Both files and directories are recorded the same way, so the
        # original duplicated append in if/else collapses to one statement.
        # os.path.basename is portable, unlike splitting on a literal '/'.
        dirs.append('-' * i + os.path.basename(tmp_path))
        if os.path.isdir(tmp_path):
            # Descend with a wider prefix to show nesting.
            listdirs(tmp_path, i + 2, dirs=dirs)
    return dirs
import os
import os.path
def get_total_size(path, list1=None):
    """Return the total size in bytes of every file under *path*, recursively.

    Directories are descended into; individual file sizes are appended to
    the shared accumulator *list1*, and the sum of that accumulator is
    returned.

    :param path: directory whose contents are measured
    :param list1: accumulator of file sizes shared across recursive calls
    :return: sum of all collected file sizes in bytes
    """
    if list1 is None:
        list1 = []
    for entry in os.listdir(path):
        full_path = os.path.join(path, entry)
        if os.path.isdir(full_path):
            # Recurse into subdirectories, reusing the same accumulator.
            get_total_size(full_path, list1)
        elif os.path.isfile(full_path):
            list1.append(os.path.getsize(full_path))
    return sum(list1)