This article shares Python code for finding and removing duplicate images, for your reference. The details are as follows.
The script was written as a companion to a web crawler, but it can also be used on its own. Images crawled from the web contain far too many duplicates, so the code detects images with identical content even when their sizes differ, deletes the duplicates, and keeps only the first copy.
# -*- coding: utf-8 -*-
import os

import cv2
import numpy as np


def cmpandremove2(path):
    # Faster variant: hash every image once, then compare the cached hashes.
    dirs = os.listdir(path)
    dirs.sort()
    if len(dirs) <= 0:
        return
    hashes = {}
    for i in dirs:
        prepath = os.path.join(path, i)
        preimg = cv2.imread(prepath)
        if preimg is None:  # skip files OpenCV cannot read as images
            continue
        # 8x8 average hash: shrink, convert to grayscale, then mark each
        # pixel as 1 if it is >= the mean gray value, otherwise 0.
        preresize = cv2.resize(preimg, (8, 8))
        pregray = cv2.cvtColor(preresize, cv2.COLOR_BGR2GRAY)
        premean = cv2.mean(pregray)[0]
        prearr = pregray.flatten()
        for j in range(len(prearr)):
            if prearr[j] >= premean:
                prearr[j] = 1
            else:
                prearr[j] = 0
        print("get", prepath)
        hashes[i] = prearr
    dictkeys = sorted(hashes.keys())
    index = 0
    while True:
        if index >= len(dictkeys):
            break
        curkey = dictkeys[index]
        dellist = []
        print(curkey)
        index2 = index
        while True:
            if index2 >= len(dictkeys):
                break
            j = dictkeys[index2]
            if curkey == j:
                index2 = index2 + 1
                continue
            arr1 = hashes[curkey]
            arr2 = hashes[j]
            # Hamming distance between the two 64-bit hashes.
            diff = 0
            for k in range(len(arr2)):
                if arr1[k] != arr2[k]:
                    diff = diff + 1
            if diff <= 5:  # at most 5 differing bits -> treat as duplicate
                dellist.append(j)
            index2 = index2 + 1
        if len(dellist) > 0:
            for j in dellist:
                file = os.path.join(path, j)
                print("remove", file)
                os.remove(file)
                hashes.pop(j)
            dictkeys = sorted(hashes.keys())
        index = index + 1


def cmpandremove(path):
    # Straightforward variant: re-reads and re-hashes images on every pass.
    index = 0
    flag = 0
    dirs = os.listdir(path)
    dirs.sort()
    if len(dirs) <= 0:
        return 0
    while True:
        if index >= len(dirs):
            break
        prepath = os.path.join(path, dirs[index])
        print(prepath)
        index2 = 0
        preimg = cv2.imread(prepath)
        if preimg is None:
            index = index + 1
            continue
        preresize = cv2.resize(preimg, (8, 8))
        pregray = cv2.cvtColor(preresize, cv2.COLOR_BGR2GRAY)
        premean = cv2.mean(pregray)[0]
        prearr = pregray.flatten()
        for i in range(len(prearr)):
            if prearr[i] >= premean:
                prearr[i] = 1
            else:
                prearr[i] = 0
        removepath = []
        while True:
            if index2 >= len(dirs):
                break
            if index2 != index:
                curpath = os.path.join(path, dirs[index2])
                # print(curpath)
                curimg = cv2.imread(curpath)
                if curimg is None:
                    index2 = index2 + 1
                    continue
                curresize = cv2.resize(curimg, (8, 8))
                curgray = cv2.cvtColor(curresize, cv2.COLOR_BGR2GRAY)
                curmean = cv2.mean(curgray)[0]
                curarr = curgray.flatten()
                for i in range(len(curarr)):
                    if curarr[i] >= curmean:
                        curarr[i] = 1
                    else:
                        curarr[i] = 0
                diff = 0
                for i in range(len(curarr)):
                    if curarr[i] != prearr[i]:
                        diff = diff + 1
                if diff <= 5:
                    print('the same')
                    removepath.append(curpath)
                    flag = 1
            index2 = index2 + 1
        index = index + 1
        if len(removepath) > 0:
            for file in removepath:
                print("remove", file)
                os.remove(file)
            dirs = os.listdir(path)
            dirs.sort()
            if len(dirs) <= 0:
                return 0
            # index = 0
    return flag


path = 'pics/'
cmpandremove(path)
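For readers who want a shorter, self-contained variant of the same idea, the sketch below hashes each image once and treats any image whose 8x8 average hash differs from an already-kept image by at most 5 bits as a duplicate. It is only a minimal sketch, not the author's script: the helper names average_hash, remove_duplicates and the max_diff parameter are illustrative, while the pics/ directory and the threshold of 5 differing bits are taken from the code above.

    # Minimal sketch of the same 8x8 average-hash dedup (assumes OpenCV + NumPy).
    import os

    import cv2
    import numpy as np


    def average_hash(img_path):
        # Returns a flat 64-element 0/1 array, or None if the file is not a readable image.
        img = cv2.imread(img_path)
        if img is None:
            return None
        gray = cv2.cvtColor(cv2.resize(img, (8, 8)), cv2.COLOR_BGR2GRAY)
        return (gray.flatten() >= gray.mean()).astype(np.uint8)


    def remove_duplicates(path, max_diff=5):
        kept = []  # (filename, hash) pairs for the images we keep
        for name in sorted(os.listdir(path)):
            full = os.path.join(path, name)
            h = average_hash(full)
            if h is None:
                continue
            # Duplicate if the Hamming distance to any kept hash is small enough.
            if any(int(np.count_nonzero(h != kh)) <= max_diff for _, kh in kept):
                print("remove", full)
                os.remove(full)
            else:
                kept.append((name, h))


    remove_duplicates('pics/')

If a third-party dependency is acceptable, the imagehash package (not used in the original code) provides a ready-made average hash via imagehash.average_hash(PIL.Image.open(p)), and subtracting two of its hashes gives the same kind of bit-difference count; the sketch above sticks to OpenCV to stay close to the original approach.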
That is all for this article. I hope it is helpful for your learning, and thank you for your continued support.