您的位置:首页 > 其它

一个简单获取网页图片并压缩保存的程序

2015-11-07 17:22 549 查看
import urllib.request as    urllib
from html.parser import HTMLParser
import zipfile
from random import sample
class MyHTMLParser(HTMLParser):
    """Collect .gif and .jpg image URLs from a page.

    URLs are gathered from the attribute values of <img> and <input>
    start tags whose value contains 'gif' or 'jpg'.
    """

    def __init__(self):
        HTMLParser.__init__(self)
        self.gifs = []  # collected gif URLs, in document order
        self.jpgs = []  # collected jpg URLs, in document order

    def handle_starttag(self, tags, attrs):
        """Record image-like attribute values of <img>/<input> tags.

        attrs is a list of (name, value) pairs; value may be None for
        bare attributes. Only the VALUE is inspected — the original
        code also scanned attribute names, which are never URLs.
        """
        if tags in ('img', 'input'):
            for _name, value in attrs:
                if not value:
                    continue
                if 'gif' in value:
                    self.gifs.append(value)
                elif 'jpg' in value:
                    self.jpgs.append(value)

    def get_gifs(self):
        """Return the list of collected gif URLs."""
        return self.gifs

    def get_jpgs(self):
        """Return the list of collected jpg URLs."""
        return self.jpgs
# --- Setup: fetch the page, parse out image URLs, prepare the zip archive ---
url = input('请输入url:')
# Minimal browser-like UA header so simple servers don't reject the request.
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '}
req = urllib.Request(url=url, headers=headers)
# Context manager closes the HTTP response (the original leaked it).
with urllib.urlopen(req) as page:
    data = page.read()
# Save-path prefix built from 3 randomly sampled characters joined by the
# directory template. NOTE(review): original intent unclear; the original
# literal 'd:\im\age\' was a SyntaxError (trailing backslash) and '\a'
# would be a bell escape — backslashes are now doubled to keep the
# literal characters the author apparently meant.
pathname = 'd:\\im\\age\\'.join(sample('asdfghjklqwyeiuzxc,mb', 3))
parser = MyHTMLParser()
# NOTE(review): assumes the page is GBK-encoded — confirm for other sites.
parser.feed(data.decode('gbk'))
gifs = parser.get_gifs()
jpgs = parser.get_jpgs()
i = 1  # running image index shared by both download loops below
# Mode 'a' appends entries if the archive already exists.
z = zipfile.ZipFile(r'%simage.zip' % pathname, 'a')
all_img = len(gifs) + len(jpgs)  # total image count, for progress display

# Download each gif, save it to disk, then add the saved file to the zip.
for gif in gifs:
    req = urllib.Request(url=gif, headers=headers)
    fname = r'%s%i.gif' % (pathname, i)
    print('下载开始   %d/%d' % (i, all_img))
    bs = 1024 * 8  # read in 8 KiB chunks to bound memory use
    with urllib.urlopen(req) as conn, open(fname, 'wb') as f:
        while True:
            block = conn.read(bs)
            if not block:
                break
            f.write(block)
    # The file is closed (and flushed) before zipping — the original called
    # z.write() while the handle was still open, so buffered bytes could be
    # missing and the archived entry truncated.
    z.write(fname)
    i += 1

# Download each jpg, save it to disk, then add the saved file to the zip.
for jpg in jpgs:
    req = urllib.Request(url=jpg, headers=headers)
    fname = r'%s%i.jpg' % (pathname, i)
    print('下载开始   %d/%d' % (i, all_img))
    bs = 1024 * 8  # read in 8 KiB chunks to bound memory use
    with urllib.urlopen(req) as conn, open(fname, 'wb') as f:
        while True:
            block = conn.read(bs)
            if not block:
                break
            f.write(block)
    # The file is closed (and flushed) before zipping — the original called
    # z.write() while the handle was still open, so buffered bytes could be
    # missing and the archived entry truncated.
    z.write(fname)
    i += 1
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: