
Python 3.4 - a small crawler

2015-10-01 21:50
# coding: UTF-8
__author__ = 'holoto'
from urllib import request          # HTTP requests
import re                           # regular expressions
from bs4 import BeautifulSoup       # HTML parsing

def user_agent(user):
    # Return the User-Agent string for the requested browser key.
    a = {}
    a["chrome17"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"
    a["Maxthon"] = "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)"
    a['360'] = "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)"
    a["firefox"] = 'Mozilla/5.0 (X11; Linux x86_64; rv:41.0) Gecko/20100101 Firefox/41.0'
    a["G3_chrome"] = 'Mozilla/5.0 (Linux; Android 4.4.4; LG G3 Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.89 Mobile Safari/537.36'
    a['iphone'] = 'Mozilla/5.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/7.0 Mobile/12A365 Safari/9537.53'
    return a[user]

def add_user_agent(url, useragent):
    # Attach a User-Agent header to the Request object.
    url.add_header('User-Agent', useragent)

def proxy_server(type_):
    proxy_url = 'http://www.xicidaili.com/wn/'           # page to scrape
    proxy_req = request.Request(proxy_url)
    proxy_useragent = user_agent('G3_chrome')             # pick a user agent
    add_user_agent(proxy_req, proxy_useragent)            # attach it to the request
    f = request.urlopen(proxy_req)
    ff = f.read().decode()
    soup = BeautifulSoup(ff, "html5lib")
    proxy_ip = soup.find_all(text=re.compile(r"\d*\.\d*\.\d*\.\d*"), limit=20)   # match IP addresses
    proxy_port = soup.find_all(text=re.compile(r"^\d{2,4}$"), limit=20)          # match ports
    proxy_type = soup.find_all(text=re.compile(r"^HTTPS{0,1}$"), limit=20)       # match protocol (HTTP/HTTPS)
    print(proxy_ip)      # print the matched IPs
    print(proxy_port)    # print the matched ports
    print(proxy_type)    # print the matched protocols

proxy_server(1)  # run (the argument is currently unused)