# coding:utf-8
import requests
from bs4 import BeautifulSoup
import os , random
import urllib2
import sys , MySQLdb
import re
from PIL import Image
import imagehash
import uuid
from io import BytesIO
# Database settings (connection parameters for the proxy-pool MySQL database)
MYSQL_HOST = 'localhost'
MYSQL_DBNAME = 'ip'
MYSQL_USER = 'mark'
MYSQL_PASSWD = '1234'
MYSQL_PORT = 3306
# Perceptual hashes of images saved so far; used by getImage to skip duplicates.
image_hash_set = set()
# Forged request headers — edit the User-Agent here if needed.
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'}
def getProxy ( conn , cur , columns_data ):
    """Return a verified ``{'http': 'http://ip:port'}`` proxy dict.

    Picks a random proxy id from *columns_data*, looks up its ip/port in the
    ``proxy`` table, and probes it against douban.  On probe failure, recurses
    to try another candidate.

    :param conn: open MySQLdb connection (unused directly, passed for retries)
    :param cur: MySQLdb cursor over the proxy database
    :param columns_data: list of candidate ``proxy.id`` values
    :return: dict usable as the ``proxies`` argument of ``requests`` / urllib2
    """
    flag = random.choice ( columns_data )
    # Parameterized query — let the driver do the quoting instead of %-formatting.
    cur.execute ( "select proxy_ip,proxy_port from proxy where id = %s" , (flag,) )
    proxy = {}
    for each in cur.fetchall ():
        proxy[ 'http' ] = "http://%s:%s" % (each[ 0 ] , each[ 1 ])
    try:
        # Timeout so one dead proxy cannot hang the whole spider.
        requests.get ( 'https://movie.douban.com/top250' , proxies=proxy , timeout=10 )
    except requests.exceptions.RequestException:
        # Catch only network/proxy errors — a bare except also hid real bugs.
        print ( "proxy error" )
        # BUG FIX: the retry's result was discarded, so callers received None.
        return getProxy ( conn , cur , columns_data )
    else:
        print ( "proxy success" )
        return proxy
def get_request ( url , headers , conn , cur , columns_data ):
    """Fetch *url* through a working proxy and return the raw response body.

    Obtains a verified proxy from getProxy, installs it as the global urllib2
    opener, then performs the request with the supplied *headers*.
    """
    working_proxy = getProxy ( conn , cur , columns_data )
    handler = urllib2.ProxyHandler ( working_proxy )
    urllib2.install_opener ( urllib2.build_opener ( handler ) )
    request = urllib2.Request ( url , headers=headers )
    return urllib2.urlopen ( request ).read ()
def getImage ( url , headers , conn , cur , columns_data ):
    """Scrape pages 1-6 of a tieba thread and save de-duplicated images.

    For each page, extracts every ``img.BDE_Image`` inside the post-content
    divs, computes a perceptual hash, and saves the image under a uuid name
    unless a visually identical image was already saved.

    :param url: thread url ending in ``pn=`` — the page number is appended
    :param headers: forged request headers forwarded to get_request
    :param conn, cur, columns_data: proxy-pool plumbing for get_request
    """
    for page in range ( 1 , 7 ):
        real_url = "%s%d" % (url , page)
        # BUG FIX: the original fetched `url` (page 1) six times instead of real_url.
        html = get_request(real_url,headers,conn,cur,columns_data)
        soup = BeautifulSoup(html,'html.parser')
        for post in soup.findAll('div',attrs = {'id':re.compile(r'^post_content_(\d+)')}):
            for img_tag in post.findAll('img',class_='BDE_Image'):
                src = img_tag.get('src')
                name = uuid.uuid1()
                response = requests.get ( src ).content
                image = Image.open ( BytesIO ( response) )
                imagehash_tmp = imagehash.average_hash(image)
                if imagehash_tmp not in image_hash_set:
                    # BUG FIX: the set was never updated, so dedup never triggered.
                    image_hash_set.add(imagehash_tmp)
                    print ( "%s\t%s.jpg" %(src,name) )
                    # Raw string: '\U' in the original literal is a syntax error on Py3.
                    os.chdir(r'C:\Users\NorthCity\Desktop\spider\image')
                    image.save ( "%s.jpg" %(name))
if __name__ == "__main__":
    # Connect to the proxy-pool database and collect every candidate proxy id.
    conn = MySQLdb.connect ( host=MYSQL_HOST , user=MYSQL_USER , passwd=MYSQL_PASSWD , db=MYSQL_DBNAME ,
                             port=MYSQL_PORT , charset='utf8' )
    cur = conn.cursor ()
    cur.execute ( "select id from proxy" )
    columns_data = [ row[ 0 ] for row in cur.fetchall () ]
    # Thread url; getImage appends the page number after 'pn='.
    url = 'https://tieba.baidu.com/p/5033202671?pn='
    getImage (url , headers , conn , cur , columns_data )
# 【美图】杂图_美图吧_百度贴吧 爬虫  (spider for the "beautiful pictures" gallery thread, Baidu Tieba)
# NOTE(review): trailing scraped-page residue (site footer / copyright line and
# unrelated recommended-article snippets) removed — it was bare prose pasted
# into the .py file and made it syntactically invalid.