第一次大作业,简单的熟悉了requests、BeautifulSoup包的用法
实验成果
代码
#!/usr/bin/env python
# NOTE(review): original shebang was misspelled ("pyhon") and the bs4 import
# was fused onto the `import time` line, which is a syntax error; split here.
import time

import requests
from bs4 import BeautifulSoup
def get_links(sale=0):
    """Collect item detail-page links from one 58.com tablet listing page.

    Parameters:
        sale: 0 for personal-seller listings, non-zero for merchant
              listings; substituted into the listing URL path.

    Returns:
        list[str]: hrefs of the item detail pages; empty on a non-200 reply.
    """
    urls = []
    url = 'http://bj.58.com/pbdn/{}/pn2/'.format(str(sale))
    # timeout guards against the request hanging indefinitely on a slow host
    wb_data = requests.get(url, timeout=10)
    if wb_data.status_code == 200:
        soup = BeautifulSoup(wb_data.text, 'lxml')
        # 'td.t a.t' matches the title anchor of each listing row
        for link in soup.select('td.t a.t'):
            urls.append(link.get('href'))
    return urls
def get_view(url):
    """Fetch the page-view count for one 58.com item via the counter API.

    Parameters:
        url: the item's detail-page URL; the numeric info id is parsed from
             its last path segment (expected shape '<info_id>x.shtml').

    Returns:
        The view count (string after the last '=' in the API reply), or 0
        when the counter request is redirected/blocked (non-200 response).
    """
    url_path = url.split("?")[0]
    url_last_part = url_path.split('/')[-1]
    # Split on the literal suffix instead of strip('x.shtml'): str.strip
    # removes any of the characters {x,.,s,h,t,m,l} from both ends, which
    # could silently eat parts of an id; split only removes the suffix.
    # (Also renamed from `id`, which shadowed the builtin.)
    info_id = url_last_part.split('x.shtml')[0]
    api = 'http://jst1.58.com/counter?infoid={}'.format(info_id)
    time.sleep(5)  # crude throttle to stay under the anti-crawler radar
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36',
        'Cookie': 'f=n; f=n; f=n; ipcity=huizhou%7C%u60E0%u5DDE; myfeet_tooltip=end; bj58_id58s="MFFhRi0yRVk0Y0xUMDAzNw=="; id58=c5/ns1fKTxYusyMjA8gkAg==; als=0; sessionid=425f8ca2-2199-4331-9b7c-994f00dc7510; bj58_new_session=0; bj58_init_refer=""; bj58_new_uv=2; 58tj_uuid=e39df2d8-ca6a-42ff-b6e4-c80709fe8724; new_session=0; new_uv=2; utm_source=; spm=; init_refer=',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Cache-Control': 'max-age= 0',
        'Connection': 'keep-alive',
        'Host': 'bj.58.com',
        # Referer must look like the item page itself or the counter refuses
        'Referer': r'http://bj.58.com/pingbandiannao/{}x.shtml'.format(str(info_id))
    }
    # allow_redirects=False: a redirect here means the request was blocked
    js = requests.get(api, headers=headers, allow_redirects=False, timeout=10)
    if js.status_code != 200:
        return 0
    view = js.text.split('=')[-1]
    return view
def get_more_detial(sale=0):
    """Scrape title/date/price/area/view for every listing and print each record.

    (Name typo "detial" is kept intentionally — external callers use it.)

    Parameters:
        sale: 0 for personal-seller listings, non-zero for merchant
              listings; forwarded to get_links() and used for 'cate'.

    Returns:
        None; each item's record dict is printed as a side effect.
    """
    urls = get_links(sale)
    for url in urls:
        wb_data = requests.get(url, timeout=10)
        soup = BeautifulSoup(wb_data.text, 'lxml')
        title = soup.title.text
        dates = soup.select('li.time')
        prices = soup.select('#content span.price')
        # Use ONE selector for both the existence test and the extraction.
        # The original tested find_all('span', 'c_25d') but then indexed
        # select('.c_25d')[0]; if the two ever disagree that raises
        # IndexError. An empty match yields [] just as list("") did before.
        area_tags = soup.select('.c_25d')
        areas = list(area_tags[0].stripped_strings) if area_tags else []
        time.sleep(5)  # throttle between detail pages
        data = {
            'title': title,
            'date': dates[0].text if dates else "",  # was mis-keyed 'data'
            'price': prices[0].text if prices else 0,
            'area': areas,
            'cate': '个人' if sale == 0 else '商家',
            'view': get_view(url),
        }
        print(data)
print(get_more_detial(1))
总结
- 几个月的时间,网站反爬策略不断更新,这次写的代码还是没能解决view的问题
- 利用向网站传递headers的方法已经不能满足现状了
- 下一步除了学习mongo,有可能还要学习前端