# The Code
The script scrapes 58.com's second-hand tablet listings (`/pbdn/`) in three steps: collect detail-page URLs from the list pages, read each listing's view count from a counter API, and parse the listing page itself.

```python
from bs4 import BeautifulSoup
import requests
import time

detail_url = []  # module-level list of collected detail-page URLs


def get_detailurl(page):
    """Collect listing detail URLs from one list page into detail_url."""
    source_url = 'http://bj.58.com/pbdn/0/pn' + str(page)
    web_data = requests.get(source_url)
    time.sleep(1)  # be polite: pause between requests
    soup = BeautifulSoup(web_data.text, 'lxml')
    links = soup.select('td.t > a.t')
    for link in links:
        href = link.get('href')
        # keep only regular listings hosted on bj.58.com (skips promoted entries)
        if href.rfind('http://bj.58.com') != -1:
            detail_url.append(href)
    print(detail_url)
```
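As a side note, the same step can be written without the module-level list. This is a hypothetical refactor, not part of the original script; the name `collect_detail_urls`, the `timeout`, and the `raise_for_status()` call are my additions:

```python
from bs4 import BeautifulSoup
import requests


def collect_detail_urls(page):
    """Return the listing URLs on one list page instead of mutating a global."""
    resp = requests.get('http://bj.58.com/pbdn/0/pn' + str(page), timeout=10)
    resp.raise_for_status()  # surface 4xx/5xx instead of parsing an error page
    soup = BeautifulSoup(resp.text, 'lxml')
    return [a.get('href') for a in soup.select('td.t > a.t')
            if a.get('href', '').startswith('http://bj.58.com')]
```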
`get_view` asks 58's counter service for a listing's view count. The original version looped over the global list and returned on the first iteration, so every listing got the same count; here it uses its argument directly. Note that the `Cookie` value is session-specific and will have expired, while the `Host` and `Referer` headers are what let the counter endpoint answer:

```python
def get_view(url):
    """Fetch the view count for one detail-page URL via 58's counter API."""
    header = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate, sdch',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Cache-Control': 'max-age=0',
        'Connection': 'keep-alive',
        # session-specific cookie copied from the browser; replace with your own
        'Cookie': 'id58=8vXrx1dBfEKQ6mZc+CYong==; mcity=qhd; myfeet_tooltip=end; __utma=253535702.1066767632.1463909395.'
                  '1463909395.1463909395.1; __utmz=253535702.1463909395.1.1.utmcsr=link|utmccn=(not%20set)|utmcmd=(not%2'
                  '0set); als=0; bj=2016521172959; 58home=bj; ipcity=qhd%7C%u79E6%u7687%u5C9B%7C0; city=bj; bj58_id58s="'
                  'eUcyN3dTaXVXTi1QNDA4OA=="; bangbigtip2=1; sessionid=690e8177-c481-4cc4-b56c-0676790c6ecc; 58tj_uuid=d8'
                  '55a902-f081-4b23-9340-1b08713c0090; new_uv=3; final_history=26090732655920%2C26098857137450%2C26100784016'
                  '311%2C26098181862586%2C26091423827534; bj58_new_session=1; bj58_init_refer="http://bj.58.com/pbdn/1/"; b'
                  'j58_new_uv=4',
        'Host': 'jst1.58.com',
        'Referer': 'http://bj.58.com/pingbandiannao/26090732655920x.shtml?psid=150292251191878423914965113&entinfo=26090732655920_0&iuType=p_1&PGTID=0d305a36-0000-14bb-71c7-bebb30845ebf&ClickID=3',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36',
    }
    # detail URLs look like .../pingbandiannao/26090732655920x.shtml?...
    ID = url.split('/')[-1].split('?')[0].split('x.shtml')[0]
    api_url = 'http://jst1.58.com/counter?infoid={}'.format(ID)
    web_data = requests.get(api_url, headers=header)
    # the endpoint returns plain text ending in "...=<count>"
    views = web_data.text.split('=')[-1]
    return views
```
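The `split` chain for pulling out the listing id is fragile; a regex keyed to the `...<digits>x.shtml` shape visible in the Referer URL above is easier to reason about. A small sketch (`extract_info_id` is a hypothetical helper, not in the original):

```python
import re


def extract_info_id(detail_page_url):
    """Return the numeric listing id from a 58.com detail URL, or None.

    Assumes URLs shaped like .../pingbandiannao/26090732655920x.shtml?...
    """
    m = re.search(r'/(\d+)x\.shtml', detail_page_url)
    return m.group(1) if m else None


# extract_info_id('http://bj.58.com/pingbandiannao/26090732655920x.shtml?psid=1')
# -> '26090732655920'
```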
`get_info` parses a single listing page and assembles one record (the original used Chinese dict keys; they are translated here):

```python
def get_info(url):
    """Scrape category, title, date, price, condition, area, and views
    from one listing page."""
    web_data = requests.get(url)
    soup = BeautifulSoup(web_data.text, 'lxml')
    item = soup.select('span.crb_i > a')[-1].text  # last breadcrumb = category
    title = soup.select('div.col_sub.mainTitle > h1')[0].text
    date = soup.select('ul.mtit_con_left.fl > li.time')[0].text
    price = soup.select('div.su_con > span.price')[0].text
    degree = list(soup.select('ul > li > div.su_con')[1].stripped_strings)
    # the area block is missing on some listings
    area = list(soup.select('.c_25d')[0].stripped_strings) if soup.find_all('span', 'c_25d') else None
    view = get_view(url)
    data = {
        'category': item,
        'title': title,
        'date': date,
        'price': price,
        'condition': degree,
        'area': area,
        'views': view,
    }
    print(data)
```
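`get_info` only prints each record; if you want to keep the results, here is a minimal sketch of dumping the dicts to CSV (a hypothetical helper, assuming the English keys used above):

```python
import csv


def save_records(records, path='58_pbdn.csv'):
    """Write a list of scraped record dicts to a CSV file."""
    fields = ['category', 'title', 'date', 'price', 'condition', 'area', 'views']
    with open(path, 'w', newline='', encoding='utf-8') as f:
        writer = csv.DictWriter(f, fieldnames=fields)
        writer.writeheader()
        # list values (condition/area) are written as their string form
        writer.writerows(records)
```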
Finally, the driver collects URLs from the requested list pages, then visits each listing:

```python
def get_detail_info(start, end):
    for i in range(start, end):
        get_detailurl(i)
    for k in detail_url:
        get_info(k)


get_detail_info(1, 2)
```
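One last caveat: a single listing with a changed layout will raise an `IndexError` in `get_info` and abort the whole run. A hedged variant of the driver with per-item error handling and an extra delay (my addition, untested against the live site):

```python
import time


def get_detail_info_safe(start, end, delay=1):
    for i in range(start, end):
        get_detailurl(i)
    for k in detail_url:
        try:
            get_info(k)
        except Exception as e:  # e.g. a selector finding nothing on a changed page
            print('skipping {}: {}'.format(k, e))
        time.sleep(delay)  # extra pause between detail-page requests
```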