# -*- coding: utf-8 -*-
from time import sleep
import sys

import requests
from pyquery import PyQuery as pq

# Python 2 only: force UTF-8 as the default codec so mixed str/unicode
# concatenation of the Chinese strings below does not raise.  `reload`
# and `sys.setdefaultencoding` do not exist on Python 3 (str is already
# unicode there), so the hack is guarded to keep the module importable.
if sys.version_info[0] == 2:
    reload(sys)  # noqa: F821 -- builtin on Python 2 only
    sys.setdefaultencoding("utf-8")
class Gra(object):
    """Scrape per-city housing-market series (unit sale price, new supply,
    sale/rent ratio) from the charts endpoint and append the merged rows
    to a local CSV-style text file.
    """

    def __init__(self, outfile="/Users/luomeng/Desktop/room.txt"):
        """Prepare HTTP headers and (re)create the output file.

        outfile: path of the report file.  It is truncated here and the
            CSV header row written.  NOTE(fix): the original wrote the
            header to a different path ("/xxx/room.txt") than the data
            rows — header and rows now share one file.
        """
        self.user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
        self.headers = {
            'User-Agent': self.user_agent
        }
        self.outfile = outfile
        # "w" already truncates, so the original's separate empty write
        # followed by an append is collapsed into a single open.
        with open(self.outfile, "w") as fo:
            fo.write("城市,日期,出售单价,出售供给量,售租比\n")

    def _fetch_series(self, url, data, cookies, cityid, based):
        """Fetch one metric series and normalise it.

        based: which metric to request ("price" / "supply" / "slratio").
        Returns a list of {"cityid", "month", "data"} dicts; months that
        come back without a value get 0 (the original used a bare
        try/except for this — ``dict.get`` is the targeted equivalent).
        """
        params = dict(data, based=based)
        resp = requests.get(url, params=params, headers=self.headers,
                            cookies=cookies)
        print(resp.url)
        payload = resp.json()
        return [
            {"cityid": cityid, "month": r["month"], "data": r.get("data", 0)}
            for r in payload["data"][0]["rows"]
        ]

    def grabData(self, cityid, cityname):
        """Grab the three series for one city, join them by month, and
        append "city,month,price,supply,slratio" lines to the report.
        """
        url = "http://www.xxxx.cn/market/chartsdatanew.html"
        # Base query string; "based" is overridden per request, "city"
        # selects the city.  Other keys were captured from the site and
        # are sent verbatim.
        data = {
            'city': cityid,
            'proptype': '11',
            'district': 'allsq1',
            'sinceyear': '5',
            'flag': '1',
            'isv3': '0',
            'type': 'forsale',
            'matchrand': 'a0b92382',
            'based': 'supply',
            'dtype': 'line'
        }
        # Session cookies captured from a browser; the city cookie is
        # rewritten for the requested city.
        cookiesstr = "userchannel=direct; Hm_lvt_c2a7a3cec6f9dd8849155424efab19c7=1556094056; deviceStr=4f0750b83bbd41af37678bf609bfe2b5; cityredata=43464a06c67b93a525ab1b09566f7b73; city="+ cityid +"; thirdLog_fromurl=aHR0cDovL3d3dy5jcmVwcmljZS5jbi91cmJhbi9iai5odG1s; cityurl=e8727615c054b43; Hm_lpvt_c2a7a3cec6f9dd8849155424efab19c7=155615774"
        cookies = {}
        for item in cookiesstr.split(';'):
            name, value = item.strip().split('=', 1)
            cookies[name] = value

        # One helper call per metric replaces three copy-pasted loops.
        prices = self._fetch_series(url, data, cookies, cityid, "price")
        supplys = self._fetch_series(url, data, cookies, cityid, "supply")
        slratios = self._fetch_series(url, data, cookies, cityid, "slratio")

        # Index the secondary series by (cityid, month) for O(1) joins
        # instead of the original O(n^2) inner scans.
        supply_by_key = dict(((s["cityid"], s["month"]), s["data"])
                             for s in supplys)
        slratio_by_key = dict(((s["cityid"], s["month"]), s["data"])
                              for s in slratios)

        with open(self.outfile, "a+") as fo:
            for p in prices:
                key = (p["cityid"], p["month"])
                # Default 0 fixes the original NameError / stale-value
                # bug when a month exists in prices but not in the
                # supply or slratio series.
                supply = supply_by_key.get(key, 0)
                slratio = slratio_by_key.get(key, 0)
                fo.write("%s,%s,%s,%s,%s\n" % (
                    cityname, p["month"], p["data"], supply, slratio))

    def getCity(self):
        """Return [{"cityid", "cityname"}] parsed from the city picker.

        The id is recovered from each cell's onclick handler, e.g.
        ``cityselecta('bj');`` -> ``bj``.
        """
        resp = requests.get("http://www.xxxx.cn//market/ajaxselectcity.html",
                            headers=self.headers)
        doc = pq(resp.text)
        # Hoist the DOM query out of the loop (the original re-ran the
        # selector on every iteration) and drop the builtin-shadowing
        # name ``long``.
        cells = doc('.citylistbox').children().children()
        citylist = []
        for i in range(len(cells)):
            cell = cells.eq(i)
            cityid = cell.attr("onclick").replace(
                "cityselecta('", "").replace("');", "")
            citylist.append({"cityid": cityid, "cityname": cell.text()})
        return citylist
if __name__ == "__main__":
    # Crawl every city sequentially; the original used Python-2-only
    # print statements (a SyntaxError on Python 3) — the %-formatted
    # print() below behaves identically on both interpreters.
    gra = Gra()
    citylist = gra.getCity()
    for city in citylist:
        cityid = city["cityid"]
        cityname = city["cityname"]
        print("%s 开始抓取..." % cityname)
        gra.grabData(cityid, cityname)
        # Polite delay between cities to avoid hammering the server.
        sleep(5)
# --- Non-code residue: blog-article footer captured when this script was
# --- scraped from a web page.  Commented out so the file stays parseable.
# 【python16】爬虫
# ©著作权归作者所有,转载或内容合作请联系作者
# - 文/潘晓璐 我一进店门,熙熙楼的掌柜王于贵愁眉苦脸地迎上来,“玉大人,你说我怎么就摊上这事。” “怎么了?”我有些...
# - 文/花漫 我一把揭开白布。 她就那样静静地躺着,像睡着了一般。 火红的嫁衣衬着肌肤如雪。 梳的纹丝不乱的头发上,一...
# - 文/苍兰香墨 我猛地睁开眼,长吁一口气:“原来是场噩梦啊……” “哼!你这毒妇竟也来了?” 一声冷哼从身侧响起,我...
# 推荐阅读更多精彩内容
# - scrapy框架是爬虫界最为强大的框架,没有之一,它的强大在于它的高可扩展性和低耦合,使使用者能够轻松的实现更改和...
# - 爬虫(Spider),反爬虫(Anti-Spider),反反爬虫(Anti-Anti-Spider) 之间恢宏壮阔...
# - 通用爬虫和聚焦爬虫 根据使用场景,网络爬虫可分为 通用爬虫 和 聚焦爬虫 两种. 通用爬虫 通用网络爬虫 是 捜索...