# 评论链接
# 放入excel(negkey)中的内容
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 16 14:52:09 2018
@author: Shirley
"""
#好评链接:http://www.dianping.com/shop/2044996/review_all/p2?queryType=reviewGrade&queryVal=good
#差评链接:http://www.dianping.com/shop/2044996/review_all/p2?queryType=reviewGrade&queryVal=bad
import requests
from lxml import etree
import xlrd
import random
import time
data = []  # module-level accumulator: cleaned review strings, appended by Comments()
def Comments(url):
    """Fetch one Dianping review-list page and append its review texts to
    the module-level ``data`` list.

    url: a full review_all page URL (good/bad selected via queryVal).
    Side effect: extends the global ``data``; returns None.
    """
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36',
        'Cookie': '浏览器获取的cookie',  # paste the cookie copied from a logged-in browser session
    }
    # timeout added: without it a stalled connection hangs the whole crawl
    resp = requests.get(url, headers=headers, timeout=10)
    comments = etree.HTML(resp.text, parser=etree.HTMLParser(encoding='utf-8'))
    # class "review-words Hide" = long (collapsed) reviews;
    # exact class "review-words" = short reviews.
    for node in comments.xpath('//div[@class="review-words Hide"]'):
        data.append(_clean_review(node.xpath('string(.)'), long_review=True))
    for node in comments.xpath('//div[@class="review-words"]'):
        data.append(_clean_review(node.xpath('string(.)'), long_review=False))

def _clean_review(text, long_review):
    """Strip layout whitespace from one review string.

    long_review: long reviews carry a trailing "收起评论" (collapse) label
    that must be removed as well.
    The GBK encode/decode round-trip with errors='ignore' drops emoji and
    other characters outside GBK (keeps the text safe for GBK consumers).
    Using replace() rather than split() keeps the review as one string.
    """
    text = text.replace("\t", "").replace("\n", "").replace(" ", "")
    if long_review:
        text = text.replace("收起评论", "")
    return text.encode('gbk', 'ignore').decode('gbk')
def getComments(path='D:/anaconda/shirleylearn/dazhongdianping/negkey.xlsx',
                query_val='bad'):
    """Read (shop id, page count) rows from the Excel key file and crawl
    every review page of every shop.

    path: xlsx file whose Sheet1 has the shop id in column 0 and the
        number of review pages in column 1 (defaults kept for backward
        compatibility with the original zero-argument call).
    query_val: 'bad' for negative reviews, 'good' for positive ones.
    Side effect: fills the global ``data`` via Comments().
    """
    excelfile = xlrd.open_workbook(path)
    keys = excelfile.sheet_by_name('Sheet1')
    for i in range(keys.nrows):
        row = keys.row(i)  # read the row once instead of twice per field
        shop_id = int(row[0].value)
        pages = int(row[1].value)
        for page in range(1, pages + 1):
            url = ('http://www.dianping.com/shop/%d/review_all/p%d'
                   '?queryType=reviewGrade&queryVal=%s'
                   % (shop_id, page, query_val))
            Comments(url)
            # random sub-second pause to look less like a bot
            time.sleep(random.random())
if __name__ == "__main__":
getComments()
print (len(data))
with open('neg.txt','w') as f:#把评论放入txt,好评改为pos.txt
for k in data:
f.write(k)
# 抓取结果展示
# 爬一次IP就被封了 o(╥﹏╥)o