# -*- coding: utf-8 -*-
# Multi-threaded crawler for Qiushibaike (糗事百科): one pool of crawl
# threads downloads the pages, another pool of parse threads extracts the
# jokes and appends them to a JSON-lines file.
import threading
import json
import time

import requests
from lxml import etree
from Queue import Queue
class ThreadCrawl(threading.Thread):
    """Crawl thread: take a page number from pageQueue, download the page,
    and push the raw HTML onto dataQueue."""

    def __init__(self, threadname, pageQueue, dataQueue):
        super(ThreadCrawl, self).__init__()
        self.threadname = threadname
        self.pageQueue = pageQueue
        self.dataQueue = dataQueue
        self.headers = {"User-Agent": "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)"}

    def run(self):
        print "Starting " + self.threadname
        while not CRAWL_EXIT:
            try:
                # Non-blocking get: raises Queue.Empty once all pages are
                # taken, so the loop keeps polling until CRAWL_EXIT is set.
                page = self.pageQueue.get(False)
                url = "https://www.qiushibaike.com/8hr/page/" + str(page) + "/"
                content = requests.get(url, headers=self.headers).text
                time.sleep(1)
                self.dataQueue.put(content)
            except:
                pass
        print "Finished " + self.threadname
class ThreadParse(threading.Thread):
    """Parse thread: take raw HTML from dataQueue, extract the items, and
    append them to the shared output file."""

    def __init__(self, threadname, dataQueue, filename, lock):
        super(ThreadParse, self).__init__()
        self.threadname = threadname
        self.dataQueue = dataQueue
        self.filename = filename
        self.lock = lock

    def run(self):
        while not PARSE_EXIT:
            try:
                # Non-blocking get: raises Queue.Empty when nothing is queued.
                html = self.dataQueue.get(False)
                self.parse(html)
            except:
                pass
    def parse(self, html):
        html = etree.HTML(html)
        # Each joke lives in a <div> whose id contains "qiushi_tag".
        result = html.xpath('//div[contains(@id,"qiushi_tag")]')
        for res in result:
            username = res.xpath('.//img/@alt')[0]
            pic = res.xpath('./div/a/img/@src')
            duanzi = res.xpath('.//div[@class="content"]/span')[0].text.strip()
            zan = res.xpath('.//i')[0].text
            comment = res.xpath('.//i')[1].text
            items = {
                "username": username,
                "image": pic,
                "content": duanzi,
                "zan": zan,
                "comment": comment
            }
            # The file handle is shared by all parse threads, so guard
            # writes with the lock.
            with self.lock:
                self.filename.write(json.dumps(items, ensure_ascii=False).encode('utf-8') + "\n")
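# For reference, parse() above expects page markup roughly shaped like the
# fragment sketched below. This is a hypothetical structure inferred from the
# XPath expressions in parse(); the real Qiushibaike markup may differ:
#
#   <div id="qiushi_tag_12345">
#       <div><a href="..."><img src=".../avatar.jpg" alt="some_user"/></a></div>
#       <div class="content"><span>A short joke goes here.</span></div>
#       <i>100</i>
#       <i>5</i>
#   </div>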
# Module-level exit flags polled by the worker threads.
CRAWL_EXIT = False
PARSE_EXIT = False
def main():
    # Page numbers 1-20 to crawl.
    pageQueue = Queue(20)
    for i in range(1, 21):
        pageQueue.put(i)
    # Raw HTML handed from the crawl threads to the parse threads.
    dataQueue = Queue()
    filename = open("duanzi.json", "a")
    lock = threading.Lock()

    crawlList = ["Crawl thread 1", "Crawl thread 2", "Crawl thread 3"]
    threadcrawl = []
    for threadname in crawlList:
        thread = ThreadCrawl(threadname, pageQueue, dataQueue)
        thread.start()
        threadcrawl.append(thread)

    parseList = ["Parse thread 1", "Parse thread 2", "Parse thread 3"]
    threadparse = []
    for threadname in parseList:
        thread = ThreadParse(threadname, dataQueue, filename, lock)
        thread.start()
        threadparse.append(thread)

    # Busy-wait until every page number has been taken, then tell the
    # crawl threads to exit.
    while not pageQueue.empty():
        pass
    global CRAWL_EXIT
    CRAWL_EXIT = True
    for thread in threadcrawl:
        thread.join()
    print "Crawl threads finished"

    # Busy-wait until every downloaded page has been parsed, then tell the
    # parse threads to exit.
    while not dataQueue.empty():
        pass
    global PARSE_EXIT
    PARSE_EXIT = True
    for thread in threadparse:
        thread.join()
    print "Parse threads finished"

    with lock:
        filename.close()
    print "Thanks for using this crawler."

if __name__ == "__main__":
    main()
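A note on the shutdown design used above: main() busy-waits in the while-not-empty loops and then flips the module-level CRAWL_EXIT and PARSE_EXIT flags that the worker threads poll. A common alternative is to put one sentinel value per worker onto the queue and let each thread exit when it receives the sentinel, which removes both the global flags and the busy loops. The following is a minimal sketch of that pattern, not part of the original script; it reuses the threading and Queue imports from above, and crawl_worker is only a placeholder.

def crawl_worker(pageQueue):
    # Block on get(); exit when the None sentinel arrives.
    while True:
        page = pageQueue.get()
        if page is None:
            break
        print "would crawl page %d" % page

def run_with_sentinels(num_workers=3):
    pageQueue = Queue()
    for page in range(1, 21):
        pageQueue.put(page)
    workers = []
    for _ in range(num_workers):
        t = threading.Thread(target=crawl_worker, args=(pageQueue,))
        t.start()
        workers.append(t)
    # One sentinel per worker guarantees every thread sees exactly one None.
    for _ in range(num_workers):
        pageQueue.put(None)
    for t in workers:
        t.join()

Queue.join() together with task_done() is another standard way to wait for all queued work to finish without polling.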