# Tested working; a demo crawler for learning basic web-scraping concepts.
"""Scrape target: http://www.qiushibaike.com/8hr/page/1

Implemented with multiple processes connected by joinable queues:
url_queue -> request_queue -> html_queue -> file on disk.
"""
import requests
from lxml import etree
from multiprocessing import JoinableQueue as Queue
from multiprocessing import Process


class QiuShi:

    def __init__(self):
        # Page-number URL template and a browser-like User-Agent.
        self.base_url = 'http://www.qiushibaike.com/8hr/page/{}'
        # NOTE: headers must be a dict so requests.get(headers=...) accepts it
        # (the original stored a bare string and passed it positionally).
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) '
                          'AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/70.0.3538.67 Safari/537.36'
        }
        # Joinable queues carry data between the pipeline stages.
        self.url_queue = Queue()
        self.request_queue = Queue()
        self.html_queue = Queue()

    def get_url_list(self):
        """Enqueue the URL of every listing page (pages 1..13)."""
        for page in range(1, 14):
            target_url = self.base_url.format(page)
            print(target_url)
            # Each put() is counted; url_queue.join() waits for task_done().
            self.url_queue.put(target_url)

    def request_url(self):
        """Worker loop: fetch each queued URL and pass the response on."""
        while True:
            target_url = self.url_queue.get()
            # BUG FIX: the second positional argument of requests.get() is
            # `params`, not headers — pass the header dict as headers=.
            response = requests.get(target_url, headers=self.headers)
            print(response)
            self.request_queue.put(response)
            self.url_queue.task_done()

    def get_content(self):
        """Worker loop: parse each response into author/content dicts."""
        while True:
            html_text = self.request_queue.get().content.decode()
            html = etree.HTML(html_text)
            div_list = html.xpath('//div[@id="content-left"]/div')
            content_list = []
            for div in div_list:
                item = {
                    'author': div.xpath('.//h2/text()')[0].strip(),
                    'content': div.xpath('.//span/text()')[0].strip(),
                }
                print(item)
                content_list.append(item)
            self.html_queue.put(content_list)
            self.request_queue.task_done()

    def save_data(self):
        """Worker loop: append every parsed item to qiushi.text."""
        while True:
            data_list = self.html_queue.get()
            # Open once per batch (not once per item) and write UTF-8
            # explicitly so non-ASCII content saves on any platform.
            with open('qiushi.text', 'a+', encoding='utf-8') as f:
                for data in data_list:
                    f.write(str(data))
                    f.write('\r\n')
            self.html_queue.task_done()

    def main(self):
        """Fill the URL queue, start the workers, wait for the queues to drain."""
        self.get_url_list()
        # Collect the three stage workers.
        process_list = []
        for worker in (self.request_url, self.get_content, self.save_data):
            process_list.append(Process(target=worker))
        for process in process_list:
            # Daemon processes: they end when the main process ends, whether
            # or not their (infinite) loops would ever finish on their own.
            process.daemon = True
            process.start()
        # Like a supervisor waiting for all staff to clock out: block until
        # every queued task has been matched by a task_done() call.
        for queue in (self.url_queue, self.request_queue, self.html_queue):
            queue.join()


if __name__ == '__main__':
    qiushi = QiuShi()
    qiushi.main()
# (blog post timestamp left over from the copy-paste: 05-26 21:03)