import asyncio
import multiprocessing
from concurrent.futures.thread import ThreadPoolExecutor
from datetime import datetime
from time import sleep
import aiohttp
import requests
def request_url(n):
    """Fetch https://www.baidu.com once (blocking) and return the response.

    Args:
        n: Opaque task index; passed through unchanged so callers can
           correlate each result with the submission that produced it.

    Returns:
        dict with keys "result" (the requests.Response object) and "n"
        (the index that was passed in).
    """
    rep = requests.get("https://www.baidu.com")
    return {"result": rep, "n": n}
def main_map():
    """Benchmark 10,000 HTTP requests fanned out via ThreadPoolExecutor.map.

    Prints every result dict and, finally, the elapsed wall-clock time.
    """
    start_timestamp = datetime.now()
    # The context manager replaces the explicit shutdown(wait=True): on
    # exit it blocks until every submitted task has completed.
    with ThreadPoolExecutor(max_workers=40, thread_name_prefix="test_") as thread_pool:
        results = thread_pool.map(request_url, range(10000))
    # Renamed from `re` to avoid shadowing the stdlib regex module name.
    for result in results:
        print(result)
    print("time cost:" + str(datetime.now() - start_timestamp))
def main_submit():
    """Benchmark 20,000 HTTP requests using ThreadPoolExecutor.submit.

    Submits every task up front, waits for the pool to drain, then prints
    each future's result and the total elapsed time.
    """
    start_timestamp = datetime.now()
    thread_pool = ThreadPoolExecutor(max_workers=40, thread_name_prefix="test_")
    task_list = [thread_pool.submit(request_url, i) for i in range(20000)]
    # shutdown() defaults to wait=True, so this blocks until all 20,000
    # futures have finished; the .result() calls below cannot block long.
    thread_pool.shutdown()
    for future in task_list:
        print(future.result())
    print("time cost:" + str(datetime.now() - start_timestamp))
def _main_multiprocess():
    """Benchmark 2,000 HTTP requests using a multiprocessing.Pool.

    The pool defaults to one worker process per CPU core. Each task runs
    request_url in a child process; results are printed in submission order.
    """
    start_timestamp = datetime.now()
    print("CPU内核数:{}".format(multiprocessing.cpu_count()))
    pool = multiprocessing.Pool()
    task_list = []
    for i in range(2000):
        # apply_async returns an AsyncResult immediately; collect them all
        # first so the 2,000 tasks queue up without blocking this loop.
        task_list.append(pool.apply_async(func=request_url, args=(i,)))
    pool.close()  # no further submissions allowed
    pool.join()   # block until every queued task has finished
    for async_result in task_list:
        # NOTE(review): this requires the worker's return value (which
        # contains a requests.Response) to be picklable — confirm.
        print(async_result.get())
    print("time cost:" + str(datetime.now() - start_timestamp))
def request_url_thread_pool(n):
    """Run *n* HTTP requests through a 40-thread pool and print each result.

    Intended to be dispatched into a worker process by main_multiprocess,
    giving a process-per-core / threads-per-process hybrid.

    Args:
        n: Number of requests this worker should perform.
    """
    start_timestamp = datetime.now()
    thread_pool = ThreadPoolExecutor(max_workers=40, thread_name_prefix="test_")
    task_list = [thread_pool.submit(request_url, i) for i in range(n)]
    # shutdown() waits (wait=True default) for all n futures to complete.
    thread_pool.shutdown()
    for future in task_list:
        print(future.result())
    print("time cost:" + str(datetime.now() - start_timestamp))
def main_multiprocess(n):
    """Benchmark ~n HTTP requests using one thread pool per CPU core.

    Spawns cpu_count() pool tasks, each running request_url_thread_pool
    with n // cpu_count() requests, for roughly n requests in total.

    Args:
        n: Approximate total number of requests to perform.
    """
    start_timestamp = datetime.now()
    print("CPU内核数:{}".format(multiprocessing.cpu_count()))
    count = multiprocessing.cpu_count()
    pool = multiprocessing.Pool()
    task_list = []
    # BUG FIX: the original looped `for i in range(n)`, submitting n tasks
    # of n // count requests each (~n**2 / count requests in total). One
    # task per core matches the per-task workload of int(n / count) and
    # yields the intended ~n requests overall.
    for _ in range(count):
        task_list.append(
            pool.apply_async(func=request_url_thread_pool, args=(int(n / count),))
        )
    pool.close()  # no further submissions allowed
    pool.join()   # block until every worker task has finished
    for async_result in task_list:
        # request_url_thread_pool returns None; .get() also re-raises any
        # exception that occurred in the worker process.
        print(async_result.get())
    print("time cost:" + str(datetime.now() - start_timestamp))
async def fetch_sub(i, semaphore):
    """Fetch the tracking URL with suffix *i*, bounded by *semaphore*.

    Args:
        i: Value appended to the URL's `word=` query parameter.
        semaphore: asyncio.Semaphore capping how many coroutines hold an
            open session/request at any one time.

    Returns:
        The response body decoded as JSON.
    """
    async with semaphore:
        # NOTE(review): creating a new ClientSession per request is costly;
        # sharing one session across calls would be faster, but that would
        # change the function's interface, so it is left as-is.
        async with aiohttp.ClientSession() as client:
            async with client.get("http://ad.partner.gifshow.com/track/activate?callback=zs-MVxyan0NeQX-GiQF2wBA1DfDUlBr9ylsXIFCdV24SOApPQb1-8YLgyiGg6jaEkEdxjtQuil7Z4vmJHtIsBnkUhS6AicDzki9LNOFEvkLNVI1qi8ximodInWFChyqj4c_oy0mOi0YRw_EwSjmAlviTniM5I51gJkie_U5e1AL-2XzvUjoQojNG5jzlAa&word=" + str(i)) as resp:
                return await resp.json()
if __name__ == '__main__':
    # Run the multiprocessing benchmark first, then the asyncio one below.
    _main_multiprocess()
    # main_submit()
    # main_map()
    # main_multiprocess(20000)
    start_timestamp = datetime.now()
    loop = asyncio.get_event_loop()
    semaphore = asyncio.Semaphore(500)  # cap concurrent requests at 500
    # Schedule all 2,000 coroutines up front; the semaphore inside
    # fetch_sub throttles how many actually run at once.
    task_list = [asyncio.ensure_future(fetch_sub(i, semaphore)) for i in range(2000)]
    loop.run_until_complete(asyncio.wait(task_list))
    for task in task_list:
        print(task.result())
    print("time cost:" + str(datetime.now() - start_timestamp))
# ---------------------------------------------------------------------------
# NOTE(review): the lines below are residue from the web page this script was
# copied from (article title, copyright notice, and the site's "recommended
# reading" list). They are not Python and broke the file; they are preserved
# here as comments only.
#
# python 多进程/多线程/协程
# ©著作权归作者所有,转载或内容合作请联系作者
# - 文/潘晓璐 我一进店门,熙熙楼的掌柜王于贵愁眉苦脸地迎上来,“玉大人,你说我怎么就摊上这事。” “怎么了?”我有些...
# - 文/花漫 我一把揭开白布。 她就那样静静地躺着,像睡着了一般。 火红的嫁衣衬着肌肤如雪。 梳的纹丝不乱的头发上,一...
# - 文/苍兰香墨 我猛地睁开眼,长吁一口气:“原来是场噩梦啊……” “哼!你这毒妇竟也来了?” 一声冷哼从身侧响起,我...
# 推荐阅读更多精彩内容
# - 目录 只用request爬取 用多线程爬取 用多进程爬取 用协程爬取 用协程+多进程爬取 1. 只用request...
# - 一、多进程 1、子进程(subprocess包) 在python中,通过subprocess包,fork一个子进程...
# - 转载请注明:陈熹 chenx6542@foxmail.com (简书号:半为花间酒)若公众号内转载请联系公众号:早...
# - 1.笨栗子就是对多进程中调用协程,pool.apply_async(asyncio.get_event_loop(...
# - 场景单进程,两线程,两进程,两协程 数20万的场景对比单进程:单个进程count 20万两线程:启动两个线程,对全...
# ---------------------------------------------------------------------------