首页 > 技术文章 > 单线程+多任务+异步协程爬虫模块

well-666 2020-05-08 22:15 原文

import requests
import aiohttp
import time
import asyncio
# Wall-clock start time; elapsed total is printed at the end of the script.
s = time.time()
# Target endpoints for the concurrency demo.
# NOTE(review): assumes a local test server is listening on 127.0.0.1:5000
# (presumably one that delays each response so the async speed-up is
# visible) -- confirm before running.
urls = [
'http://127.0.0.1:5000/bobo',
'http://127.0.0.1:5000/jay'
]

async def get_request(url):
    """Fetch *url* asynchronously and return the response body as text.

    Uses aiohttp instead of requests so the coroutine suspends while
    waiting on the network, allowing the event loop to run the other
    pending requests concurrently.
    """
    # NOTE(review): a fresh ClientSession per request is simple but
    # wasteful; for many URLs a single shared session would reuse
    # connections.
    async with aiohttp.ClientSession() as session:
        # session.get() returns an async context manager directly;
        # the original's extra `await` before it was redundant and
        # relied on deprecated aiohttp compatibility behavior.
        async with session.get(url=url) as response:
            page_text = await response.text()
            print(page_text)
            return page_text
# Wrap each coroutine in a Task so all fetches are scheduled together,
# then drive the event loop until every task has finished.
tasks = [asyncio.ensure_future(get_request(target)) for target in urls]

event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(asyncio.wait(tasks))

# Total elapsed wall-clock time since the script started.
print(time.time() - s)

推荐阅读