04 Async Crawler Essentials: Downloading Images Fully Asynchronously

# Third-party dependencies: aiohttp, aiofiles (pip install aiohttp aiofiles)
import asyncio
import aiohttp
import aiofiles



async def download(url):
    print("start downloading", url)
    file_name = url.split("/")[-1]
    # aiohttp.ClientSession plays the role of requests
    async with aiohttp.ClientSession() as session:
        # send the network request (pass ssl=False to skip certificate verification if needed)
        async with session.get(url) as resp:
            # await resp.text()  # text() is a coroutine here, the counterpart of requests' resp.text
            # await resp.json()
            content = await resp.content.read()  # counterpart of requests' resp.content
            # writing the file (creating, opening, writing) is also I/O, so use aiofiles
            async with aiofiles.open(file_name, mode="wb") as f:
                await f.write(content)

    print("download finished.", url)

"""
[
"https://img08.tooopen.com/20230328/tooopen_tp_154258425883501.jpg",
"https://img08.tooopen.com/20230410/tooopen_tp_153449344972435.jpg",
"https://img08.tooopen.com/20230515/tooopen_tp_17470647628103.jpg",
]
"""
async def main():
    url_list = [
        "https://pic.tutu555.net/picss/2023/allimg/230711/11022401-1-4D8.jpg",
        "https://pic.tutu555.net/picss/2023/allimg/230711/11022401-2-5B4.jpg",
        "https://pic.tutu555.net/picss/2023/allimg/230711/11022401-3-54F.jpg",
    ]
    tasks = []
    for url in url_list:
        t = asyncio.create_task(download(url))
        tasks.append(t)
    await asyncio.wait(tasks)


if __name__ == '__main__':
    # Windows only: switch off the default ProactorEventLoop, which can raise errors on exit with aiohttp
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
    asyncio.run(main())
    # event_loop = asyncio.get_event_loop()  # deprecated approach
    # event_loop.run_until_complete(main())
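
The script above opens a new ClientSession for every image. A common refinement, sketched below and not part of the original post, is to share one session across all downloads (so aiohttp can pool connections) and cap concurrency with a semaphore; download_one, main_shared_session, and the max_concurrency value of 5 are illustrative names and assumptions.

import asyncio
import aiohttp
import aiofiles


async def download_one(session, sem, url):
    # assumed helper: one shared session, semaphore caps concurrent requests
    file_name = url.split("/")[-1]
    async with sem:
        async with session.get(url) as resp:
            content = await resp.content.read()
    async with aiofiles.open(file_name, mode="wb") as f:
        await f.write(content)
    print("saved", file_name)


async def main_shared_session(url_list, max_concurrency=5):
    # max_concurrency=5 is an assumed limit, tune it to the target site
    sem = asyncio.Semaphore(max_concurrency)
    async with aiohttp.ClientSession() as session:
        await asyncio.gather(*(download_one(session, sem, url) for url in url_list))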




