异步

greenlet 早期
yield
gevent
asyncio 装饰器（py3.4）；async/await 关键字（py3.5）

 

装饰器
import asyncio


async def func():
    """Print 1, simulate 2 s of blocking I/O, then print 2.

    The ``await`` hands control back to the event loop, which switches
    to any other runnable task while this one sleeps.
    """
    before, after = 1, 2
    print(before)
    await asyncio.sleep(2)
    print(after)


async def func2():
    """Print 3, simulate 2 s of blocking I/O, then print 4."""
    before, after = 3, 4
    print(before)
    await asyncio.sleep(2)
    print(after)


# Wrap each coroutine in a Task so the event loop can interleave them.
tasks = [
    asyncio.ensure_future(func()),
    asyncio.ensure_future(func2()),
]
# NOTE(review): get_event_loop() is deprecated for this use since Python 3.10;
# asyncio.run() is the modern entry point.
loop = asyncio.get_event_loop()
# loop.run_until_complete(func())
loop.run_until_complete(asyncio.wait(tasks))  # run until both tasks finish (~2 s total)

 

import asyncio


@asyncio.coroutine  # NOTE(review): removed in Python 3.11 — this is the pre-3.5 generator style
def func():
    # Print 1, simulate 2 s of I/O with `yield from`, then print 2.
    print(1)
    yield from asyncio.sleep(2)  # on I/O wait, the loop switches to other tasks
    print(2)


@asyncio.coroutine  # NOTE(review): removed in Python 3.11 — this is the pre-3.5 generator style
def func2():
    # Print 3, simulate 2 s of I/O with `yield from`, then print 4.
    print(3)
    yield from asyncio.sleep(2)
    print(4)


# Wrap each generator-based coroutine in a Task so the loop can interleave them.
tasks = [
    asyncio.ensure_future(func()),
    asyncio.ensure_future(func2()),
]
# NOTE(review): get_event_loop() is deprecated for this use since Python 3.10.
loop = asyncio.get_event_loop()
# loop.run_until_complete(func())
loop.run_until_complete(asyncio.wait(tasks))  # run until both tasks finish (~2 s total)

 

 async/await
import asyncio
import aiohttp
 
from bs4 import BeautifulSoup
 
async def fetch_content(url):
    """Fetch *url* over HTTP and return the response body as text.

    NOTE(review): ``header`` is not defined anywhere in this snippet — it
    must be supplied by surrounding code, otherwise this raises NameError
    at call time. Confirm where it is meant to come from.
    """
    async with aiohttp.ClientSession(
        headers=header, connector=aiohttp.TCPConnector(ssl=False)
    ) as session:
        async with session.get(url) as response:
            return await response.text()
 
async def main():
    """Scrape douban's "coming soon" page and print name/date/poster per movie.

    Fetches the index page, parses the movie list, then fetches every
    detail page concurrently with asyncio.gather.
    """
    url = "https://movie.douban.com/cinema/later/beijing/"
    init_page = await fetch_content(url)
    init_soup = BeautifulSoup(init_page, 'lxml')

    movie_names, urls_to_fetch, movie_dates = [], [], []

    all_movies = init_soup.find('div', id="showing-soon")
    for each_movie in all_movies.find_all('div', class_="item"):
        all_a_tag = each_movie.find_all('a')
        all_li_tag = each_movie.find_all('li')

        # Second <a> holds the title text and the detail-page link.
        movie_names.append(all_a_tag[1].text)
        urls_to_fetch.append(all_a_tag[1]['href'])
        movie_dates.append(all_li_tag[0].text)

    # Bare coroutines are fine here: gather() wraps them in Tasks itself.
    tasks = [fetch_content(url) for url in urls_to_fetch]
    # tasks = [asyncio.create_task(fetch_content(url)) for url in urls_to_fetch]
    # create_task is Python 3.7+; on older versions use asyncio.ensure_future.
    # FIX: this assignment was garbled into a comment in the original paste —
    # without it `pages` is undefined below.
    pages = await asyncio.gather(*tasks)

    for movie_name, movie_date, page in zip(movie_names, movie_dates, pages):
        soup_item = BeautifulSoup(page, 'lxml')
        img_tag = soup_item.find('img')  # first <img> is the poster
        print('{} {} {}'.format(movie_name, movie_date, img_tag['src']))


asyncio.run(main())  # new-style entry point (Python 3.7+)

老版本
# Pre-3.7 ("old style") entry point: manage the event loop by hand.
loop = asyncio.get_event_loop()
try:
    # FIX: the original passed the function object `main` instead of calling it;
    # run_until_complete requires a coroutine/future, so it must be main().
    loop.run_until_complete(main())
finally:
    loop.close()

 

 

执行方式的对比
import asyncio
 
async def worker_1():
    """Simulate a 1-second I/O-bound job, logging start and finish."""
    name = 'worker_1'
    print(f'{name} start')
    await asyncio.sleep(1)
    print(f'{name} done')
 
async def worker_2():
    """Simulate a 2-second I/O-bound job, logging start and finish."""
    print('worker_2 start')
    await asyncio.sleep(2)
    # FIX: this print was fused onto the `await` line in the original
    # paste, which is a SyntaxError.
    print('worker_2 done')
 
async def main():
    """Await each worker sequentially — total runtime is the sum (~3 s)."""
    print('before await')
    for idx, worker in ((1, worker_1), (2, worker_2)):
        await worker()
        print(f'awaited worker_{idx}')
 
%time asyncio.run(main())  # %time 是 IPython/Jupyter 魔法命令，普通解释器下不可用
 
########## 输出 ##########
 
before await
worker_1 start
worker_1 done
awaited worker_1
worker_2 start
worker_2 done
awaited worker_2
Wall time: 3 s

 

import asyncio
 
async def worker_1():
    """Pretend to do 1 s of I/O; the prints bracket the sleep."""
    print('worker_1 start')
    delay = 1
    await asyncio.sleep(delay)
    print('worker_1 done')
 
async def worker_2():
    """Pretend to do 2 s of I/O; the prints bracket the sleep."""
    print('worker_2 start')
    delay = 2
    await asyncio.sleep(delay)
    print('worker_2 done')
 
async def main():
    """Start both workers as Tasks so they run concurrently (~2 s total).

    create_task schedules the coroutine immediately; the awaits below only
    wait for completion, they do not serialize the work.
    """
    running = [
        asyncio.create_task(worker_1()),
        asyncio.create_task(worker_2()),
    ]
    print('before await')
    await running[0]
    print('awaited worker_1')
    await running[1]
    print('awaited worker_2')
 
%time asyncio.run(main())  # %time 是 IPython/Jupyter 魔法命令，普通解释器下不可用
 
########## 输出 ##########
 
before await
worker_1 start
worker_2 start
worker_1 done
awaited worker_1
worker_2 done
awaited worker_2
Wall time: 2.01 s

 

主动取消任务
import asyncio


async def worker_1():
    """Sleep 1 s, then return 1."""
    value = 1
    await asyncio.sleep(value)
    return value


async def worker_2():
    """Sleep 2 s, then raise ZeroDivisionError.

    The division by zero is deliberate: it demonstrates how
    gather(return_exceptions=True) captures task exceptions.
    """
    await asyncio.sleep(2)
    numerator, denominator = 2, 0
    return numerator / denominator


async def worker_3():
    """Sleep 3 s, then return 3 (long enough to be cancelled by main)."""
    value = 3
    await asyncio.sleep(value)
    return value


async def main():
    """Cancel the slowest worker after 2 s and print all outcomes.

    Prints [1, ZeroDivisionError(...), CancelledError(...)]:
    worker_1 finished, worker_2 raised, worker_3 was cancelled mid-sleep.
    """
    pending = [
        asyncio.create_task(w())
        for w in (worker_1, worker_2, worker_3)
    ]
    await asyncio.sleep(2)
    pending[2].cancel()  # worker_3 is still sleeping — cancel it
    # return_exceptions=True puts exceptions into the result list instead
    # of propagating them out of gather (avoids raising into this frame).
    res = await asyncio.gather(*pending, return_exceptions=True)
    print(res)


asyncio.run(main())  # Python 3.7+ entry point: creates, runs, and closes its own event loop
posted @ 2020-05-22 13:54  慕沁  阅读(141)  评论(0)    收藏  举报