Used appropriately, a thread pool is a much better way to serve a workload dominated by I/O.
import time
from multiprocessing.dummy import Pool

def request(url):
    print('downloading ->', url)
    time.sleep(2)
    print('finished ->', url)

start = time.time()
urls = [
    'www.baidu.com',
    'www.taobao.com',
    'www.sougou.com'
]

# a pool of 3 worker threads; map blocks until every request has returned
pool = Pool(3)
pool.map(request, urls)
print('total time ->', time.time() - start)
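The same pattern can also be written with the standard-library concurrent.futures API. This is an equivalent sketch (not from the original post) using ThreadPoolExecutor, the more common spelling of a thread pool today:

import time
from concurrent.futures import ThreadPoolExecutor

def request(url):
    print('downloading ->', url)
    time.sleep(2)
    print('finished ->', url)

urls = ['www.baidu.com', 'www.taobao.com', 'www.sougou.com']

start = time.time()
# executor.map works like pool.map: it fans the URLs out over 3 threads
with ThreadPoolExecutor(max_workers=3) as executor:
    list(executor.map(request, urls))
print('total time ->', time.time() - start)   # roughly 2 seconds, not 6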
event_loop: essentially an endless loop; we can register special functions on this event loop and they are executed asynchronously
coroutine: a coroutine, i.e. a function defined with async
task: a task, a further wrapper around a coroutine that also tracks the coroutine's state
future: a task that is going to be executed
async/await: the two keywords you need to understand well (see the sketch right after this list)
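As a quick illustration of async/await before the step-by-step examples (this sketch is not from the original post and assumes Python 3.7+ so asyncio.run is available), one coroutine suspends itself with await and the event loop switches to the other:

import asyncio

async def work(name, seconds):
    print('start ->', name)
    # await hands control back to the event loop while the sleep is pending
    await asyncio.sleep(seconds)
    print('done ->', name)

async def main():
    # run both coroutines concurrently on the same event loop
    await asyncio.gather(work('a', 1), work('b', 1))

asyncio.run(main())   # finishes in about 1 second, not 2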
import asyncio

async def hello(name):
    print('hello->' + name)

# get a coroutine object
c = hello('attila')

# create an event loop
loop = asyncio.get_event_loop()

# register the coroutine object with the event loop and start the loop
loop.run_until_complete(c)
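One detail worth checking for yourself: calling an async def function does not run its body, it only creates the coroutine object, and nothing prints until the loop drives it. A small verification sketch (not from the original post, using the same get_event_loop style as above):

import asyncio

async def hello(name):
    print('hello->' + name)

c = hello('attila')
print(c)   # <coroutine object hello at 0x...> - the body has not run yet

# only now does 'hello->attila' actually print
asyncio.get_event_loop().run_until_complete(c)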
import asyncio

async def hello(name):
    print('hello->' + name)

# get a coroutine object
c = hello('attila')

# create an event loop
loop = asyncio.get_event_loop()

# wrap the coroutine in a task
task = loop.create_task(c)
print(task)   # Task pending

# register the task with the event loop and start the loop
loop.run_until_complete(task)
print(task)   # Task finished
import asyncio

async def hello(name):
    print('hello->' + name)

# get a coroutine object
c = hello('attila')

# wrap the coroutine in a task
task = asyncio.ensure_future(c)

# register the task with the event loop and start the loop
loop = asyncio.get_event_loop()
loop.run_until_complete(task)
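The difference between the two previous examples is small: loop.create_task only accepts a coroutine and ties the task to that specific loop, while asyncio.ensure_future also accepts an existing Task or Future and returns it unchanged. A short check (not from the original post, using the same legacy event-loop API):

import asyncio

async def hello(name):
    return 'hello->' + name

loop = asyncio.get_event_loop()

# create_task: coroutine in, Task bound to this loop out
task = loop.create_task(hello('attila'))

# ensure_future: given something that is already a Task/Future, it is returned as-is
print(asyncio.ensure_future(task) is task)   # True

loop.run_until_complete(task)
print(task.result())   # hello->attila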
import asyncio

def call_back(task):
    print('---->', task.result())

async def hello(name):
    print('hello->' + name)
    return name

# get a coroutine object
c = hello('attila')

# wrap the coroutine in a task
task = asyncio.ensure_future(c)

# bind a callback to the task; the argument passed to call_back is the task it was bound to
task.add_done_callback(call_back)

# register the task with the event loop and start the loop
loop = asyncio.get_event_loop()
loop.run_until_complete(task)
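A callback is not the only way to get at the return value: run_until_complete returns the result of the future it drives, and task.result() can be read once the task has finished. The same example without a callback (a sketch, not from the original post):

import asyncio

async def hello(name):
    print('hello->' + name)
    return name

task = asyncio.ensure_future(hello('attila'))
loop = asyncio.get_event_loop()

# run_until_complete hands back the task's return value directly
result = loop.run_until_complete(task)
print(result)          # attila
print(task.result())   # attila - available because the task is already done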
import time
import asyncio
import aiohttp

async def get_page(url):
    async with aiohttp.ClientSession() as session:
        # anywhere there is an I/O operation, suspend with await
        async with await session.get(url=url) as response:
            page_text = await response.text()
            print(page_text)

start = time.time()

# these URLs point to a test server set up locally; every URL just does time.sleep(2)
urls = [
    'http://127.0.0.1:5000/cat',
    'http://127.0.0.1:5000/dog',
    'http://127.0.0.1:5000/monkey',
    'http://127.0.0.1:5000/cat',
    'http://127.0.0.1:5000/dog',
    'http://127.0.0.1:5000/monkey',
    'http://127.0.0.1:5000/cat',
    'http://127.0.0.1:5000/dog',
    'http://127.0.0.1:5000/monkey',
]

tasks = []
loop = asyncio.get_event_loop()
for url in urls:
    c = get_page(url)
    task = asyncio.ensure_future(c)
    tasks.append(task)

loop.run_until_complete(asyncio.wait(tasks))
print('total time ->', time.time() - start)   # total time -> 2.053046464920044
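The test server itself is not shown in the post. A minimal sketch of one that matches the description above (each route blocks for two seconds) could look like this; the use of Flask is an assumption here, suggested only by the default port 5000, and the route names simply mirror the URLs:

import time
from flask import Flask

app = Flask(__name__)

@app.route('/cat')
@app.route('/dog')
@app.route('/monkey')
def slow_endpoint():
    # simulate a slow I/O-bound endpoint: every request blocks for two seconds
    time.sleep(2)
    return 'ok'

if __name__ == '__main__':
    # threaded=True lets the dev server answer the nine concurrent requests in parallel,
    # which is what makes the total time above come out to ~2 seconds instead of ~18
    app.run(port=5000, threaded=True)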