from concurrent.futures import ThreadPoolExecutor
import requests

URLS = ['http://www.baidu.com', 'http://qq.com', 'http://sina.com']


def task(url, timeout=10):
    # Fetch the URL and return the requests.Response object.
    return requests.get(url, timeout=timeout)


pool = ThreadPoolExecutor(max_workers=3)
'''
The map() method
Besides submit, Executor also provides a map method. It returns an iterator equivalent to
map(func, *iterables), and the results it yields are in the same order as the input iterables.
'''
results = pool.map(task, URLS)
for result in results:
    # print('{0},{1}'.format(result.url, len(result.content)))
    print('%s,%s' % (result.url, len(result.content)))
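'''
For contrast, a minimal sketch (added illustration, not part of the original script):
the same fetches issued with submit(); as_completed() yields each future as soon as it
finishes, i.e. in completion order rather than input order, unlike map(). The pool2 and
futures names below are illustrative only.
'''
from concurrent.futures import as_completed

with ThreadPoolExecutor(max_workers=3) as pool2:
    # Map each submitted future back to its URL so failures can be reported clearly.
    futures = {pool2.submit(task, url): url for url in URLS}
    for future in as_completed(futures):
        url = futures[future]
        try:
            response = future.result()
            print('%s,%s' % (response.url, len(response.content)))
        except requests.RequestException as exc:
            print('%s failed: %s' % (url, exc))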