# threading module
'''
CPython's Global Interpreter Lock (GIL) means only one thread executes Python bytecode at a time;
use multiprocessing to exploit multiple CPUs for CPU-bound work, and use multithreading for
concurrent IO-bound work.
class threading.Thread(group=None, target=None, name=None, args=(), kwargs={}, *, daemon=None)
The Thread class represents an activity that runs in a separate thread of control. Once a thread
object is created, its activity must be started by calling the thread's start() method, which
invokes run() in a separate thread of control.
Once the activity has started, the thread is considered 'alive'. It stops being alive when its
run() method terminates, whether normally or by raising an unhandled exception. The is_alive()
method tests whether the thread is alive.
Other threads can call a thread's join() method; this blocks the calling thread until the thread
whose join() method was called terminates.
Thread lock: Lock; reentrant (recursive) lock: RLock
'''
from threading import Thread, Lock
import threading, time
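# A minimal sketch of the RLock mentioned above: unlike Lock, an RLock can be acquired again by
# the thread that already holds it, as long as it is released once per acquire (demo_rlock is an
# illustrative name).
demo_rlock = threading.RLock()
demo_rlock.acquire()
demo_rlock.acquire()   # re-entering from the same thread does not deadlock
demo_rlock.release()
demo_rlock.release()   # one release per acquire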
# L1 = Lock()  # a lock created here at module level would be shared by every thread
def test(var1, var2='var2'):
    L1 = Lock()  # creating the lock inside the task function makes a separate lock per call, so the threads do not block each other (see the shared-lock sketch below)
L1.acquire()
print('this is a threading fun test var1 is %s, var2 is %s ' % (var1, var2), end='\n')
time.sleep(10)
L1.release()
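# A minimal sketch of the shared-lock variant: create the lock once at module level so both
# threads contend for the same object and the critical sections actually serialize
# (shared_lock and locked_test are illustrative names).
# shared_lock = Lock()
# def locked_test(var1, var2='var2'):
#     with shared_lock:      # the second thread blocks here until the first releases the lock
#         print('locked test var1 is %s, var2 is %s' % (var1, var2))
#         time.sleep(1)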
print(threading.current_thread(), end='\n')
print(threading.get_native_id(), end='\n')
print(threading.main_thread(), end='\n')
t1 = Thread(target=test, args=('t1_var1',), kwargs={'var2': 't1_var2'})
t2 = Thread(target=test, args=('t2_var1',), kwargs={'var2': 't2_var2'})
print('start_time is %s' % time.time())
t1.start()
print('this is t1.is_alive value %s' % t1.is_alive(), end='\n')
print('this is t1.is_alive value %s' % t1.is_alive(), end='\n')
t2.start()
t2.join()
print('end_time is %s' % time.time())
# multiprocessing module
'''
In multiprocessing, a process is spawned by creating a Process object and then calling its
start() method. Process follows the same API as threading.Thread.
'''
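# A minimal sketch of that Thread-like Process API (start/join/is_alive/pid); on Windows it must
# sit under the if __name__ == '__main__' guard, and it relies on the imports and the
# multiprocess_test function defined below.
# p = Process(target=multiprocess_test, args=(3,))
# p.start()
# print('alive:', p.is_alive(), 'pid:', p.pid)
# p.join()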
from multiprocessing import Process, Pool, TimeoutError
import time, os
def multiprocess_test(x):
    print('this is subprocess %s start_time %s' % (os.getpid(), time.time()))
print('main process id :', os.getppid())
print('sub process id:', os.getpid())
print('x square is ', x**2)
time.sleep(2)
    print('this is subprocess %s end_time %s' % (os.getpid(), time.time()))
return x**2
# https://blog.csdn.net/qq_43580193/article/details/105924104  notes on the quirks of debugging multithreaded code on Windows
def mutithreading_square(y):
print('threading info',threading.get_native_id(), 'start_time', time.time() )
time.sleep(5)
print('%d square is %d' % (y, y**2))
print('threading info', threading.get_native_id(), 'end_time', time.time())
def muti_pro_threading_test(y):
with ThreadPoolExecutor(max_workers=5) as executor_Th:
print('this is subprocess %s start_time %s' % (os.getpid(), time.time()))
executor_Th.map(mutithreading_square, range(11, 15))
# Launching parallel tasks
'''
submit(fn, /, *args, **kwargs) schedules the callable fn to be executed as fn(*args, **kwargs)
and returns a Future representing the execution of the callable (see the sketch after the import
below).
map(fn, *iterables, timeout=None, chunksize=1)
Similar to the built-in map(fn, *iterables) except that: the iterables are collected immediately
rather than lazily; and fn is executed asynchronously, so several calls to fn may be made
concurrently.
'''
from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
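# A minimal sketch of submit(): it returns a Future, and result() blocks until the value is ready
# (square_task is an illustrative name).
# def square_task(x):
#     return x * x
# with ThreadPoolExecutor(max_workers=2) as executor:
#     future = executor.submit(square_task, 7)
#     print(future.result())   # prints 49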
# subprocess is the recommended module for running system commands in a child process
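# A minimal sketch of the subprocess usage that note refers to:
# import subprocess
# completed = subprocess.run(['python', '--version'], capture_output=True, text=True)
# print(completed.returncode, completed.stdout)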
if __name__ == '__main__':
    # Multiprocessing with the Process class
    # p1 = Process(target=multiprocess_test, args=(1,))
    # p2 = Process(target=multiprocess_test, args=(2,))
# p1.start()
# p2.start()
    # Multiprocessing with Pool; when debugging on Windows each child process takes roughly 0.01 s to start; the return value is a list of the return values from all the worker calls
# with Pool(processes=3) as pool:
    # pool.map call style
# pool_res = pool.map(multiprocess_test, range(10))
# print('this is pool_res', pool_res, 'this is running time', time.time())
    # for-loop style: i is the return value of each worker's function call
# for i in pool.imap_unordered(multiprocess_test, range(10)):
# print('this is i', i)
    # apply_async style: runs a single task asynchronously; it has no built-in mapping of an argument iterable onto the function
# pool_res1 = pool.apply_async(multiprocess_test, (20,))
# print('pool_res1', pool_res1.get(), 'this is running time', time.time())
    # apply_async style for running multiple tasks asynchronously
# pool_reses = [pool.apply_async(multiprocess_test, (x,)) for x in range(10)]
# print([res.get() for res in pool_reses])
    # concurrent.futures: launch parallel tasks with a thread pool or a process pool
# with ThreadPoolExecutor(max_workers=5) as executor_Th:
# executor_Th.map(multiprocess_test, range(10))
# with ProcessPoolExecutor(max_workers=5) as executor_Pro:
# executor_Pro.map(multiprocess_test, range(10))
    # Test: a process pool whose workers each run a thread pool
with ProcessPoolExecutor(max_workers=5) as executor_Pro:
executor_Pro.map(muti_pro_threading_test, range(5))