Python: multiprocessing

1. Introduction to multiprocessing

  The multiprocessing module supports local and remote concurrency, effectively side-stepping the Global Interpreter Lock by using subprocesses instead of threads. Because of this, it lets the programmer make full use of multiple processors on a given machine. It runs on both Unix and Windows.

2. Creating processes

2.1 Creating a single process

import multiprocessing
import time


def run(name):
    time.sleep(2)
    print("hello", name)

if __name__ == "__main__":
    p = multiprocessing.Process(target=run, args=('bob',))  # create a child process that will call run('bob')
    p.start()  # launch the child
    p.join()   # wait for the child to finish

2.2 Creating multiple processes

import multiprocessing
import time


def run(name):
    time.sleep(2)
    print("hello", name)

if __name__ == "__main__":
    for i in range(10):
        p = multiprocessing.Process(target=run, args=('bob %s' % i,))
        p.start()
#         p.join()  # joining inside the loop would make the children run one at a time
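
  If the parent needs to wait for every child without running them one at a time, a common pattern (a minimal sketch, not from the original post) is to start all of the processes first and join them afterwards:

import multiprocessing
import time

def run(name):
    time.sleep(2)
    print("hello", name)

if __name__ == "__main__":
    children = []
    for i in range(10):
        p = multiprocessing.Process(target=run, args=('bob %s' % i,))
        p.start()           # start every child first
        children.append(p)
    for p in children:      # then wait for all of them to finish
        p.join()
    print("all children finished")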

2.3 Creating a thread inside a process

import multiprocessing
import time
import threading

def thread_run():
    print("threading id :%s" % threading.get_ident())  # identifier of the current thread

def run(name):
    time.sleep(2)
    print("hello", name)
    t = threading.Thread(target=thread_run)  # a thread started inside the child process
    t.start()

if __name__ == "__main__":
    for i in range(10):
        p = multiprocessing.Process(target=run, args=('bob %s' % i,))
        p.start()

2.4 Getting the process ID

from multiprocessing import Process
import os


def info(title):
    print(title)
    print('module name:', __name__)
    print('parent process:', os.getppid())  # PID of the parent process
    print('process id:', os.getpid())       # PID of the current process
    print("\n\n")


def f(name):
    info('\033[31;1mcalled from child process function f\033[0m')
    print('hello', name)

if __name__ == '__main__':
    info('\033[32;1mmain process line\033[0m')

  Output (the PIDs will differ between runs; the parent process here is whatever launched the script, e.g. the shell or IDE):

main process line
module name: __main__
parent process: 6024
process id: 30756

2.5 Getting the parent and child process IDs

from multiprocessing import Process
import os


def info(title):
    print(title)
    print('module name:', __name__)
    print('parent process:', os.getppid())
    print('process id:', os.getpid())
    print("\n\n")


def f(name):
    info('\033[31;1mcalled from child process function f\033[0m')
    print('hello', name)

if __name__ == '__main__':
    info('\033[32;1mmain process line\033[0m')
    p = Process(target=f, args=('bob',))
    p.start()
    # p.join()

  Output (the main process's part is shown below; the child process then prints its own info block, in which "parent process" equals the main process's PID, followed by "hello bob"):

main process line
module name: __main__
parent process: 6024
process id: 30756

3. Inter-process communication

  Memory is not shared between processes. To exchange data between two processes, you can use one of the following mechanisms: Queue, Pipe, or Manager.

3.1 Queue

from multiprocessing import Process, Queue

def f(q):
    q.put([42, None, 'hello'])  # the child puts data onto the shared queue

if __name__ == '__main__':
    q = Queue()
    p = Process(target=f, args=(q,))
    p.start()

    print(q.get())  # blocks until the child has put something
    p.join()
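
  The same Queue can also collect results from several children. The sketch below is my own illustration (the worker function and the squared values are not from the post): each child puts one item, and the parent calls q.get() once per child.

from multiprocessing import Process, Queue

def worker(q, i):
    q.put((i, i * i))  # each child puts one result onto the shared queue

if __name__ == '__main__':
    q = Queue()
    ps = [Process(target=worker, args=(q, i)) for i in range(4)]
    for p in ps:
        p.start()
    results = [q.get() for _ in ps]  # q.get() blocks until an item is available
    for p in ps:
        p.join()
    print(results)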

3.2 Pipe

from multiprocessing import Process, Pipe


def f(conn):
    conn.send([42, None, 'hello from child'])
    conn.send([42, None, 'hello from child3'])
    print("child", conn.recv())  # receives whatever the parent sends back
    conn.close()


if __name__ == '__main__':
    parent_conn, child_conn = Pipe()
    p = Process(target=f, args=(child_conn,))
    p.start()
    print("parent", parent_conn.recv())  # prints "[42, None, 'hello from child']"
    print("parent", parent_conn.recv())  # prints "[42, None, 'hello from child3']"
    parent_conn.send(" from hshs")       # reply sent back to the child
    p.join()

3.3 Manager

from multiprocessing import Process, Manager
import os

def f(d, l):
    d[os.getpid()] = os.getpid()  # each child writes its own PID into the shared dict
    l.append(os.getpid())         # and appends it to the shared list
    print(l)

if __name__ == '__main__':
    with Manager() as manager:
        d = manager.dict()  # a dict that can be shared and passed between processes

        l = manager.list(range(5))  # a list that can be shared and passed between processes
        p_list = []
        for i in range(10):
            p = Process(target=f, args=(d, l))
            p.start()
            p_list.append(p)
        for res in p_list:  # wait for all children to finish
            res.join()

        print(d)
        print(l)

3.4 Process synchronization

from multiprocessing import Process, Lock


def f(l, i):
    l.acquire()               # take the lock so the print output does not interleave
    print('hello world', i)
    l.release()


if __name__ == '__main__':
    lock = Lock()

    for num in range(100):
        Process(target=f, args=(lock, num)).start()
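
  The lock also works as a context manager, so the explicit acquire()/release() pair can be written as a with block. A minimal variation of the example above:

from multiprocessing import Process, Lock

def f(l, i):
    with l:  # acquire() on entry, release() on exit, even if an exception is raised
        print('hello world', i)

if __name__ == '__main__':
    lock = Lock()
    for num in range(10):
        Process(target=f, args=(lock, num)).start()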

4. Process pools

4.1 Serial execution: apply

from multiprocessing import Process, Pool, freeze_support
import time
import os

def Foo(i):
    time.sleep(2)
    print("in process", os.getpid())
    return i + 100

def Bar(arg):
    print('-->exec done:', arg, os.getpid())

if __name__ == '__main__':
    # freeze_support()
    pool = Pool(processes=3)  # allow at most 3 worker processes at a time
    print("main process", os.getpid())
    for i in range(10):
        pool.apply(func=Foo, args=(i,))  # apply blocks, so the tasks run one after another
    print('end')
    pool.close()
    pool.join()  # wait for the worker processes to finish before exiting
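
  Because apply blocks until each call returns, it also hands back Foo's return value directly, which is why the ten tasks above take roughly 20 seconds in total. A minimal sketch of capturing that return value (shortened to three tasks):

from multiprocessing import Pool
import time

def Foo(i):
    time.sleep(2)
    return i + 100

if __name__ == '__main__':
    with Pool(processes=3) as pool:
        for i in range(3):
            print(pool.apply(Foo, (i,)))  # each call blocks about 2 seconds; prints 100, 101, 102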

4.2 Parallel execution: apply_async

from multiprocessing import Process, Pool, freeze_support
import time
import os

def Foo(i):
    time.sleep(2)
    print("in process", os.getpid())
    return i + 100

def Bar(arg):
    print('-->exec done:', arg, os.getpid())

if __name__ == '__main__':
    # freeze_support()
    pool = Pool(processes=3)  # allow at most 3 worker processes at a time
    print("main process", os.getpid())
    for i in range(10):
        pool.apply_async(func=Foo, args=(i,), callback=Bar)  # Bar is called in the main process with Foo's return value
        # pool.apply_async(func=Foo, args=(i,))  # the same, without a callback
    print('end')
    pool.close()
    pool.join()  # without close() and join() the program would exit before the pending tasks finish
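
  If no callback is needed, apply_async returns an AsyncResult whose get() method hands back Foo's return value later. A minimal sketch (same Foo as above) of collecting all ten results:

from multiprocessing import Pool
import time

def Foo(i):
    time.sleep(2)
    return i + 100

if __name__ == '__main__':
    with Pool(processes=3) as pool:
        results = [pool.apply_async(Foo, (i,)) for i in range(10)]
        print([r.get() for r in results])  # get() blocks until each result is ready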

 

posted @ 2018-10-01 12:43  RobotsRising