1.多进程multiprocessing
- Python 多线程不适合 CPU 密集型任务,适合 IO 密集型任务:对于大量高密度运算,多线程不一定能提高效率;多线程更适合轻量级的多个并发任务。
# Python threads do not help CPU-bound work (GIL); they suit lightweight /
# I/O-bound tasks. This demo therefore fans heavy work out across processes,
# each of which runs its own small pool of threads.
import multiprocessing  # process-based parallelism
import threading        # thread-based concurrency


def thread_run(index_process, index_thread):
    """Report which (process, thread) pair is executing, plus the thread id."""
    print("进程:", index_process, "线程:", index_thread, " thread id:", threading.get_ident())


def run(index_process):
    """Entry point of one worker process: start 10 threads, then wait for all."""
    print('hello', index_process)
    workers = [
        threading.Thread(target=thread_run, args=(index_process, idx))
        for idx in range(10)
    ]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()


print(__name__)

if __name__ == '__main__':
    # Launch 8 processes, then wait for every one of them to finish.
    procs = [multiprocessing.Process(target=run, args=(n,)) for n in range(8)]
    for proc in procs:
        proc.start()
    for proc in procs:
        proc.join()
2. 子父进程id
# Every child process is launched by a parent process; this demo prints both ids.
from multiprocessing import Process
import os


def info(title):
    """Print a banner plus this process's identity (parent pid and own pid)."""
    print(title)
    for label, value in (('module name:', __name__),
                         ('parent process:', os.getppid()),   # pid of whoever launched us
                         ('process id:', os.getpid())):
        print(label, value)
    print("\n\n")


def f(name):
    """Child-process target: show process identity, then greet."""
    info('\033[31;1mcalled from child process function f\033[0m')
    print('hello', name)


if __name__ == '__main__':
    info('\033[32;1mmain process line\033[0m')
    child = Process(target=f, args=('bob',))
    child.start()
    # child.join()  # intentionally left out in the original demo
3.进程通信队列queue,与线程queue不同
# A multiprocessing.Queue is process-safe and is a *different* object from the
# thread-level queue.Queue.
from multiprocessing import Process, Queue


def f(qq):
    """Child-process target: report the queue size, then enqueue a payload."""
    # NOTE(review): Queue.qsize() raises NotImplementedError on some platforms
    # (e.g. macOS) — confirm the target OS.
    print("in child:", qq.qsize())
    qq.put([42, None, 'hello'])


if __name__ == '__main__':
    shared = Queue()
    shared.put("test123")
    worker = Process(target=f, args=(shared,))
    worker.start()
    worker.join()
    # After join() the child has exited, so both items are already queued.
    print("444", shared.get_nowait())
    print("444", shared.get_nowait())
4.进程通信管道
# Pipe() returns a pair of connection objects; each end can both send and
# receive, giving two-way communication between the processes that hold them.
from multiprocessing import Process, Pipe


def f(conn):
    """Child-process target: send two messages, echo one reply, then close."""
    conn.send([42, None, 'hello from child'])
    conn.send([42, None, 'hello from child2'])
    print("from parent:", conn.recv())  # blocks until the parent sends
    conn.close()


if __name__ == '__main__':
    parent_conn, child_conn = Pipe()
    worker = Process(target=f, args=(child_conn,))
    worker.start()
    print(parent_conn.recv())  # first child message
    print(parent_conn.recv())  # second child message
    parent_conn.send("ABCDE")  # reply the child is waiting on
    worker.join()
5.进程间共享
from multiprocessing import Process, Manager
import os


def f(d, l):
    """Record this process's pid in the shared dict and list, then print the list."""
    d[os.getpid()] = os.getpid()
    l.append(os.getpid())
    print(l)


if __name__ == '__main__':
    # Manager proxies already serialize access internally — no explicit lock
    # is needed here, with or without the `with` statement.
    with Manager() as manager:
        shared_dict = manager.dict()          # dict shared across processes
        shared_list = manager.list(range(5))  # list shared across processes
        children = [Process(target=f, args=(shared_dict, shared_list)) for _ in range(10)]
        for child in children:
            child.start()
        for child in children:  # wait for every child before reading results
            child.join()
        print(shared_dict)
        print(shared_list)
6.进程锁,目的在于打印时不会乱,不会出现打印插值
# The process lock keeps the printed lines from interleaving: the order of i
# is still arbitrary, but each print happens atomically.
from multiprocessing import Process, Lock


def f(l, i):
    """Print one greeting while holding the shared lock."""
    with l:
        print('hello world', i)


if __name__ == '__main__':
    lock = Lock()
    for num in range(100):
        Process(target=f, args=(lock, num)).start()
7.进程池,用于限制一次性加载的进程数,防止一次性暴力加载的进程数过多导致cpu瘫痪
# A process pool caps how many worker processes run at once, so a burst of
# submitted jobs cannot swamp the CPU with processes.
from multiprocessing import Process, Pool, freeze_support
import os, time


def Foo(i):
    """Worker job: pretend to work for half a second, then return i + 100."""
    time.sleep(0.5)
    print("in process", os.getpid())
    return i + 100


def Bar(arg):
    """Callback run in the parent process with Foo's return value."""
    print('-->exec done:', arg, os.getpid())


if __name__ == '__main__':
    # freeze_support() would be needed for frozen Windows executables.
    pool = Pool(processes=2)  # at most 2 workers run concurrently
    print("主进程", os.getpid())
    for i in range(10):
        # apply_async is non-blocking; the callback fires in the parent.
        # (pool.apply(...) would run the jobs serially instead.)
        pool.apply_async(func=Foo, args=(i,), callback=Bar)
    print('end')
    pool.close()  # must close before join
    pool.join()   # wait for all pooled jobs; without this the script exits immediately