# multiprocessing.py
import os

print 'Process (%s) start...' % os.getpid()
# os.fork() is only available on Unix/Linux/macOS; it returns 0 in the
# child process and the child's pid in the parent process.
pid = os.fork()
if pid == 0:
    print 'I am child process (%s) and my parent is %s.' % (os.getpid(), os.getppid())
else:
    print 'I (%s) just created a child process (%s).' % (os.getpid(), pid)
#!/usr/local/python27/bin/python2.7
# coding=utf8
# noinspection PyUnresolvedReferences
from multiprocessing import Process
import time

def f(n):
    time.sleep(1)
    print n*n

# Start ten child processes, each printing the square of its argument.
for i in range(10):
    p = Process(target=f, args=[i, ])
    p.start()
Comparing multiple processes with multiple threads: memory is not shared between processes, but it is shared between threads.

#!/usr/local/python27/bin/python2.7
# coding=utf8
# noinspection PyUnresolvedReferences
from multiprocessing import Process
import threading
import time

lock = threading.Lock()

def run(info_list, n):
    # A threading.Lock only protects threads inside one process; each forked
    # child process gets its own copy of both the lock and info_list.
    lock.acquire()
    info_list.append(n)
    lock.release()
    print('%s\n' % info_list)

info = []
for i in range(10):
    # target is the function the child process runs, args are its arguments
    p = Process(target=run, args=[info, i])
    p.start()
    # wait a moment for the child process so the output stays tidy
    time.sleep(1)

print('------------threading--------------')
for i in range(10):
    p = threading.Thread(target=run, args=[info, i])
    p.start()
Sharing data between processes with multiprocessing.Queue.

#!/usr/local/python27/bin/python2.7
# coding=utf8
# noinspection PyUnresolvedReferences
from multiprocessing import Process, Queue
import time

def write(q):
    # Producer: put a few items on the queue.
    for i in ['A', 'B', 'C', 'D', 'E']:
        print('Put %s to queue' % i)
        q.put(i)
        time.sleep(0.5)
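The reader side and the driver code of this Queue example appear to be missing; a minimal sketch of what they might look like, assuming a read function that simply drains the queue and a None sentinel to stop it (the name read and the sentinel are assumptions, not the author's code):

def read(q):
    # (assumed) consumer: take items off the queue until the sentinel arrives
    while True:
        value = q.get()
        if value is None:
            break
        print('Get %s from queue' % value)

if __name__ == '__main__':
    q = Queue()
    pw = Process(target=write, args=(q,))
    pr = Process(target=read, args=(q,))
    pw.start()
    pr.start()
    pw.join()       # wait until the producer has put everything
    q.put(None)     # sentinel telling the consumer to stop
    pr.join()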
p = Process(target=f,args=(num,arr,raw_list))
p.start()
p.join()
print(num.value)
print(arr[:])
print(raw_list)
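The num, arr and raw_list lines above look like the tail of a multiprocessing.Value / multiprocessing.Array demonstration whose setup was lost. A minimal self-contained sketch of that pattern follows; the body of f and the initial values are assumptions. The point is that Value and Array live in shared memory, so the child's changes are visible to the parent, while the plain raw_list is merely copied into the child and stays unchanged in the parent:

from multiprocessing import Process, Value, Array

def f(num, arr, raw_list):
    # (assumed) body of the worker
    num.value = 3.14            # shared: the parent sees this change
    for i in range(len(arr)):
        arr[i] = -arr[i]        # shared: the parent sees this change
    raw_list.reverse()          # not shared: only the child's copy changes

if __name__ == '__main__':
    num = Value('d', 0.0)            # a shared double
    arr = Array('i', range(10))      # a shared array of ints
    raw_list = list(range(10))       # an ordinary list, not shared
    p = Process(target=f, args=(num, arr, raw_list))
    p.start()
    p.join()
    print(num.value)    # changed by the child
    print(arr[:])       # changed by the child
    print(raw_list)     # unchanged in the parent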
Execution result:

Manager
#!/usr/local/python27/bin/python2.7
# coding=utf8
# noinspection PyUnresolvedReferences
from multiprocessing import Process, Manager

def f(d, l):
    # d and l are Manager proxies, so these changes are visible in the parent.
    d[1] = '1'
    d['aa'] = 'hello World'
    l.reverse()

if __name__ == '__main__':
    manager = Manager()
    d = manager.dict()
    l = manager.list(range(10))
    p = Process(target=f, args=(d, l))
    p.start()
    p.join()
    print(d)
    print(l)
Execution result:

Pool (process pool)
Used to create child processes in batches; the number of child processes can be controlled flexibly.
#!/usr/local/python27/bin/python2.7
# coding=utf8
# noinspection PyUnresolvedReferences
from multiprocessing import Pool
import time

def f(x):
    print x*x
    time.sleep(2)
    return x*x

# Define how many worker processes the pool starts.
pool = Pool(processes=5)
res_list = []
for i in range(10):
    # apply_async submits the task and returns immediately (asynchronous, parallel).
    # For synchronous behaviour you can call res.get() right after each submission,
    # or use Pool.apply instead -- see the sketch after this example.
    res = pool.apply_async(f, [i, ])
    print('-------: %s' % i)
    res_list.append(res)

pool.close()
pool.join()
for r in res_list:
    print(r.get(timeout=5))
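For comparison with apply_async above, here is a minimal sketch of the synchronous options mentioned in the comment: calling get() right after each submission, using the blocking Pool.apply, or using Pool.map, which waits for all results. The worker f here is illustrative and only returns x*x, so the sketch also runs unchanged on Python 3:

from multiprocessing import Pool

def f(x):
    # illustrative worker, mirroring the f above without the print/sleep
    return x * x

if __name__ == '__main__':
    pool = Pool(processes=5)

    # 1) Submit asynchronously but wait immediately -- effectively serial.
    for i in range(10):
        res = pool.apply_async(f, [i])
        print(res.get())            # blocks until this one task is finished

    # 2) Pool.apply is the blocking counterpart of apply_async.
    print(pool.apply(f, [7]))

    # 3) Pool.map distributes the work and returns when everything is done.
    print(pool.map(f, range(10)))

    pool.close()
    pool.join()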
#!/usr/local/python27/bin/python2.7
# coding=utf8
# noinspection PyUnresolvedReferences
from threading import Thread
import time

def f(n):
    time.sleep(1)
    num = n*n
    print('%s\n' % num)

l1 = range(10)
for i in l1:
    p = Thread(target=f, args=(i,))
    p.start()
#!/usr/local/python27/bin/python2.7
# coding=utf8
# noinspection PyUnresolvedReferences
import time, threading

balance = 0

def change_it(n):
    # Add first, then subtract -- the result should stay 0:
    global balance
    balance = balance + n
    balance = balance - n

def run_thread(n):
    for i in range(100000):
        change_it(n)