我正在使用多处理函数进行一些测试,在调用连接函数后,我面临一个冻结问题
import time
import math
import pandas as pd
import numpy as np
from multiprocessing import Process, Queue, Lock, Value
def writeDF(start,end,td,lock,q):
print('write from ',start,' ',end)
for i in range(start,end):
lock.acquire()
q.put(td.iloc[i,:])
lock.release()
print('function writeDF completed')
if __name__ == '__main__':
td=pd.read_csv(r'C:\Users\dorian\Desktop\Analyzer.txt', encoding = "ISO-8859-1", index_col="Start", parse_dates=True, sep=',')
td=td[0:10]
jobs=[]
q = Queue()
lock = Lock()
start = time.time()
begin=0
stop=td.shape[0]/2
for i in range(0,2):
if len(jobs) < 10:
t = Process(target=writeDF, args=(int(begin),int(stop),td, lock,q))
t.start()
jobs.append(t)
begin=stop
stop=stop+(td.shape[0]/2)
else:
t = jobs.pop(0)
t.join()
for p in jobs:
print('try to join element:',p)
p.join()
print('element is join')
l=[]
while not q.empty():
l.append(q.get())
df=pd.DataFrame(l)
end = time.time()
print('Value:',df.shape)
print('2 process Time taken in seconds -', end - start)
实际结果是程序在join()语句上冻结,但是由于start()在join()之前,所以不应该阻止它
multiprocessing.Queue
队列的容量受到用于实现它们的底层操作系统级管道的限制。这意味着你不能等到作者写完后再阅读。写入程序和读取器必须并发执行才能安全地使用multiprocessing.Queue
。你没那么做相关问题 更多 >
编程相关推荐