A very confusing problem with Python multiprocessing

Posted 2024-09-24 12:27:01


I ran into a problem in a file-filtering job. For example, when I run this code from scratch it is slow; once it has reached global_step = 100000 I kill it. When I run it again it is fast up to global_step = 100000, and after that it slows down again. I don't understand why this happens. Can anyone give me a suggestion? Sorry, my English is poor.

from multiprocessing import Queue, Process, Value
from queue import Empty

read_dir = '..'
write_dir = '..'
dict_dir = '..'
Q_read = Queue(10000)
Q_write = Queue(10000)
global_step = Value('i', 0)

def Push_queue(Q_read, r_dir):
    # Producer: feed every line of the input file into the read queue
    with open(r_dir, 'r') as f:
        for line in f:
            Q_read.put(line)

def Write_from_Queue(Q_write, w_dir):
    # Writer: drain the write queue; give up after 30 s without new items
    fw = open(w_dir, 'w')
    while True:
        try:
            line = Q_write.get(timeout=30)
            fw.write(line)
            fw.flush()
        except Empty:  # a bare except here would also swallow real errors
            fw.close()
            return



def asy_run(Q_read, Q_write, global_step, char2ind_dict):
    # Worker: take a line from the read queue, transform it, push it to the write queue
    while True:
        try:
            line = Q_read.get(timeout=30)
        except Empty:
            return
        #########################

        line = .......do something

        #########################
        Q_write.put(line)
        with global_step.get_lock():  # += on a shared Value is not atomic without the lock
            global_step.value += 1

def main_run(num, char2ind_dict):
    process_list = []
    process_push = Process(target=Push_queue, args=(Q_read, read_dir))
    process_push.start()
    for i in range(num):
        process_i = Process(target=asy_run, args=(Q_read, Q_write, global_step, char2ind_dict))
        process_i.start()
        process_list.append(process_i)
    process_write = Process(target=Write_from_Queue, args=(Q_write, write_dir))
    process_write.start()
    process_push.join()
    # Note: multiprocessing.Queue has no join(); joining the queues themselves would require a JoinableQueue
    for p in process_list:
        p.join()
    process_write.join()

if __name__ == '__main__':
    char2ind_dict = get_dict(dict_dir)  # get_dict (not shown) loads the dictionary
    main_run(50, char2ind_dict)
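
For reference, here is a smaller self-contained sketch of the same producer / worker / writer layout with everything file-specific stubbed out. The names producer/worker/writer, the dummy line count and the .upper() step are only placeholders, not my real filtering code; it just reproduces the structure above and can be run as-is to see the timing behavior:

from multiprocessing import Process, Queue, Value
from queue import Empty

def producer(q_read, n_lines):
    # stand-in for Push_queue: push n_lines dummy lines
    for i in range(n_lines):
        q_read.put('line %d\n' % i)

def worker(q_read, q_write, global_step):
    # stand-in for asy_run: .upper() is a placeholder for the real per-line processing
    while True:
        try:
            line = q_read.get(timeout=5)
        except Empty:
            return
        q_write.put(line.upper())
        with global_step.get_lock():
            global_step.value += 1

def writer(q_write):
    # stand-in for Write_from_Queue: just count what arrives
    written = 0
    while True:
        try:
            q_write.get(timeout=5)
            written += 1
        except Empty:
            print('writer done, wrote', written, 'lines')
            return

if __name__ == '__main__':
    q_read, q_write = Queue(10000), Queue(10000)
    global_step = Value('i', 0)
    p_push = Process(target=producer, args=(q_read, 200000))
    p_write = Process(target=writer, args=(q_write,))
    workers = [Process(target=worker, args=(q_read, q_write, global_step)) for _ in range(8)]
    p_push.start()
    p_write.start()
    for w in workers:
        w.start()
    p_push.join()
    for w in workers:
        w.join()
    p_write.join()
    print('global_step =', global_step.value)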

Tags: run, read, queue, def, step, dir, line, process