
python - Adding data to a Queue from multiple processes


I am trying to call the same process multiple times and capture the returned values.

import time
from time import sleep
from multiprocessing import Process, Queue
from multiprocessing import Pool

def main():
    # Call processes here.
    var1 = 100000000
    var2 = 1000000
    var3 = 100000

    q = Queue()

    p1 = Process(target=call_pool, args=(q, var1))
    p2 = Process(target=call_pool, args=(q, var2))
    p3 = Process(target=call_pool, args=(q, var3))

    p1.start()
    p2.start()
    p3.start()

    p1.join()
    p2.join()
    p3.join()

    print(q.get())  # only retrieves one of the three results

def call_pool(queue, var):
    # Call pool here
    queue.put(len(pool_gen(var)))

def pool_gen(var):
    pool = Pool()
    data = pool.map(f, range(var))
    return data

def f(x):
    return x*x*x*x

if __name__ == '__main__':
    start = time.time()
    main()
    end = time.time()
    print(end - start)

I don't think the Queue has an append function. Do I need to define three separate queues and put the data three times?
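For reference, a single multiprocessing.Queue can receive put() calls from several processes; the parent only needs to call get() once per expected result. A minimal sketch of that pattern (the worker function here is illustrative, not taken from the post above):

from multiprocessing import Process, Queue

def worker(q, n):
    # Each process puts exactly one result on the shared queue.
    q.put(n * n)

def demo():
    q = Queue()
    procs = [Process(target=worker, args=(q, n)) for n in (2, 3, 4)]
    for p in procs:
        p.start()
    # Drain one item per worker; items arrive in completion order.
    results = [q.get() for _ in procs]
    for p in procs:
        p.join()
    print(results)  # e.g. [4, 9, 16], possibly in a different order

if __name__ == '__main__':
    demo()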

Best Answer

I think the answer is to use a Manager...

import time
from time import sleep
from multiprocessing import Process, Manager
from multiprocessing import Pool

def main():
    # Call processes here.
    var1 = 100000
    var2 = 100000
    var3 = 100000

    manager = Manager()
    d = manager.list()

    p1 = Process(target=call_pool, args=(d, var1))
    p2 = Process(target=call_pool, args=(d, var2))
    p3 = Process(target=call_pool, args=(d, var3))

    p1.start()
    p2.start()
    p3.start()

    p1.join()
    p2.join()
    p3.join()

    print(d)  # the shared list now holds one length per process

def call_pool(d, var):
    # Call pool here
    d.append(len(pool_gen(var)))

def pool_gen(var):
    pool = Pool()
    data = pool.map(f, range(var))
    return data

def f(x):
    return x*x*x*x

if __name__ == '__main__':
    start = time.time()
    main()
    end = time.time()
    print(end - start)
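One usage note on the Manager version: `d` is a ListProxy, and the three lengths are appended in completion order. A small, hypothetical follow-up (not part of the original answer) that copies the proxy into an ordinary local list before printing:

# Hypothetical addition: copy the shared ListProxy into a plain list
# once all workers have joined.
results = list(d)       # e.g. [100000, 100000, 100000] for the vars above
print(sorted(results))  # sort if a deterministic order is needed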

For more on python - adding data to a queue from multiple processes, see the similar question on Stack Overflow: https://stackoverflow.com/questions/54848895/
