
python - Unable to share an LDAP object with child processes using Python multiprocessing


I want one child process to send LDAP queries and another child process to retrieve the responses. The problem appears when sharing the LDAP object between the two processes. Can someone point out a solution?

import ldap
from multiprocessing import Process


def send_ldap(ldap_conn):
    print ldap_conn

def receive_ldap(ldap_conn):
    print ldap_conn

def main():
    ldap_conn = ldap.initialize('ldap://abc:12345')
    ldap_sender = Process(target=send_ldap, args=(ldap_conn,))
    ldap_receiver = Process(target=receive_ldap, args=(ldap_conn,))
    ldap_sender.start()  # the pickling error below is raised here


if __name__ == '__main__':
    main()

The error is related to the pickle module (the output below interleaves the parent's traceback with the child's):

Traceback (most recent call last):
  File "t.py", line 22, in <module>
    main()
  File "t.py", line 16, in main
    ldap_sender.start()
  File "c:\python27\lib\multiprocessing\process.py", line 130, in start
    self._popen = Popen(self)
  File "c:\python27\lib\multiprocessing\forking.py", line 277, in __init__
    dump(process_obj, to_child, HIGHEST_PROTOCOL)
  File "c:\python27\lib\multiprocessing\forking.py", line 199, in dump
    ForkingPickler(file, protocol).dump(obj)
  File "c:\python27\lib\pickle.py", line 224, in dump
    self.save(obj)
  File "c:\python27\lib\pickle.py", line 331, in save
    self.save_reduce(obj=obj, *rv)
  File "c:\python27\lib\pickle.py", line 419, in save_reduce
    save(state)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 649, in save_dict
    self._batch_setitems(obj.iteritems())
  File "c:\python27\lib\pickle.py", line 681, in _batch_setitems
    save(v)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 548, in save_tuple
    save(element)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 725, in save_inst
    save(stuff)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 649, in save_dict
    self._batch_setitems(obj.iteritems())
  File "c:\python27\lib\pickle.py", line 681, in _batch_setitems
    save(v)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 725, in save_inst
    save(stuff)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 649, in save_dict
    self._batch_setitems(obj.iteritems())
  File "c:\python27\lib\pickle.py", line 681, in _batch_setitems
    save(v)
  File "c:\python27\lib\pickle.py", line 331, in save
    self.save_reduce(obj=obj, *rv)
Traceback (most recent call last):
  File "c:\python27\lib\pickle.py", line 396, in save_reduce
  File "<string>", line 1, in <module>
  File "c:\python27\lib\multiprocessing\forking.py", line 381, in main
    save(cls)
  File "c:\python27\lib\pickle.py", line 286, in save
    self = load(from_parent)
  File "c:\python27\lib\pickle.py", line 1378, in load
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 748, in save_global
    return Unpickler(file).load()
  File "c:\python27\lib\pickle.py", line 858, in load
    (obj, module, name))
pickle.PicklingError: Can't pickle <type 'thread.lock'>: it's not found as thread.lock
    dispatch[key](self)
  File "c:\python27\lib\pickle.py", line 880, in load_eof
    raise EOFError
EOFError
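
The PicklingError line is the real clue: the connection object carries a thread.lock somewhere in its state, and locks cannot be pickled. A minimal sketch of that underlying failure on Python 2.7 (standard library only, no LDAP involved; the names are just for illustration):

import pickle
import threading

# Pickling any thread lock fails; this is the same failure multiprocessing
# hits while serializing the LDAP connection for the child process on Windows.
lock = threading.Lock()
try:
    pickle.dumps(lock, pickle.HIGHEST_PROTOCOL)
except Exception as exc:
    print exc  # complains that <type 'thread.lock'> cannot be pickled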

Best Answer

The problem is that the ldap_conn object is not picklable, which is required for sending it between processes on Windows. It is not picklable because it internally holds some threading.Lock objects that cannot be pickled. The library does offer ReconnectLDAPObject, which is supposed to be picklable, but it is broken and is not picklable either. However, we can work around this by subclassing it and fixing the bug (it forgets to remove one of its internal locks before pickling):

from ldap.ldapobject import ReconnectLDAPObject
from multiprocessing import Process
import ldap

class PicklableLDAPObject(ReconnectLDAPObject):
    def __getstate__(self):
        # Drop the internal lock that ReconnectLDAPObject forgets to exclude,
        # so the remaining state can be pickled.
        d = ReconnectLDAPObject.__getstate__(self)
        del d['_reconnect_lock']
        return d

    def __setstate__(self, d):
        # Recreate the lock before restoring the rest of the state.
        self._reconnect_lock = ldap.LDAPLock(desc='reconnect lock within %s' % (repr(self)))
        ReconnectLDAPObject.__setstate__(self, d)

def send_ldap(ldap_conn):
    print ldap_conn

def receive_ldap(ldap_conn):
    print ldap_conn

def main():
    #ldap_conn = ldap.initialize('ldap://abc:12345')
    ldap_conn = PicklableLDAPObject('ldap://abc:12345')
    ldap_sender = Process(target=send_ldap, args=(ldap_conn,))
    ldap_receiver = Process(target=receive_ldap, args=(ldap_conn,))
    ldap_sender.start()

if __name__ == '__main__':
    main()

Now the object pickles cleanly, which means it should be usable for multiprocessing purposes.
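
As a quick sanity check (a sketch, not taken from the original answer), the patched object can be round-tripped through pickle by hand, which is exactly what multiprocessing does when spawning the child on Windows. Note that ReconnectLDAPObject re-establishes the connection when it is unpickled, so the loads() step assumes the ldap://abc:12345 server from the question is actually reachable:

import pickle

ldap_conn = PicklableLDAPObject('ldap://abc:12345')

# Serializing no longer trips over the internal reconnect lock...
data = pickle.dumps(ldap_conn, pickle.HIGHEST_PROTOCOL)

# ...and deserializing rebuilds a usable connection object, the same way the
# child process receives it (the reconnect happens here, so the server must
# be reachable for this step).
restored = pickle.loads(data)
print restored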

For "python - Unable to share an LDAP object with child processes using Python multiprocessing", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/26060426/
