# pickle.py -- Panda3D's drop-in replacement for the standard pickle module.
  1. """ This module extends standard Python's pickle module so that it is
  2. capable of writing more efficient pickle files that contain Panda
  3. objects with shared pointers. In particular, a single Python
  4. structure that contains many NodePaths into the same scene graph will
  5. write the NodePaths correctly when used with this pickle module, so
  6. that when it is unpickled later, the NodePaths will still reference
  7. into the same scene graph together.
  8. If you use the standard pickle module instead, the NodePaths will each
  9. duplicate its own copy of its scene graph.
  10. This is necessary because the standard pickle module doesn't provide a
  11. mechanism for sharing context between different objects written to the
  12. same pickle stream, so each NodePath has to write itself without
  13. knowing about the other NodePaths that will also be writing to the
  14. same stream. This replacement module solves this problem by defining
  15. a ``__reduce_persist__()`` replacement method for ``__reduce__()``,
  16. which accepts a pointer to the Pickler object itself, allowing for
  17. shared context between all objects written by that Pickler.
  18. Unfortunately, cPickle cannot be supported, because it does not
  19. support extensions of this nature. """
  20. __all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
  21. "Unpickler", "dump", "dumps", "load", "loads",
  22. "HIGHEST_PROTOCOL", "DEFAULT_PROTOCOL"]
  23. import sys
  24. from panda3d.core import BamWriter, BamReader, TypedObject
  25. from copyreg import dispatch_table
  26. # A funny replacement for "import pickle" so we don't get confused
  27. # with the local pickle.py.
  28. pickle = __import__('pickle')
  29. HIGHEST_PROTOCOL = pickle.HIGHEST_PROTOCOL
  30. DEFAULT_PROTOCOL = pickle.DEFAULT_PROTOCOL
  31. PickleError = pickle.PickleError
  32. PicklingError = pickle.PicklingError
  33. UnpicklingError = pickle.UnpicklingError
  34. BasePickler = pickle._Pickler
  35. BaseUnpickler = pickle._Unpickler
  36. class Pickler(BasePickler): # type: ignore[misc, valid-type]
  37. def __init__(self, *args, **kw):
  38. self.bamWriter = BamWriter()
  39. self._canonical = {}
  40. BasePickler.__init__(self, *args, **kw)
  41. def clear_memo(self):
  42. BasePickler.clear_memo(self)
  43. self._canonical.clear()
  44. self.bamWriter = BamWriter()
  45. # We have to duplicate most of the save() method, so we can add
  46. # support for __reduce_persist__().
  47. def save(self, obj, save_persistent_id=True):
  48. if self.proto >= 4:
  49. self.framer.commit_frame()
  50. # Check for persistent id (defined by a subclass)
  51. pid = self.persistent_id(obj)
  52. if pid is not None and save_persistent_id:
  53. self.save_pers(pid)
  54. return
  55. # Check if this is a Panda type that we've already saved; if so, store
  56. # a mapping to the canonical copy, so that Python's memoization system
  57. # works properly. This is needed because Python uses id(obj) for
  58. # memoization, but there may be multiple Python wrappers for the same
  59. # C++ pointer, and we don't want that to result in duplication.
  60. t = type(obj)
  61. if issubclass(t, TypedObject.__base__):
  62. canonical = self._canonical.get(obj.this)
  63. if canonical is not None:
  64. obj = canonical
  65. else:
  66. # First time we're seeing this C++ pointer; save it as the
  67. # "canonical" version.
  68. self._canonical[obj.this] = obj
  69. # Check the memo
  70. x = self.memo.get(id(obj))
  71. if x:
  72. self.write(self.get(x[0]))
  73. return
  74. # Check the type dispatch table
  75. f = self.dispatch.get(t)
  76. if f:
  77. f(self, obj) # Call unbound method with explicit self
  78. return
  79. # Check for a class with a custom metaclass; treat as regular class
  80. try:
  81. issc = issubclass(t, type)
  82. except TypeError: # t is not a class (old Boost; see SF #502085)
  83. issc = 0
  84. if issc:
  85. self.save_global(obj)
  86. return
  87. # Check copy_reg.dispatch_table
  88. reduce = dispatch_table.get(t)
  89. if reduce:
  90. rv = reduce(obj)
  91. else:
  92. # New code: check for a __reduce_persist__ method, then
  93. # fall back to standard methods.
  94. reduce = getattr(obj, "__reduce_persist__", None)
  95. if reduce:
  96. rv = reduce(self)
  97. else:
  98. # Check for a __reduce_ex__ method, fall back to __reduce__
  99. reduce = getattr(obj, "__reduce_ex__", None)
  100. if reduce:
  101. rv = reduce(self.proto)
  102. else:
  103. reduce = getattr(obj, "__reduce__", None)
  104. if reduce:
  105. rv = reduce()
  106. else:
  107. raise PicklingError("Can't pickle %r object: %r" %
  108. (t.__name__, obj))
  109. # Check for string returned by reduce(), meaning "save as global"
  110. if type(rv) is str:
  111. self.save_global(obj, rv)
  112. return
  113. # Assert that reduce() returned a tuple
  114. if type(rv) is not tuple:
  115. raise PicklingError("%s must return string or tuple" % reduce)
  116. # Assert that it returned an appropriately sized tuple
  117. l = len(rv)
  118. if not (2 <= l <= 5):
  119. raise PicklingError("Tuple returned by %s must have "
  120. "two to five elements" % reduce)
  121. # Save the reduce() output and finally memoize the object
  122. self.save_reduce(obj=obj, *rv)
  123. class Unpickler(BaseUnpickler): # type: ignore[misc, valid-type]
  124. def __init__(self, *args, **kw):
  125. self.bamReader = BamReader()
  126. BaseUnpickler.__init__(self, *args, **kw)
  127. # Duplicate the load_reduce() function, to provide a special case
  128. # for the reduction function.
  129. def load_reduce(self):
  130. stack = self.stack
  131. args = stack.pop()
  132. func = stack[-1]
  133. # If the function name ends with "_persist", then assume the
  134. # function wants the Unpickler as the first parameter.
  135. func_name = func.__name__
  136. if func_name.endswith('_persist') or func_name.endswith('Persist'):
  137. value = func(self, *args)
  138. else:
  139. # Otherwise, use the existing pickle convention.
  140. value = func(*args)
  141. stack[-1] = value
  142. BaseUnpickler.dispatch[pickle.REDUCE[0]] = load_reduce
  143. # Shorthands
  144. from io import BytesIO
  145. def dump(obj, file, protocol=None):
  146. Pickler(file, protocol).dump(obj)
  147. def dumps(obj, protocol=None):
  148. file = BytesIO()
  149. Pickler(file, protocol).dump(obj)
  150. return file.getvalue()
  151. def load(file):
  152. return Unpickler(file).load()
  153. def loads(str):
  154. file = BytesIO(str)
  155. return Unpickler(file).load()