pickle.py 5.2 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157
""" This module extends standard Python's pickle module so that it is
capable of writing more efficient pickle files that contain Panda
objects with shared pointers.  In particular, a single Python
structure that contains many NodePaths into the same scene graph will
write the NodePaths correctly when used with this pickle module, so
that when it is unpickled later, the NodePaths will still reference
into the same scene graph together.

If you use the standard pickle module instead, the NodePaths will each
duplicate its own copy of its scene graph.

This is necessary because the standard pickle module doesn't provide a
mechanism for sharing context between different objects written to the
same pickle stream, so each NodePath has to write itself without
knowing about the other NodePaths that will also be writing to the
same stream.  This replacement module solves this problem by defining
a ``__reduce_persist__()`` replacement method for ``__reduce__()``,
which accepts a pointer to the Pickler object itself, allowing for
shared context between all objects written by that Pickler.

Unfortunately, cPickle cannot be supported, because it does not
support extensions of this nature. """
import sys

from panda3d.core import BamWriter, BamReader
from copyreg import dispatch_table

# A funny replacement for "import pickle" so we don't get confused
# with the local pickle.py.
pickle = __import__('pickle')
  26. class Pickler(pickle.Pickler):
  27. def __init__(self, *args, **kw):
  28. self.bamWriter = BamWriter()
  29. pickle.Pickler.__init__(self, *args, **kw)
  30. # We have to duplicate most of the save() method, so we can add
  31. # support for __reduce_persist__().
  32. def save(self, obj):
  33. # Check for persistent id (defined by a subclass)
  34. pid = self.persistent_id(obj)
  35. if pid:
  36. self.save_pers(pid)
  37. return
  38. # Check the memo
  39. x = self.memo.get(id(obj))
  40. if x:
  41. self.write(self.get(x[0]))
  42. return
  43. # Check the type dispatch table
  44. t = type(obj)
  45. f = self.dispatch.get(t)
  46. if f:
  47. f(self, obj) # Call unbound method with explicit self
  48. return
  49. # Check for a class with a custom metaclass; treat as regular class
  50. try:
  51. issc = issubclass(t, type)
  52. except TypeError: # t is not a class (old Boost; see SF #502085)
  53. issc = 0
  54. if issc:
  55. self.save_global(obj)
  56. return
  57. # Check copy_reg.dispatch_table
  58. reduce = dispatch_table.get(t)
  59. if reduce:
  60. rv = reduce(obj)
  61. else:
  62. # New code: check for a __reduce_persist__ method, then
  63. # fall back to standard methods.
  64. reduce = getattr(obj, "__reduce_persist__", None)
  65. if reduce:
  66. rv = reduce(self)
  67. else:
  68. # Check for a __reduce_ex__ method, fall back to __reduce__
  69. reduce = getattr(obj, "__reduce_ex__", None)
  70. if reduce:
  71. rv = reduce(self.proto)
  72. else:
  73. reduce = getattr(obj, "__reduce__", None)
  74. if reduce:
  75. rv = reduce()
  76. else:
  77. raise PicklingError("Can't pickle %r object: %r" %
  78. (t.__name__, obj))
  79. # Check for string returned by reduce(), meaning "save as global"
  80. if type(rv) is str:
  81. self.save_global(obj, rv)
  82. return
  83. # Assert that reduce() returned a tuple
  84. if type(rv) is not tuple:
  85. raise PicklingError("%s must return string or tuple" % reduce)
  86. # Assert that it returned an appropriately sized tuple
  87. l = len(rv)
  88. if not (2 <= l <= 5):
  89. raise PicklingError("Tuple returned by %s must have "
  90. "two to five elements" % reduce)
  91. # Save the reduce() output and finally memoize the object
  92. self.save_reduce(obj=obj, *rv)
  93. class Unpickler(pickle.Unpickler):
  94. def __init__(self, *args, **kw):
  95. self.bamReader = BamReader()
  96. pickle.Unpickler.__init__(self, *args, **kw)
  97. # Duplicate the load_reduce() function, to provide a special case
  98. # for the reduction function.
  99. def load_reduce(self):
  100. stack = self.stack
  101. args = stack.pop()
  102. func = stack[-1]
  103. # If the function name ends with "Persist", then assume the
  104. # function wants the Unpickler as the first parameter.
  105. if func.__name__.endswith('Persist'):
  106. value = func(self, *args)
  107. else:
  108. # Otherwise, use the existing pickle convention.
  109. value = func(*args)
  110. stack[-1] = value
  111. #FIXME: how to replace in Python 3?
  112. if sys.version_info < (3, 0):
  113. pickle.Unpickler.dispatch[pickle.REDUCE] = load_reduce
  114. # Shorthands
  115. from io import BytesIO
  116. def dump(obj, file, protocol=None):
  117. Pickler(file, protocol).dump(obj)
  118. def dumps(obj, protocol=None):
  119. file = BytesIO()
  120. Pickler(file, protocol).dump(obj)
  121. return file.getvalue()
  122. def load(file):
  123. return Unpickler(file).load()
  124. def loads(str):
  125. file = BytesIO(str)
  126. return Unpickler(file).load()