
introduce stdpy.pickle module with support for common-node NodePaths within a pickle stream

David Rose 16 years ago
commit f6f9e269f4

+ 157 - 0
direct/src/stdpy/pickle.py

@@ -0,0 +1,157 @@
+""" This module extends standard Python's pickle module so that it is
+capable of writing more efficient pickle files that contain Panda
+objects with shared pointers.  In particular, a single Python
+structure that contains many NodePaths into the same scene graph will
+write the NodePaths correctly when used with this pickle module, so
+that when it is unpickled later, the NodePaths will still reference
+into the same scene graph together.
+
+If you use the standard pickle module instead, each NodePath will
+write its own duplicate copy of the scene graph.
+
+This is necessary because the standard pickle module doesn't provide a
+mechanism for sharing context between different objects written to the
+same pickle stream, so each NodePath has to write itself without
+knowing about the other NodePaths that will also be writing to the
+same stream.  This replacement module solves this problem by defining
+a __reduce_persist__() replacement method for __reduce__(), which
+accepts a pointer to the Pickler object itself, allowing for shared
+context between all objects written by that Pickler.
+
+Unfortunately, cPickle cannot be supported, because it does not
+support extensions of this nature. """
+
+from types import *
+from copy_reg import dispatch_table
+from pandac.PandaModules import BamWriter, BamReader
+
+# A funny replacement for "import pickle" so we don't get confused
+# with the local pickle.py.
+pickle = __import__('pickle')
+
+class Pickler(pickle.Pickler):
+
+    def __init__(self, *args, **kw):
+        self.bamWriter = BamWriter()
+        pickle.Pickler.__init__(self, *args, **kw)
+
+    # We have to duplicate most of the save() method, so we can add
+    # support for __reduce_persist__().
+    
+    def save(self, obj):
+        # Check for persistent id (defined by a subclass)
+        pid = self.persistent_id(obj)
+        if pid:
+            self.save_pers(pid)
+            return
+
+        # Check the memo
+        x = self.memo.get(id(obj))
+        if x:
+            self.write(self.get(x[0]))
+            return
+
+        # Check the type dispatch table
+        t = type(obj)
+        f = self.dispatch.get(t)
+        if f:
+            f(self, obj) # Call unbound method with explicit self
+            return
+
+        # Check for a class with a custom metaclass; treat as regular class
+        try:
+            issc = issubclass(t, TypeType)
+        except TypeError: # t is not a class (old Boost; see SF #502085)
+            issc = 0
+        if issc:
+            self.save_global(obj)
+            return
+
+        # Check copy_reg.dispatch_table
+        reduce = dispatch_table.get(t)
+        if reduce:
+            rv = reduce(obj)
+        else:
+            # New code: check for a __reduce_persist__ method, then
+            # fall back to standard methods.
+            reduce = getattr(obj, "__reduce_persist__", None)
+            if reduce:
+                rv = reduce(self)
+            else:
+                # Check for a __reduce_ex__ method, fall back to __reduce__
+                reduce = getattr(obj, "__reduce_ex__", None)
+                if reduce:
+                    rv = reduce(self.proto)
+                else:
+                    reduce = getattr(obj, "__reduce__", None)
+                    if reduce:
+                        rv = reduce()
+                    else:
+                        raise pickle.PicklingError("Can't pickle %r object: %r" %
+                                                   (t.__name__, obj))
+
+        # Check for string returned by reduce(), meaning "save as global"
+        if type(rv) is StringType:
+            self.save_global(obj, rv)
+            return
+
+        # Assert that reduce() returned a tuple
+        if type(rv) is not TupleType:
+            raise PicklingError("%s must return string or tuple" % reduce)
+
+        # Assert that it returned an appropriately sized tuple
+        l = len(rv)
+        if not (2 <= l <= 5):
+            raise PicklingError("Tuple returned by %s must have "
+                                "two to five elements" % reduce)
+
+        # Save the reduce() output and finally memoize the object
+        self.save_reduce(obj=obj, *rv)
+
+class Unpickler(pickle.Unpickler):
+
+    def __init__(self, *args, **kw):
+        self.bamReader = BamReader()
+        pickle.Unpickler.__init__(self, *args, **kw)
+
+    # Duplicate the load_reduce() function, to provide a special case
+    # for the reduction function.
+    
+    def load_reduce(self):
+        stack = self.stack
+        args = stack.pop()
+        func = stack[-1]
+
+        # If the function name ends with "Persist", then assume the
+        # function wants the Unpickler as the first parameter.
+        if func.__name__.endswith('Persist'):
+            value = func(self, *args)
+        else:
+            # Otherwise, use the existing pickle convention.
+            value = func(*args)
+
+        stack[-1] = value
+    pickle.Unpickler.dispatch[pickle.REDUCE] = load_reduce
+
+
+# Shorthands
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+def dump(obj, file, protocol=None):
+    Pickler(file, protocol).dump(obj)
+
+def dumps(obj, protocol=None):
+    file = StringIO()
+    Pickler(file, protocol).dump(obj)
+    return file.getvalue()
+
+def load(file):
+    return Unpickler(file).load()
+
+def loads(str):
+    file = StringIO(str)
+    return Unpickler(file).load()
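
For reference, a minimal usage sketch of the module above. It assumes the file is importable as direct.stdpy.pickle and that NodePath comes from pandac.PandaModules, as elsewhere in this commit:

from pandac.PandaModules import NodePath
from direct.stdpy import pickle

root = NodePath('root')
child = root.attachNewNode('child')

# Both NodePaths reference the same scene graph; this Pickler writes
# the graph once, through a single shared BamWriter.
data = pickle.dumps({'root': root, 'child': child})
restored = pickle.loads(data)

# With the standard pickle module, each NodePath would come back with
# its own duplicate copy of the graph; here the hierarchy is preserved.
assert restored['child'].getParent() == restored['root']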

+ 1 - 0
dtool/src/interrogate/interfaceMakerPythonNative.cxx

@@ -99,6 +99,7 @@ RenameSet methodRenameDictionary[] = {
     { "operator typecast bool", "__nonzero__",  0 },
     { "__nonzero__"   , "__nonzero__",          0 }, 
     { "__reduce__"    , "__reduce__",           0 },
+    { "__reduce_persist__"    , "__reduce_persist__", 0 },
     { "__copy__"      , "__copy__",             0 },
     { "__deepcopy__"  , "__deepcopy__",         0 },
     { "print"         , "Cprint",               0 },

+ 24 - 0
panda/src/gobj/textureCollection.I

@@ -44,3 +44,27 @@ operator + (const TextureCollection &other) const {
   a += other;
   return a;
 }
+
+////////////////////////////////////////////////////////////////////
+//     Function: TextureCollection::append
+//       Access: Published
+//  Description: Adds a new Texture to the collection.  This method
+//               duplicates the add_texture() method; it is provided to
+//               satisfy Python's naming convention.
+////////////////////////////////////////////////////////////////////
+INLINE void TextureCollection::
+append(Texture *texture) {
+  add_texture(texture);
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: TextureCollection::extend
+//       Access: Published
+//  Description: Appends the other list onto the end of this one.
+//               This method duplicates the += operator; it is
+//               provided to satisfy Python's naming convention.
+////////////////////////////////////////////////////////////////////
+INLINE void TextureCollection::
+extend(const TextureCollection &other) {
+  operator += (other);
+}
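
In Python terms, the two methods above simply let a TextureCollection be treated like a list. A hedged sketch, with class names as they are exposed in pandac.PandaModules:

from pandac.PandaModules import Texture, TextureCollection

tc = TextureCollection()
tc.append(Texture('tex-a'))      # same as tc.addTexture(...)

other = TextureCollection()
other.append(Texture('tex-b'))
tc.extend(other)                 # same as tc += other

print tc.getNumTextures()        # -> 2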

+ 83 - 4
panda/src/gobj/textureCollection.cxx

@@ -16,6 +16,13 @@
 
 #include "indent.h"
 
+#ifdef HAVE_PYTHON
+#include "py_panda.h"  
+#ifndef CPPPARSER
+extern EXPCL_PANDA_PUTIL Dtool_PyTypedObject Dtool_Texture;
+#endif  // CPPPARSER
+#endif  // HAVE_PYTHON
+
 ////////////////////////////////////////////////////////////////////
 //     Function: TextureCollection::Constructor
 //       Access: Published
@@ -46,13 +53,85 @@ operator = (const TextureCollection &copy) {
   _textures = copy._textures;
 }
 
+#ifdef HAVE_PYTHON
+////////////////////////////////////////////////////////////////////
+//     Function: TextureCollection::Constructor
+//       Access: Published
+//  Description: This special constructor accepts a Python list of
+//               Textures.  Since this constructor accepts a generic
+//               PyObject *, it should be the last constructor listed
+//               in the class record.
+////////////////////////////////////////////////////////////////////
+TextureCollection::
+TextureCollection(PyObject *sequence) {
+  if (!PySequence_Check(sequence)) {
+    // If we were passed a non-sequence, this isn't the right constructor.
+    PyErr_SetString(PyExc_TypeError, "TextureCollection constructor requires a sequence");
+    return;
+  }
+
+  int size = PySequence_Size(sequence);
+  for (int i = 0; i < size; ++i) {
+    PyObject *item = PySequence_GetItem(sequence, i);
+    if (item == NULL) {
+      return;
+    }
+    Texture *tex = NULL;
+    DTOOL_Call_ExtractThisPointerForType(item, &Dtool_Texture, (void **)&tex);
+    Py_DECREF(item);
+    if (tex == NULL) {
+      // If one of the items in the sequence is not a Texture, can't
+      // use this constructor.
+      ostringstream stream;
+      stream << "Element " << i << " in sequence passed to TextureCollection constructor is not a Texture";
+      string str = stream.str();
+      PyErr_SetString(PyExc_TypeError, str.c_str());
+      return;
+    }
+
+    add_texture(tex);
+  }
+}
+#endif  // HAVE_PYTHON
+
+#ifdef HAVE_PYTHON
+////////////////////////////////////////////////////////////////////
+//     Function: TextureCollection::__reduce__
+//       Access: Published
+//  Description: This special Python method is implemented to provide
+//               support for the pickle module.
+////////////////////////////////////////////////////////////////////
+PyObject *TextureCollection::
+__reduce__(PyObject *self) const {
+  // Here we will return a 4-tuple: (Class, (args), None, iterator),
+  // where iterator is an iterator that will yield successive
+  // Textures.
+
+  // We should return at least a 2-tuple, (Class, (args)): the
+  // necessary class object whose constructor we should call
+  // (e.g. this), and the arguments necessary to reconstruct this
+  // object.
+
+  PyObject *this_class = PyObject_Type(self);
+  if (this_class == NULL) {
+    return NULL;
+  }
+
+  // Since a TextureCollection is itself an iterator, we can simply
+  // pass it as the fourth tuple component.
+  PyObject *result = Py_BuildValue("(O()OO)", this_class, Py_None, self);
+  Py_DECREF(this_class);
+  return result;
+}
+#endif  // HAVE_PYTHON
+
 ////////////////////////////////////////////////////////////////////
 //     Function: TextureCollection::add_texture
 //       Access: Published
 //  Description: Adds a new Texture to the collection.
 ////////////////////////////////////////////////////////////////////
 void TextureCollection::
-add_texture(Texture *node_texture) {
+add_texture(Texture *texture) {
   // If the pointer to our internal array is shared by any other
   // TextureCollections, we have to copy the array now so we won't
   // inadvertently modify any of our brethren TextureCollection
@@ -64,7 +143,7 @@ add_texture(Texture *node_texture) {
     _textures.v() = old_textures.v();
   }
 
-  _textures.push_back(node_texture);
+  _textures.push_back(texture);
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -75,10 +154,10 @@ add_texture(Texture *node_texture) {
 //               not a member of the collection.
 ////////////////////////////////////////////////////////////////////
 bool TextureCollection::
-remove_texture(Texture *node_texture) {
+remove_texture(Texture *texture) {
   int texture_index = -1;
   for (int i = 0; texture_index == -1 && i < (int)_textures.size(); i++) {
-    if (_textures[i] == node_texture) {
+    if (_textures[i] == texture) {
       texture_index = i;
     }
   }
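
Together with append() above, the sequence constructor and __reduce__ are what make a TextureCollection picklable from Python. A sketch of the intended round trip (it assumes each Texture itself pickles through its bam-stream __reduce__ support):

from pandac.PandaModules import Texture, TextureCollection
from direct.stdpy import pickle

# The new PyObject* constructor accepts any Python sequence of Textures.
tc = TextureCollection([Texture('a'), Texture('b')])

# __reduce__ returns (TextureCollection, (), None, iter); pickle then
# rebuilds the collection by calling append() on each yielded Texture.
tc2 = pickle.loads(pickle.dumps(tc))
print tc2.getNumTextures()       # -> 2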

+ 11 - 2
panda/src/gobj/textureCollection.h

@@ -31,8 +31,13 @@ PUBLISHED:
   void operator = (const TextureCollection &copy);
   INLINE ~TextureCollection();
 
-  void add_texture(Texture *node_texture);
-  bool remove_texture(Texture *node_texture);
+#ifdef HAVE_PYTHON
+  TextureCollection(PyObject *sequence);
+  PyObject *__reduce__(PyObject *self) const;
+#endif
+
+  void add_texture(Texture *texture);
+  bool remove_texture(Texture *texture);
   void add_textures_from(const TextureCollection &other);
   void remove_textures_from(const TextureCollection &other);
   void remove_duplicate_textures();
@@ -49,6 +54,10 @@ PUBLISHED:
   INLINE void operator += (const TextureCollection &other);
   INLINE TextureCollection operator + (const TextureCollection &other) const;
 
+  // Method names to satisfy Python's conventions.
+  INLINE void append(Texture *texture);
+  INLINE void extend(const TextureCollection &other);
+
   void output(ostream &out) const;
   void write(ostream &out, int indent_level = 0) const;
 

+ 96 - 8
panda/src/pgraph/nodePath.cxx

@@ -74,6 +74,14 @@ int NodePath::_max_search_depth = 7000;
 TypeHandle NodePath::_type_handle;
 
 
+#ifdef HAVE_PYTHON
+#include "py_panda.h"  
+#ifndef CPPPARSER
+extern EXPCL_PANDA_PUTIL Dtool_PyTypedObject Dtool_BamWriter;
+extern EXPCL_PANDA_PUTIL Dtool_PyTypedObject Dtool_BamReader;
+#endif  // CPPPARSER
+#endif  // HAVE_PYTHON
+
 // ***Begin temporary transition code for operator bool
 enum EmptyNodePathType {
   ENP_future,
@@ -196,9 +204,33 @@ __deepcopy__(PyObject *self, PyObject *memo) const {
 //       Access: Published
 //  Description: This special Python method is implemented to provide
 //               support for the pickle module.
+//
+//               This hooks into the native pickle and cPickle
+//               modules, but it cannot properly handle
+//               self-referential BAM objects.
 ////////////////////////////////////////////////////////////////////
 PyObject *NodePath::
 __reduce__(PyObject *self) const {
+  return __reduce_persist__(self, NULL);
+}
+#endif  // HAVE_PYTHON
+
+#ifdef HAVE_PYTHON
+////////////////////////////////////////////////////////////////////
+//     Function: NodePath::__reduce_persist__
+//       Access: Published
+//  Description: This special Python method is implemented to provide
+//               support for the pickle module.
+//
+//               This is similar to __reduce__, but it provides
+//               additional support for the missing persistent-state
+//               object needed to properly support self-referential
+//               BAM objects written to the pickle stream.  This hooks
+//               into the pickle and cPickle modules implemented in
+//               direct/src/stdpy.
+////////////////////////////////////////////////////////////////////
+PyObject *NodePath::
+__reduce_persist__(PyObject *self, PyObject *pickler) const {
   // We should return at least a 2-tuple, (Class, (args)): the
   // necessary class object whose constructor we should call
   // (e.g. this), and the arguments necessary to reconstruct this
@@ -218,11 +250,23 @@ __reduce__(PyObject *self) const {
     return result;
   }
 
+  BamWriter *writer = NULL;
+  if (pickler != NULL) {
+    PyObject *py_writer = PyObject_GetAttrString(pickler, "bamWriter");
+    if (py_writer == NULL) {
+      // It's OK if there's no bamWriter.
+      PyErr_Clear();
+    } else {
+      DTOOL_Call_ExtractThisPointerForType(py_writer, &Dtool_BamWriter, (void **)&writer);
+      Py_DECREF(py_writer);
+    }
+  }
+
   // We have a non-empty NodePath.  We need to streamify the
   // underlying node.
 
   string bam_stream;
-  if (!node()->encode_to_bam_stream(bam_stream)) {
+  if (!node()->encode_to_bam_stream(bam_stream, writer)) {
     ostringstream stream;
     stream << "Could not bamify object of type " << node()->get_type() << "\n";
     string message = stream.str();
@@ -236,16 +280,33 @@ __reduce__(PyObject *self) const {
     return NULL;
   }
 
-  PyObject *func = TypedWritable::find_global_decode(this_class, "pyDecodeNodePathFromBamStream");
-  if (func == NULL) {
-    PyErr_SetString(PyExc_TypeError, "Couldn't find pyDecodeNodePathFromBamStream()");
-    Py_DECREF(this_class);
-    return NULL;
+  PyObject *func;
+  if (writer != NULL) {
+    // The modified pickle support: call the "persistent" version of
+    // this function, which receives the unpickler itself as an
+    // additional parameter.
+    func = TypedWritable::find_global_decode(this_class, "pyDecodeNodePathFromBamStreamPersist");
+    if (func == NULL) {
+      PyErr_SetString(PyExc_TypeError, "Couldn't find pyDecodeNodePathFromBamStreamPersist()");
+      Py_DECREF(this_class);
+      return NULL;
+    }
+
+  } else {
+    // The traditional pickle support: call the non-persistent version
+    // of this function.
+
+    func = TypedWritable::find_global_decode(this_class, "pyDecodeNodePathFromBamStream");
+    if (func == NULL) {
+      PyErr_SetString(PyExc_TypeError, "Couldn't find pyDecodeNodePathFromBamStream()");
+      Py_DECREF(this_class);
+      return NULL;
+    }
   }
-  Py_DECREF(this_class);
 
   PyObject *result = Py_BuildValue("(O(s#))", func, bam_stream.data(), bam_stream.size());
   Py_DECREF(func);
+  Py_DECREF(this_class);
   return result;
 }
 #endif  // HAVE_PYTHON
@@ -7467,7 +7528,34 @@ r_find_all_materials(PandaNode *node, const RenderState *state,
 ////////////////////////////////////////////////////////////////////
 NodePath
 py_decode_NodePath_from_bam_stream(const string &data) {
-  PT(PandaNode) node = PandaNode::decode_from_bam_stream(data);
+  return py_decode_NodePath_from_bam_stream_persist(NULL, data);
+}
+#endif  // HAVE_PYTHON
+
+
+#ifdef HAVE_PYTHON
+////////////////////////////////////////////////////////////////////
+//     Function: py_decode_NodePath_from_bam_stream_persist
+//       Access: Published
+//  Description: This wrapper is defined as a global function to suit
+//               pickle's needs.
+////////////////////////////////////////////////////////////////////
+NodePath
+py_decode_NodePath_from_bam_stream_persist(PyObject *unpickler, const string &data) {
+
+  BamReader *reader = NULL;
+  if (unpickler != NULL) {
+    PyObject *py_reader = PyObject_GetAttrString(unpickler, "bamReader");
+    if (py_reader == NULL) {
+      // It's OK if there's no bamReader.
+      PyErr_Clear();
+    } else {
+      DTOOL_Call_ExtractThisPointerForType(py_reader, &Dtool_BamReader, (void **)&reader);
+      Py_DECREF(py_reader);
+    }
+  }
+
+  PT(PandaNode) node = PandaNode::decode_from_bam_stream(data, reader);
   if (node == (PandaNode *)NULL) {
     PyErr_SetString(PyExc_ValueError, "Could not unpack bam stream");
     return NodePath();
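
The bamWriter and bamReader lookups above follow a simple convention shared with the new Python module: __reduce_persist__() receives the Pickler (which exposes .bamWriter), and any reconstruction function whose name ends in "Persist" receives the Unpickler (which exposes .bamReader) as its first argument. A self-contained toy, using hypothetical names that are not part of Panda, exercising the same convention:

from direct.stdpy import pickle

_registry = {}

def rebuildWidgetPersist(unpickler, key):
    # Because the name ends in "Persist", the Unpickler passes itself as
    # the first argument; unpickler.bamReader (or any other shared
    # state) is available here.
    return _registry[key]

class Widget(object):
    def __init__(self, key):
        self.key = key

    def __reduce_persist__(self, pickler):
        # pickler.bamWriter carries state shared by every object in the
        # stream; this toy just stashes the object in a module-level dict.
        _registry[self.key] = self
        return (rebuildWidgetPersist, (self.key,))

w = pickle.loads(pickle.dumps(Widget('spam')))
assert w.key == 'spam'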

+ 2 - 0
panda/src/pgraph/nodePath.h

@@ -172,6 +172,7 @@ PUBLISHED:
   NodePath __copy__() const;
   PyObject *__deepcopy__(PyObject *self, PyObject *memo) const;
   PyObject *__reduce__(PyObject *self) const;
+  PyObject *__reduce_persist__(PyObject *self, PyObject *pickler) const;
 #endif
 
   INLINE static NodePath not_found();
@@ -960,6 +961,7 @@ INLINE ostream &operator << (ostream &out, const NodePath &node_path);
 #ifdef HAVE_PYTHON
 BEGIN_PUBLISH
 NodePath py_decode_NodePath_from_bam_stream(const string &data);
+NodePath py_decode_NodePath_from_bam_stream_persist(PyObject *unpickler, const string &data);
 END_PUBLISH
 #endif
 

+ 24 - 0
panda/src/pgraph/nodePathCollection.I

@@ -45,6 +45,30 @@ operator + (const NodePathCollection &other) const {
   return a;
 }
 
+////////////////////////////////////////////////////////////////////
+//     Function: NodePathCollection::append
+//       Access: Published
+//  Description: Adds a new NodePath to the collection.  This method
+//               duplicates the add_path() method; it is provided to
+//               satisfy Python's naming convention.
+////////////////////////////////////////////////////////////////////
+INLINE void NodePathCollection::
+append(const NodePath &node_path) {
+  add_path(node_path);
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: NodePathCollection::extend
+//       Access: Published
+//  Description: Appends the other list onto the end of this one.
+//               This method duplicates the += operator; it is
+//               provided to satisfy Python's naming convention.
+////////////////////////////////////////////////////////////////////
+INLINE void NodePathCollection::
+extend(const NodePathCollection &other) {
+  operator += (other);
+}
+
 ////////////////////////////////////////////////////////////////////
 //     Function: NodePathCollection::ls
 //       Access: Published

+ 79 - 0
panda/src/pgraph/nodePathCollection.cxx

@@ -21,6 +21,13 @@
 
 #include "indent.h"
 
+#ifdef HAVE_PYTHON
+#include "py_panda.h"  
+#ifndef CPPPARSER
+extern EXPCL_PANDA_PUTIL Dtool_PyTypedObject Dtool_NodePath;
+#endif  // CPPPARSER
+#endif  // HAVE_PYTHON
+
 ////////////////////////////////////////////////////////////////////
 //     Function: NodePathCollection::Constructor
 //       Access: Published
@@ -51,6 +58,78 @@ operator = (const NodePathCollection &copy) {
   _node_paths = copy._node_paths;
 }
 
+#ifdef HAVE_PYTHON
+////////////////////////////////////////////////////////////////////
+//     Function: NodePathCollection::Constructor
+//       Access: Published
+//  Description: This special constructor accepts a Python list of
+//               NodePaths.  Since this constructor accepts a generic
+//               PyObject *, it should be the last constructor listed
+//               in the class record.
+////////////////////////////////////////////////////////////////////
+NodePathCollection::
+NodePathCollection(PyObject *sequence) {
+  if (!PySequence_Check(sequence)) {
+    // If we were passed a non-sequence, this isn't the right constructor.
+    PyErr_SetString(PyExc_TypeError, "NodePathCollection constructor requires a sequence");
+    return;
+  }
+
+  int size = PySequence_Size(sequence);
+  for (int i = 0; i < size; ++i) {
+    PyObject *item = PySequence_GetItem(sequence, i);
+    if (item == NULL) {
+      return;
+    }
+    NodePath *np = NULL;
+    DTOOL_Call_ExtractThisPointerForType(item, &Dtool_NodePath, (void **)&np);
+    Py_DECREF(item);
+    if (np == NULL) {
+      // If one of the items in the sequence is not a NodePath, can't
+      // use this constructor.
+      ostringstream stream;
+      stream << "Element " << i << " in sequence passed to NodePathCollection constructor is not a NodePath";
+      string str = stream.str();
+      PyErr_SetString(PyExc_TypeError, str.c_str());
+      return;
+    }
+
+    add_path(*np);
+  }
+}
+#endif  // HAVE_PYTHON
+
+#ifdef HAVE_PYTHON
+////////////////////////////////////////////////////////////////////
+//     Function: NodePathCollection::__reduce__
+//       Access: Published
+//  Description: This special Python method is implemented to provide
+//               support for the pickle module.
+////////////////////////////////////////////////////////////////////
+PyObject *NodePathCollection::
+__reduce__(PyObject *self) const {
+  // Here we will return a 4-tuple: (Class, (args), None, iterator),
+  // where iterator is an iterator that will yield successive
+  // NodePaths.
+
+  // We should return at least a 2-tuple, (Class, (args)): the
+  // necessary class object whose constructor we should call
+  // (e.g. this), and the arguments necessary to reconstruct this
+  // object.
+
+  PyObject *this_class = PyObject_Type(self);
+  if (this_class == NULL) {
+    return NULL;
+  }
+
+  // Since a NodePathCollection is itself an iterator, we can simply
+  // pass it as the fourth tuple component.
+  PyObject *result = Py_BuildValue("(O()OO)", this_class, Py_None, self);
+  Py_DECREF(this_class);
+  return result;
+}
+#endif  // HAVE_PYTHON
+
 ////////////////////////////////////////////////////////////////////
 //     Function: NodePathCollection::add_path
 //       Access: Published
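
The same pattern as TextureCollection, shown once for NodePathCollection (a sketch; class names as exposed in pandac.PandaModules):

from pandac.PandaModules import NodePath, NodePathCollection

npc = NodePathCollection([NodePath('a'), NodePath('b')])   # new sequence constructor
npc.append(NodePath('c'))                                  # alias for addPath()
print npc.getNumPaths()                                    # -> 3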

+ 9 - 0
panda/src/pgraph/nodePathCollection.h

@@ -33,6 +33,11 @@ PUBLISHED:
   void operator = (const NodePathCollection &copy);
   INLINE ~NodePathCollection();
 
+#ifdef HAVE_PYTHON
+  NodePathCollection(PyObject *sequence);
+  PyObject *__reduce__(PyObject *self) const;
+#endif
+
   void add_path(const NodePath &node_path);
   bool remove_path(const NodePath &node_path);
   void add_paths_from(const NodePathCollection &other);
@@ -50,6 +55,10 @@ PUBLISHED:
   INLINE void operator += (const NodePathCollection &other);
   INLINE NodePathCollection operator + (const NodePathCollection &other) const;
 
+  // Method names to satisfy Python's conventions.
+  INLINE void append(const NodePath &node_path);
+  INLINE void extend(const NodePathCollection &other);
+
   // Handy operations on many NodePaths at once.
   INLINE void ls() const;
   void ls(ostream &out, int indent_level = 0) const;

+ 2 - 2
panda/src/pgraph/pandaNode.cxx

@@ -2680,11 +2680,11 @@ is_ambient_light() const {
 //               the reference count on the return value.
 ////////////////////////////////////////////////////////////////////
 PT(PandaNode) PandaNode::
-decode_from_bam_stream(const string &data) {
+decode_from_bam_stream(const string &data, BamReader *reader) {
   TypedWritable *object;
   ReferenceCount *ref_ptr;
 
-  if (!TypedWritable::decode_raw_from_bam_stream(object, ref_ptr, data)) {
+  if (!TypedWritable::decode_raw_from_bam_stream(object, ref_ptr, data, reader)) {
     return NULL;
   }
 

+ 1 - 1
panda/src/pgraph/pandaNode.h

@@ -307,7 +307,7 @@ PUBLISHED:
 
 
 PUBLISHED:
-  static PT(PandaNode) decode_from_bam_stream(const string &data);
+  static PT(PandaNode) decode_from_bam_stream(const string &data, BamReader *reader = NULL);
 
 protected:
   class BoundsData;

+ 14 - 10
panda/src/putil/bamReader.I

@@ -15,7 +15,7 @@
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReaderAuxData::Constructor
-//       Access: Public
+//       Access: Published
 //  Description: 
 ////////////////////////////////////////////////////////////////////
 INLINE BamReaderAuxData::
@@ -24,7 +24,7 @@ BamReaderAuxData() {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::get_filename
-//       Access: Public
+//       Access: Published
 //  Description: If a BAM is a file, then the BamReader should
 //               contain the name of the file.  This enables the
 //               reader to interpret pathnames in the BAM as relative
@@ -37,7 +37,7 @@ get_filename() const {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::get_loader_options
-//       Access: Public
+//       Access: Published
 //  Description: Returns the LoaderOptions passed to the loader when
 //               the model was requested, if any.
 ////////////////////////////////////////////////////////////////////
@@ -48,7 +48,7 @@ get_loader_options() const {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::set_loader_options
-//       Access: Public
+//       Access: Published
 //  Description: Specifies the LoaderOptions for this BamReader.
 ////////////////////////////////////////////////////////////////////
 INLINE void BamReader::
@@ -58,19 +58,20 @@ set_loader_options(const LoaderOptions &options) {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::is_eof
-//       Access: Public
+//       Access: Published
 //  Description: Returns true if the reader has reached end-of-file,
 //               false otherwise.  This call is only valid after a
 //               call to read_object().
 ////////////////////////////////////////////////////////////////////
 INLINE bool BamReader::
 is_eof() const {
+  nassertr(_source != NULL, true);
   return _source->is_eof();
 }
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::get_file_major_ver
-//       Access: Public
+//       Access: Published
 //  Description: Returns the major version number of the Bam file
 //               currently being read.
 ////////////////////////////////////////////////////////////////////
@@ -81,7 +82,7 @@ get_file_major_ver() const {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::get_file_minor_ver
-//       Access: Public
+//       Access: Published
 //  Description: Returns the minor version number of the Bam file
 //               currently being read.
 ////////////////////////////////////////////////////////////////////
@@ -92,7 +93,7 @@ get_file_minor_ver() const {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::get_file_endian
-//       Access: Public
+//       Access: Published
 //  Description: Returns the endian preference indicated by the Bam
 //               file currently being read.  This does not imply that
 //               every number is stored using the indicated
@@ -106,7 +107,7 @@ get_file_endian() const {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::get_current_major_ver
-//       Access: Public
+//       Access: Published
 //  Description: Returns the major version number of Bam files
 //               supported by the current code base.  This must match
 //               get_file_major_ver() in order to successfully read a
@@ -119,7 +120,7 @@ get_current_major_ver() const {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::get_current_minor_ver
-//       Access: Public
+//       Access: Published
 //  Description: Returns the minor version number of Bam files
 //               supported by the current code base.  This must match
 //               or exceed get_file_minor_ver() in order to
@@ -139,6 +140,7 @@ get_current_minor_ver() const {
 ////////////////////////////////////////////////////////////////////
 INLINE VirtualFile *BamReader::
 get_file() {
+  nassertr(_source != NULL, NULL);
   return _source->get_file();
 }
 
@@ -156,6 +158,7 @@ get_file() {
 ////////////////////////////////////////////////////////////////////
 INLINE streampos BamReader::
 get_file_pos() {
+  nassertr(_source != NULL, 0);
   return _source->get_file_pos();
 }
 
@@ -192,6 +195,7 @@ create_factory() {
 ////////////////////////////////////////////////////////////////////
 INLINE bool BamReader::
 get_datagram(Datagram &datagram) {
+  nassertr(_source != NULL, false);
   if (_source->is_error()) {
     return false;
   }

+ 32 - 12
panda/src/putil/bamReader.cxx

@@ -35,13 +35,14 @@ const int BamReader::_cur_minor = _bam_minor_ver;
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::Constructor
-//       Access: Public
+//       Access: Published
 //  Description:
 ////////////////////////////////////////////////////////////////////
 BamReader::
-BamReader(DatagramGenerator *generator, const Filename &name)
-  : _source(generator), _filename(name)
+BamReader(DatagramGenerator *source, const Filename &name)
+  : _source(source), _filename(name)
 {
+  _needs_init = true;
   _num_extra_objects = 0;
   _nesting_level = 0;
   _now_creating = _created_objs.end();
@@ -54,7 +55,7 @@ BamReader(DatagramGenerator *generator, const Filename &name)
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::Destructor
-//       Access: Public
+//       Access: Published
 //  Description:
 ////////////////////////////////////////////////////////////////////
 BamReader::
@@ -63,9 +64,25 @@ BamReader::
   nassertv(_nesting_level == 0);
 }
 
+////////////////////////////////////////////////////////////////////
+//     Function: BamReader::set_source
+//       Access: Published
+//  Description: Changes the source of future datagrams for this
+//               BamReader.  This also implicitly calls init() if it
+//               has not already been called.
+////////////////////////////////////////////////////////////////////
+void BamReader::
+set_source(DatagramGenerator *source) {
+  _source = source;
+  if (_needs_init && _source != NULL) {
+    bool success = init();
+    nassertv(success);
+  }
+}
+
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::init
-//       Access: Public
+//       Access: Published
 //  Description: Initializes the BamReader prior to reading any
 //               objects from its source.  This includes reading the
 //               Bam header.
@@ -75,6 +92,9 @@ BamReader::
 ////////////////////////////////////////////////////////////////////
 bool BamReader::
 init() {
+  nassertr(_source != NULL, false);
+  nassertr(_needs_init, false);
+  _needs_init = false;
   Datagram header;
 
   if (_source->is_error()) {
@@ -135,7 +155,7 @@ init() {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::set_aux_data
-//       Access: Public
+//       Access: Published
 //  Description: Associates an arbitrary block of data with the
 //               indicated object (or NULL), and the indicated name.
 //
@@ -179,7 +199,7 @@ set_aux_data(TypedWritable *obj, const string &name, BamReader::AuxData *data) {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::get_aux_data
-//       Access: Public
+//       Access: Published
 //  Description: Returns the pointer previously associated with the
 //               bam reader by a previous call to set_aux_data(), or
 //               NULL if data with the indicated key has not been set.
@@ -201,7 +221,7 @@ get_aux_data(TypedWritable *obj, const string &name) const {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::read_object
-//       Access: Public
+//       Access: Published
 //  Description: Reads a single object from the Bam file.  If the
 //               object type is known, a new object of the appropriate
 //               type is created and returned; otherwise, NULL is
@@ -241,7 +261,7 @@ read_object() {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::read_object
-//       Access: Public
+//       Access: Published
 //  Description: Reads a single object from the Bam file.  
 //
 //               This flavor of read_object() returns both a
@@ -321,7 +341,7 @@ read_object(TypedWritable *&ptr, ReferenceCount *&ref_ptr) {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::resolve
-//       Access: Public
+//       Access: Published
 //  Description: This may be called at any time during processing of
 //               the Bam file to resolve all the known pointers so
 //               far.  It is usually called at the end of the
@@ -470,7 +490,7 @@ resolve() {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::change_pointer
-//       Access: Public
+//       Access: Published
 //  Description: Indicates that an object recently read from the bam
 //               stream should be replaced with a new object.  Any
 //               future occurrences of the original object in the
@@ -530,7 +550,7 @@ change_pointer(const TypedWritable *orig_pointer, const TypedWritable *new_point
 
 ////////////////////////////////////////////////////////////////////
 //     Function: BamReader::read_handle
-//       Access: Public
+//       Access: Published
 //  Description: Reads a TypeHandle out of the Datagram.
 ////////////////////////////////////////////////////////////////////
 TypeHandle BamReader::

+ 4 - 1
panda/src/putil/bamReader.h

@@ -125,9 +125,11 @@ public:
 
 PUBLISHED:
   // The primary interface for a caller.
-  BamReader(DatagramGenerator *generator, const Filename &name = "");
+  BamReader(DatagramGenerator *source = NULL, const Filename &name = "");
   ~BamReader();
 
+  void set_source(DatagramGenerator *source);
+
   bool init();
 
   class AuxData;
@@ -221,6 +223,7 @@ private:
   static WritableFactory *_factory;
 
   DatagramGenerator *_source;
+  bool _needs_init;
   
   bool _long_object_id;
   bool _long_pta_id;

+ 40 - 2
panda/src/putil/bamWriter.cxx

@@ -30,12 +30,23 @@
 //  Description:
 ////////////////////////////////////////////////////////////////////
 BamWriter::
-BamWriter(DatagramSink *sink, const Filename &name) :
+BamWriter(DatagramSink *target, const Filename &name) :
   _filename(name),
-  _target(sink)
+  _target(target)
 {
   ++_writing_seq;
   _next_boc = BOC_adjunct;
+  _needs_init = true;
+
+  // Initialize the next object and PTA ID's.  These start counting at
+  // 1, since 0 is reserved for NULL.
+  _next_object_id = 1;
+  _long_object_id = false;
+  _next_pta_id = 1;
+  _long_pta_id = false;
+
+  _file_endian = bam_endian;
+  _file_texture_mode = bam_texture_mode;
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -59,6 +70,25 @@ BamWriter::
   }
 }
 
+////////////////////////////////////////////////////////////////////
+//     Function: BamWriter::set_target
+//       Access: Published
+//  Description: Changes the destination of future datagrams written
+//               by the BamWriter.  This also implicitly calls init()
+//               if it has not already been called.
+////////////////////////////////////////////////////////////////////
+void BamWriter::
+set_target(DatagramSink *target) {
+  if (_target != NULL) {
+    _target->flush();
+  }
+  _target = target;
+
+  if (_needs_init && _target != NULL) {
+    init();
+  }
+}
+
 ////////////////////////////////////////////////////////////////////
 //     Function: BamWriter::init
 //       Access: Published
@@ -71,6 +101,10 @@ BamWriter::
 ////////////////////////////////////////////////////////////////////
 bool BamWriter::
 init() {
+  nassertr(_target != NULL, false);
+  nassertr(_needs_init, false);
+  _needs_init = false;
+
   // Initialize the next object and PTA ID's.  These start counting at
   // 1, since 0 is reserved for NULL.
   _next_object_id = 1;
@@ -120,6 +154,8 @@ init() {
 ////////////////////////////////////////////////////////////////////
 bool BamWriter::
 write_object(const TypedWritable *object) {
+  nassertr(_target != NULL, false);
+
   // Increment the _writing_seq, so we can check for newly stale
   // objects during this operation.
   ++_writing_seq;
@@ -186,6 +222,7 @@ has_object(const TypedWritable *object) const {
 ////////////////////////////////////////////////////////////////////
 void BamWriter::
 flush() {
+  nassertv(_target != NULL);
   _target->flush();
 }
 
@@ -555,6 +592,7 @@ enqueue_object(const TypedWritable *object) {
 ////////////////////////////////////////////////////////////////////
 bool BamWriter::
 flush_queue() {
+  nassertr(_target != NULL, false);
   // Each object we write may append more to the queue.
   while (!_object_queue.empty()) {
     const TypedWritable *object = _object_queue.front();

+ 4 - 1
panda/src/putil/bamWriter.h

@@ -72,9 +72,11 @@
 ////////////////////////////////////////////////////////////////////
 class EXPCL_PANDA_PUTIL BamWriter : public BamEnums {
 PUBLISHED:
-  BamWriter(DatagramSink *sink, const Filename &name = "");
+  BamWriter(DatagramSink *target = NULL, const Filename &name = "");
   ~BamWriter();
 
+  void set_target(DatagramSink *target);
+
   bool init();
   INLINE const Filename &get_filename() const;
   bool write_object(const TypedWritable *obj);
@@ -165,6 +167,7 @@ private:
 
   // The destination to write all the output to.
   DatagramSink *_target;
+  bool _needs_init;
 
   friend class TypedWritable;
 };
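
The default constructors and set_source()/set_target() added above exist so that one BamReader and one BamWriter can outlive any single stream: the new Pickler and Unpickler create them bare, and each pickled object temporarily attaches them to its own in-memory datagram stream while the shared object-ID tables persist for the life of the pickle. A brief sketch of that lifecycle from the Python side (only the new default constructors are exercised here):

from pandac.PandaModules import BamWriter, BamReader

# One writer and one reader per pickle stream, created with no file
# attached; init() is deferred until a target/source is first set.
writer = BamWriter()
reader = BamReader()

# Per object, the C++ side then does roughly:
#   writer->set_target(&dout); writer->write_object(obj); writer->set_target(NULL);
# and, when unpickling:
#   reader->set_source(&din);  reader->read_object(ptr, ref_ptr); reader->set_source(NULL);
# so pointers shared by several pickled objects are written only once.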

+ 164 - 24
panda/src/putil/typedWritable.cxx

@@ -25,6 +25,13 @@ LightMutex TypedWritable::_bam_writers_lock;
 TypeHandle TypedWritable::_type_handle;
 TypedWritable* const TypedWritable::Null = (TypedWritable*)0L;
 
+#ifdef HAVE_PYTHON
+#include "py_panda.h"  
+#ifndef CPPPARSER
+extern EXPCL_PANDA_PUTIL Dtool_PyTypedObject Dtool_BamWriter;
+#endif  // CPPPARSER
+#endif  // HAVE_PYTHON
+
 ////////////////////////////////////////////////////////////////////
 //     Function: TypedWritable::Destructor
 //       Access: Public, Virtual
@@ -155,9 +162,33 @@ as_reference_count() {
 //       Access: Published
 //  Description: This special Python method is implemented to provide
 //               support for the pickle module.
+//
+//               This hooks into the native pickle and cPickle
+//               modules, but it cannot properly handle
+//               self-referential BAM objects.
 ////////////////////////////////////////////////////////////////////
 PyObject *TypedWritable::
 __reduce__(PyObject *self) const {
+  return __reduce_persist__(self, NULL);
+}
+#endif  // HAVE_PYTHON
+
+#ifdef HAVE_PYTHON
+////////////////////////////////////////////////////////////////////
+//     Function: TypedWritable::__reduce_persist__
+//       Access: Published
+//  Description: This special Python method is implemented to provide
+//               support for the pickle module.
+//
+//               This is similar to __reduce__, but it provides
+//               additional support for the missing persistent-state
+//               object needed to properly support self-referential
+//               BAM objects written to the pickle stream.  This hooks
+//               into the pickle and cPickle modules implemented in
+//               direct/src/stdpy.
+////////////////////////////////////////////////////////////////////
+PyObject *TypedWritable::
+__reduce_persist__(PyObject *self, PyObject *pickler) const {
   // We should return at least a 2-tuple, (Class, (args)): the
   // necessary class object whose constructor we should call
   // (e.g. this), and the arguments necessary to reconstruct this
@@ -175,9 +206,21 @@ __reduce__(PyObject *self) const {
   }
   Py_DECREF(method);
 
+  BamWriter *writer = NULL;
+  if (pickler != NULL) {
+    PyObject *py_writer = PyObject_GetAttrString(pickler, "bamWriter");
+    if (py_writer == NULL) {
+      // It's OK if there's no bamWriter.
+      PyErr_Clear();
+    } else {
+      DTOOL_Call_ExtractThisPointerForType(py_writer, &Dtool_BamWriter, (void **)&writer);
+      Py_DECREF(py_writer);
+    }
+  }
+
   // First, streamify the object, if possible.
   string bam_stream;
-  if (!encode_to_bam_stream(bam_stream)) {
+  if (!encode_to_bam_stream(bam_stream, writer)) {
     ostringstream stream;
     stream << "Could not bamify object of type " << get_type() << "\n";
     string message = stream.str();
@@ -191,11 +234,28 @@ __reduce__(PyObject *self) const {
     return NULL;
   }
 
-  PyObject *func = find_global_decode(this_class, "pyDecodeTypedWritableFromBamStream");
-  if (func == NULL) {
-    PyErr_SetString(PyExc_TypeError, "Couldn't find pyDecodeTypedWritableFromBamStream()");
-    Py_DECREF(this_class);
-    return NULL;
+  PyObject *func;
+  if (writer != NULL) {
+    // The modified pickle support: call the "persistent" version of
+    // this function, which receives the unpickler itself as an
+    // additional parameter.
+    func = find_global_decode(this_class, "pyDecodeTypedWritableFromBamStreamPersist");
+    if (func == NULL) {
+      PyErr_SetString(PyExc_TypeError, "Couldn't find pyDecodeTypedWritableFromBamStreamPersist()");
+      Py_DECREF(this_class);
+      return NULL;
+    }
+
+  } else {
+    // The traditional pickle support: call the non-persistent version
+    // of this function.
+
+    func = find_global_decode(this_class, "pyDecodeTypedWritableFromBamStream");
+    if (func == NULL) {
+      PyErr_SetString(PyExc_TypeError, "Couldn't find pyDecodeTypedWritableFromBamStream()");
+      Py_DECREF(this_class);
+      return NULL;
+    }
   }
 
   PyObject *result = Py_BuildValue("(O(Os#))", func, this_class, bam_stream.data(), bam_stream.size());
@@ -220,7 +280,7 @@ __reduce__(PyObject *self) const {
 //               of them together.
 ////////////////////////////////////////////////////////////////////
 bool TypedWritable::
-encode_to_bam_stream(string &data) const {
+encode_to_bam_stream(string &data, BamWriter *writer) const {
   data.clear();
   ostringstream stream;
 
@@ -232,11 +292,13 @@ encode_to_bam_stream(string &data) const {
       return false;
     }
     
-    if (!dout.write_header(_bam_header)) {
-      return false;
-    }
+    if (writer == NULL) {
+      // Create our own writer.
     
-    {
+      if (!dout.write_header(_bam_header)) {
+        return false;
+      }
+
       BamWriter writer(&dout, "bam_stream");
       if (!writer.init()) {
         return false;
@@ -245,6 +307,14 @@ encode_to_bam_stream(string &data) const {
       if (!writer.write_object(this)) {
         return false;
       }
+    } else {
+      // Use the existing writer.
+      writer->set_target(&dout);
+      bool result = writer->write_object(this);
+      writer->set_target(NULL);
+      if (!result) {
+        return false;
+      }
     }
   }
 
@@ -281,25 +351,26 @@ encode_to_bam_stream(string &data) const {
 ////////////////////////////////////////////////////////////////////
 bool TypedWritable::
 decode_raw_from_bam_stream(TypedWritable *&ptr, ReferenceCount *&ref_ptr,
-                           const string &data) {
+                           const string &data, BamReader *reader) {
   istringstream stream(data);
 
   DatagramInputFile din;
   if (!din.open(stream)) {
     return false;
   }
+
+  if (reader == NULL) {
+    // Create a local reader.
   
-  string head;
-  if (!din.read_header(head, _bam_header.size())) {
-    return false;
-  }
-  
-  if (head != _bam_header) {
-    return false;
-  }
+    string head;
+    if (!din.read_header(head, _bam_header.size())) {
+      return false;
+    }
+    
+    if (head != _bam_header) {
+      return false;
+    }
 
-  // We scope this so we can control when the BamReader destructs.
-  {
     BamReader reader(&din, "bam_stream");
     if (!reader.init()) {
       return false;
@@ -321,8 +392,33 @@ decode_raw_from_bam_stream(TypedWritable *&ptr, ReferenceCount *&ref_ptr,
     // Protect the pointer from accidental deletion when the BamReader
     // goes away.
     ref_ptr->ref();
+
+  } else {
+    // Use the existing reader.
+    reader->set_source(&din);
+    if (!reader->read_object(ptr, ref_ptr)) {
+      reader->set_source(NULL);
+      return false;
+    }
+    
+    if (!reader->resolve()) {
+      reader->set_source(NULL);
+      return false;
+    }
+    
+    if (ref_ptr == NULL) {
+      // Can't support non-reference-counted objects.
+      reader->set_source(NULL);
+      return false;
+    }
+
+    // This BamReader isn't going away, but we have to balance the
+    // unref() below.
+    ref_ptr->ref();
+    reader->set_source(NULL);
   }
 
+
   // Now decrement the ref count, without deleting the object.  This
   // may reduce the reference count to zero, but that's OK--we trust
   // the caller to manage the reference count from this point on.
@@ -393,10 +489,47 @@ find_global_decode(PyObject *this_class, const char *func_name) {
 //       Access: Published
 //  Description: This wrapper is defined as a global function to suit
 //               pickle's needs.
+//
+//               This hooks into the native pickle and cPickle
+//               modules, but it cannot properly handle
+//               self-referential BAM objects.
 ////////////////////////////////////////////////////////////////////
 PyObject *
 py_decode_TypedWritable_from_bam_stream(PyObject *this_class, const string &data) {
-  // We need the function TypedWritable::decode_from_bam_stream, which
+  return py_decode_TypedWritable_from_bam_stream_persist(NULL, this_class, data);
+}
+#endif  // HAVE_PYTHON
+
+
+#ifdef HAVE_PYTHON
+////////////////////////////////////////////////////////////////////
+//     Function: py_decode_TypedWritable_from_bam_stream_persist
+//       Access: Published
+//  Description: This wrapper is defined as a global function to suit
+//               pickle's needs.
+//
+//               This is similar to
+//               py_decode_TypedWritable_from_bam_stream, but it
+//               provides additional support for the missing
+//               persistent-state object needed to properly support
+//               self-referential BAM objects written to the pickle
+//               stream.  This hooks into the pickle and cPickle
+//               modules implemented in direct/src/stdpy.
+////////////////////////////////////////////////////////////////////
+PyObject *
+py_decode_TypedWritable_from_bam_stream_persist(PyObject *pickler, PyObject *this_class, const string &data) {
+
+  PyObject *py_reader = NULL;
+  if (pickler != NULL) {
+    py_reader = PyObject_GetAttrString(pickler, "bamReader");
+    if (py_reader == NULL) {
+      // It's OK if there's no bamReader.
+      PyErr_Clear();
+    }
+  }
+
+  // We need the function PandaNode::decode_from_bam_stream or
+  // TypedWritableReferenceCount::decode_from_bam_stream, which
   // invokes the BamReader to reconstruct this object.  Since we use
   // the specific object's class as the pointer, we get the particular
   // instance of decode_from_bam_stream appropriate to this class.
@@ -406,7 +539,14 @@ py_decode_TypedWritable_from_bam_stream(PyObject *this_class, const string &data
     return NULL;
   }
 
-  PyObject *result = PyObject_CallFunction(func, (char *)"(s#)", data.data(), data.size());
+  PyObject *result;
+  if (py_reader != NULL){
+    result = PyObject_CallFunction(func, (char *)"(s#O)", data.data(), data.size(), py_reader);
+    Py_DECREF(py_reader);
+  } else {
+    result = PyObject_CallFunction(func, (char *)"(s#)", data.data(), data.size());
+  }
+
   if (result == NULL) {
     return NULL;
   }
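
Because TypedWritable itself gains __reduce_persist__(), any published, reference-counted Panda object gets the same treatment, not just NodePaths. A hedged sketch, assuming a Texture's sampler settings survive the bam round trip:

from pandac.PandaModules import Texture
from direct.stdpy import pickle

tex = Texture('smiley')
tex.setMagfilter(Texture.FTNearest)

# __reduce_persist__ encodes the texture to an in-memory bam stream and
# arranges for pyDecodeTypedWritableFromBamStreamPersist() to rebuild
# it, sharing the pickle's single BamReader on the way back in.
tex2 = pickle.loads(pickle.dumps(tex))
assert tex2.getMagfilter() == Texture.FTNearest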

+ 5 - 2
panda/src/putil/typedWritable.h

@@ -62,13 +62,15 @@ PUBLISHED:
 
 #ifdef HAVE_PYTHON
   PyObject *__reduce__(PyObject *self) const;
+  PyObject *__reduce_persist__(PyObject *self, PyObject *pickler) const;
 #endif
 
   INLINE string encode_to_bam_stream() const;
-  bool encode_to_bam_stream(string &data) const;
+  bool encode_to_bam_stream(string &data, BamWriter *writer = NULL) const;
   static bool decode_raw_from_bam_stream(TypedWritable *&ptr, 
                                          ReferenceCount *&ref_ptr,
-                                         const string &data);
+                                         const string &data,
+                                         BamReader *reader = NULL);
 
 public:
 #ifdef HAVE_PYTHON
@@ -112,6 +114,7 @@ private:
 #ifdef HAVE_PYTHON
 BEGIN_PUBLISH
 PyObject *py_decode_TypedWritable_from_bam_stream(PyObject *this_class, const string &data);
+PyObject *py_decode_TypedWritable_from_bam_stream_persist(PyObject *unpickler, PyObject *this_class, const string &data);
 END_PUBLISH
 #endif
 

+ 2 - 2
panda/src/putil/typedWritableReferenceCount.cxx

@@ -43,11 +43,11 @@ as_reference_count() {
 //               on the return value.
 ////////////////////////////////////////////////////////////////////
 PT(TypedWritableReferenceCount) TypedWritableReferenceCount::
-decode_from_bam_stream(const string &data) {
+decode_from_bam_stream(const string &data, BamReader *reader) {
   TypedWritable *object;
   ReferenceCount *ref_ptr;
 
-  if (!TypedWritable::decode_raw_from_bam_stream(object, ref_ptr, data)) {
+  if (!TypedWritable::decode_raw_from_bam_stream(object, ref_ptr, data, reader)) {
     return NULL;
   }
 

+ 1 - 1
panda/src/putil/typedWritableReferenceCount.h

@@ -41,7 +41,7 @@ public:
   virtual ReferenceCount *as_reference_count();
 
 PUBLISHED:
-  static PT(TypedWritableReferenceCount) decode_from_bam_stream(const string &data);
+  static PT(TypedWritableReferenceCount) decode_from_bam_stream(const string &data, BamReader *reader = NULL);
 
 public:
   virtual TypeHandle get_type() const {