fix Loader error reporting

rdb committed 11 years ago
commit c5dd6c69dd

+ 30 - 19
panda/src/pgraph/loader.cxx

@@ -117,7 +117,7 @@ load_bam_stream(istream &in) {
 ////////////////////////////////////////////////////////////////////
 //     Function: Loader::output
 //       Access: Published, Virtual
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void Loader::
 output(ostream &out) const {
@@ -224,7 +224,7 @@ load_file(const Filename &filename, const LoaderOptions &options) const {
     int num_dirs = model_path.get_num_directories();
     for (int i = 0; i < num_dirs; ++i) {
       Filename pathname(model_path.get_directory(i), this_filename);
-      PT(PandaNode) result = try_load_file(pathname, this_options, 
+      PT(PandaNode) result = try_load_file(pathname, this_options,
                                            requested_type);
       if (result != (PandaNode *)NULL) {
         return result;
@@ -286,15 +286,14 @@ try_load_file(const Filename &pathname, const LoaderOptions &options,
               LoaderFileType *requested_type) const {
   BamCache *cache = BamCache::get_global_ptr();
 
-  bool cache_only = (options.get_flags() & LoaderOptions::LF_cache_only) != 0;
+  bool allow_ram_cache = requested_type->get_allow_ram_cache(options);
 
-  if (requested_type->get_allow_ram_cache(options)) {
+  if (allow_ram_cache) {
     // If we're allowing a RAM cache, use the ModelPool to load the
     // file.
-    if (!cache_only || ModelPool::has_model(pathname)) {
-      PT(PandaNode) node = ModelPool::load_model(pathname, options);
-      if (node != (PandaNode *)NULL &&
-          (options.get_flags() & LoaderOptions::LF_allow_instance) == 0) {
+    PT(PandaNode) node = ModelPool::get_model(pathname, true);
+    if (node != (PandaNode *)NULL) {
+      if ((options.get_flags() & LoaderOptions::LF_allow_instance) == 0) {
         if (loader_cat.is_debug()) {
           loader_cat.debug()
             << "Model " << pathname << " found in ModelPool.\n";
@@ -320,15 +319,21 @@ try_load_file(const Filename &pathname, const LoaderOptions &options,
             << "Model " << pathname << " found in disk cache.\n";
             << "Model " << pathname << " found in disk cache.\n";
         }
         }
         PT(PandaNode) result = DCAST(PandaNode, record->get_data());
         PT(PandaNode) result = DCAST(PandaNode, record->get_data());
+
+        if (premunge_data) {
+          SceneGraphReducer sgr;
+          sgr.premunge(result, RenderState::make_empty());
+        }
+
         if (result->is_of_type(ModelRoot::get_class_type())) {
         if (result->is_of_type(ModelRoot::get_class_type())) {
           ModelRoot *model_root = DCAST(ModelRoot, result.p());
           ModelRoot *model_root = DCAST(ModelRoot, result.p());
           model_root->set_fullpath(pathname);
           model_root->set_fullpath(pathname);
           model_root->set_timestamp(record->get_source_timestamp());
           model_root->set_timestamp(record->get_source_timestamp());
-        }
 
 
-        if (premunge_data) {
-          SceneGraphReducer sgr;
-          sgr.premunge(result, RenderState::make_empty());
+          if (allow_ram_cache) {
+            // Store the loaded model in the RAM cache.
+            ModelPool::add_model(pathname, model_root);
+          }
         }
         }
         return result;
         return result;
       }
       }
@@ -339,19 +344,25 @@ try_load_file(const Filename &pathname, const LoaderOptions &options,
     loader_cat.debug()
       << "Model " << pathname << " not found in cache.\n";
   }
-  
+
+  bool cache_only = (options.get_flags() & LoaderOptions::LF_cache_only) != 0;
   if (!cache_only) {
     PT(PandaNode) result = requested_type->load_file(pathname, options, record);
-    if (result != (PandaNode *)NULL){ 
+    if (result != (PandaNode *)NULL) {
       if (record != (BamCacheRecord *)NULL) {
         record->set_data(result, result);
         cache->store(record);
       }
-      
+
       if (premunge_data) {
         SceneGraphReducer sgr;
         sgr.premunge(result, RenderState::make_empty());
       }
+
+      if (allow_ram_cache && result->is_of_type(ModelRoot::get_class_type())) {
+        // Store the loaded model in the RAM cache.
+        ModelPool::add_model(pathname, DCAST(ModelRoot, result.p()));
+      }
       return result;
     }
   }
@@ -454,7 +465,7 @@ bool Loader::
 try_save_file(const Filename &pathname, const LoaderOptions &options,
               PandaNode *node, LoaderFileType *requested_type) const {
   bool report_errors = ((options.get_flags() & LoaderOptions::LF_report_errors) != 0 || loader_cat.is_debug());
-  
+
   bool result = requested_type->save_file(pathname, options, node);
   return result;
 }
@@ -493,7 +504,7 @@ load_file_types() {
           loader_cat.debug()
             << "done loading file type module: " << name << endl;
         }
-        
+
       } else if (words.size() > 1) {
         // Multiple words: the first n words are filename extensions,
         // and the last word is the name of the library to load should
@@ -501,13 +512,13 @@ load_file_types() {
         LoaderFileTypeRegistry *registry = LoaderFileTypeRegistry::get_global_ptr();
         size_t num_extensions = words.size() - 1;
         string library_name = words[num_extensions];
-        
+
         for (size_t i = 0; i < num_extensions; i++) {
           string extension = words[i];
           if (extension[0] == '.') {
             extension = extension.substr(1);
           }
-          
+
           registry->register_deferred_type(extension, library_name);
         }
       }
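
Taken together, the loader.cxx hunks above rearrange Loader::try_load_file() so that the RAM cache (ModelPool), the disk (.bam) cache, and an actual load from disk are tried in that order, and so that a model obtained from either of the last two paths is stored back into the ModelPool when RAM caching is allowed. The following is only a condensed sketch of that flow, not the real implementation: the helper name is invented, and the LF_allow_instance copy handling, BamCache plumbing, premunging and error reporting are omitted.

#include "loaderFileType.h"
#include "loaderOptions.h"
#include "modelPool.h"
#include "modelRoot.h"

// Rough shape of Loader::try_load_file() after this commit (sketch only).
static PT(PandaNode)
sketch_try_load_file(const Filename &pathname, const LoaderOptions &options,
                     LoaderFileType *requested_type) {
  bool allow_ram_cache = requested_type->get_allow_ram_cache(options);

  // 1. RAM cache: only returns a model that was loaded before and is still
  //    up-to-date (ModelPool::get_model with verify = true).
  if (allow_ram_cache) {
    PT(PandaNode) node = ModelPool::get_model(pathname, true);
    if (node != (PandaNode *)NULL) {
      return node;
    }
  }

  // 2. Disk cache (omitted here): a hit is premunged, stamped and, new in
  //    this commit, also stored into the ModelPool when allowed.

  // 3. Fresh load, unless the caller asked for cache-only behaviour.
  if ((options.get_flags() & LoaderOptions::LF_cache_only) == 0) {
    PT(PandaNode) result = requested_type->load_file(pathname, options, NULL);
    if (result != (PandaNode *)NULL && allow_ram_cache &&
        result->is_of_type(ModelRoot::get_class_type())) {
      // Mirrors the new ModelPool::add_model() call in the hunk above.
      ModelPool::add_model(pathname, DCAST(ModelRoot, result.p()));
    }
    return result;
  }
  return NULL;
}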

+ 4 - 4
panda/src/pgraph/loader.h

@@ -85,16 +85,16 @@ PUBLISHED:
   BLOCKING INLINE void stop_threads();
   INLINE bool remove(AsyncTask *task);
 
-  BLOCKING INLINE PT(PandaNode) load_sync(const Filename &filename, 
+  BLOCKING INLINE PT(PandaNode) load_sync(const Filename &filename,
                                           const LoaderOptions &options = LoaderOptions()) const;
 
-  PT(AsyncTask) make_async_request(const Filename &filename, 
+  PT(AsyncTask) make_async_request(const Filename &filename,
                                    const LoaderOptions &options = LoaderOptions());
   INLINE void load_async(AsyncTask *request);
 
   INLINE bool save_sync(const Filename &filename, const LoaderOptions &options,
                         PandaNode *node) const;
-  PT(AsyncTask) make_async_save_request(const Filename &filename, 
+  PT(AsyncTask) make_async_save_request(const Filename &filename,
                                         const LoaderOptions &options,
                                         PandaNode *node);
   INLINE void save_async(AsyncTask *request);
@@ -140,7 +140,7 @@ public:
     return get_class_type();
   }
   virtual TypeHandle force_init_type() {init_type(); return get_class_type();}
-  
+
 private:
   static TypeHandle _type_handle;
 

+ 1 - 1
panda/src/pgraph/loaderFileTypeBam.cxx

@@ -110,7 +110,7 @@ load_file(const Filename &path, const LoaderOptions &options,
   time_t timestamp = bam_file.get_reader()->get_source()->get_timestamp();
 
   PT(PandaNode) node = bam_file.read_node(report_errors);
-  if (node->is_of_type(ModelRoot::get_class_type())) {
+  if (node != (PandaNode *)NULL && node->is_of_type(ModelRoot::get_class_type())) {
     ModelRoot *model_root = DCAST(ModelRoot, node.p());
     model_root->set_fullpath(path);
     model_root->set_timestamp(timestamp);
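
The one-line change above matters because BamFile::read_node() can return NULL (for instance for an empty or unreadable .bam), and the old code dereferenced the result unconditionally. A hedged caller-side illustration, with invented variable names, of why letting the NULL flow through helps error reporting:

// A failed read now comes back as a NULL node that the caller can report,
// instead of crashing inside LoaderFileTypeBam::load_file().
PT(PandaNode) node = bam_type->load_file(path, LoaderOptions(), NULL);
if (node == (PandaNode *)NULL) {
  loader_cat.error() << "Unable to read " << path << "\n";  // hypothetical message
}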

+ 16 - 1
panda/src/pgraph/modelPool.I

@@ -17,7 +17,8 @@
 //     Function: ModelPool::has_model
 //       Access: Public, Static
 //  Description: Returns true if the model has ever been loaded,
-//               false otherwise.
+//               false otherwise.  Note that this does not guarantee
+//               that the model is still up-to-date.
 ////////////////////////////////////////////////////////////////////
 INLINE bool ModelPool::
 has_model(const Filename &filename) {
@@ -45,6 +46,20 @@ verify_model(const Filename &filename) {
   return load_model(filename) != (ModelRoot *)NULL;
 }
 
+////////////////////////////////////////////////////////////////////
+//     Function: ModelPool::get_model
+//       Access: Public, Static
+//  Description: Returns the model that has already been previously
+//               loaded, or NULL otherwise.  If verify is true, it
+//               will check if the file is still up-to-date (and
+//               hasn't been modified in the meantime), and if not,
+//               will still return NULL.
+////////////////////////////////////////////////////////////////////
+INLINE ModelRoot *ModelPool::
+get_model(const Filename &filename, bool verify) {
+  return get_ptr()->ns_get_model(filename, verify);
+}
+
 ////////////////////////////////////////////////////////////////////
 //     Function: ModelPool::load_model
 //       Access: Public, Static
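
The new get_model() differs from load_model() in that it only consults the RAM cache and never goes to disk. A short hedged usage sketch (the path is invented):

// Look up a previously loaded model without triggering a disk load.
// verify = true also rejects an entry whose source file has changed on disk
// (subject to cache-check-timestamps).
Filename fn("models/environment.bam");
ModelRoot *cached = ModelPool::get_model(fn, true);
if (cached == (ModelRoot *)NULL) {
  // Not pooled, or no longer current: fall back to a full load, which also
  // stores the result back into the pool.
  cached = ModelPool::load_model(fn);
}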

+ 34 - 16
panda/src/pgraph/modelPool.cxx

@@ -52,13 +52,12 @@ ns_has_model(const Filename &filename) {
 }
 
 ////////////////////////////////////////////////////////////////////
-//     Function: ModelPool::ns_load_model
+//     Function: ModelPool::ns_get_model
 //       Access: Private
-//  Description: The nonstatic implementation of load_model().
+//  Description: The nonstatic implementation of get_model().
 ////////////////////////////////////////////////////////////////////
 ModelRoot *ModelPool::
-ns_load_model(const Filename &filename, const LoaderOptions &options) {
-  VirtualFileSystem *vfs = VirtualFileSystem::get_global_ptr();
+ns_get_model(const Filename &filename, bool verify) {
 
   PT(ModelRoot) cached_model;
   bool got_cached_model = false;
@@ -74,7 +73,7 @@ ns_load_model(const Filename &filename, const LoaderOptions &options) {
     }
   }
 
-  if (got_cached_model) {
+  if (got_cached_model && verify) {
     if (pgraph_cat.is_debug()) {
       pgraph_cat.debug()
         << "ModelPool found " << cached_model << " for " << filename << "\n";
@@ -85,6 +84,7 @@ ns_load_model(const Filename &filename, const LoaderOptions &options) {
       // exist (or the model could not be loaded for some reason).
       if (cache_check_timestamps) {
         // Check to see if there is a file there now.
+        VirtualFileSystem *vfs = VirtualFileSystem::get_global_ptr();
         if (vfs->exists(filename)) {
           // There is, so try to load it.
           got_cached_model = false;
@@ -93,9 +93,10 @@ ns_load_model(const Filename &filename, const LoaderOptions &options) {
     } else {
       // This filename was previously attempted, and successfully
       // loaded.
-      if (cache_check_timestamps && cached_model->get_timestamp() != 0 && 
+      if (cache_check_timestamps && cached_model->get_timestamp() != 0 &&
           !cached_model->get_fullpath().empty()) {
         // Compare the timestamp to the file on-disk.
+        VirtualFileSystem *vfs = VirtualFileSystem::get_global_ptr();
         PT(VirtualFile) vfile = vfs->get_file(cached_model->get_fullpath());
         if (vfile == NULL) {
           // The file has disappeared!  Look further along the model-path.
@@ -116,12 +117,29 @@ ns_load_model(const Filename &filename, const LoaderOptions &options) {
         << "ModelPool returning " << cached_model << " for " << filename << "\n";
         << "ModelPool returning " << cached_model << " for " << filename << "\n";
     }
     }
     return cached_model;
     return cached_model;
+  } else {
+    return NULL;
+  }
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: ModelPool::ns_load_model
+//       Access: Private
+//  Description: The nonstatic implementation of load_model().
+////////////////////////////////////////////////////////////////////
+ModelRoot *ModelPool::
+ns_load_model(const Filename &filename, const LoaderOptions &options) {
+
+  // First check if it has already been loaded and is still current.
+  PT(ModelRoot) cached_model = ns_get_model(filename, true);
+  if (cached_model != (ModelRoot *)NULL) {
+    return cached_model;
   }
   }
 
 
   // Look on disk for the current file.
   // Look on disk for the current file.
   LoaderOptions new_options(options);
   LoaderOptions new_options(options);
   new_options.set_flags((new_options.get_flags() | LoaderOptions::LF_no_ram_cache) &
   new_options.set_flags((new_options.get_flags() | LoaderOptions::LF_no_ram_cache) &
-                        ~(LoaderOptions::LF_search | LoaderOptions::LF_report_errors));
+                        ~LoaderOptions::LF_search);
 
 
   Loader *model_loader = Loader::get_global_ptr();
   Loader *model_loader = Loader::get_global_ptr();
   PT(PandaNode) panda_node = model_loader->load_sync(filename, new_options);
   PT(PandaNode) panda_node = model_loader->load_sync(filename, new_options);
@@ -133,7 +151,7 @@ ns_load_model(const Filename &filename, const LoaderOptions &options) {
   } else {
     if (panda_node->is_of_type(ModelRoot::get_class_type())) {
       node = DCAST(ModelRoot, panda_node);
-      
+
     } else {
       // We have to construct a ModelRoot node to put it under.
       node = new ModelRoot(filename);
@@ -154,10 +172,6 @@ ns_load_model(const Filename &filename, const LoaderOptions &options) {
       return (*ti).second;
     }
 
-    if (pgraph_cat.is_debug()) {
-      pgraph_cat.debug()
-        << "ModelPool storing " << node << " for " << filename << "\n";
-    }
     _models[filename] = node;
   }
 
@@ -172,6 +186,10 @@ ns_load_model(const Filename &filename, const LoaderOptions &options) {
 void ModelPool::
 ns_add_model(const Filename &filename, ModelRoot *model) {
   LightMutexHolder holder(_lock);
+  if (pgraph_cat.is_debug()) {
+    pgraph_cat.debug()
+      << "ModelPool storing " << model << " for " << filename << "\n";
+  }
   // We blow away whatever model was there previously, if any.
   _models[filename] = model;
 }
@@ -270,19 +288,19 @@ ns_list_contents(ostream &out) const {
   LightMutexHolder holder(_lock);
 
   out << "model pool contents:\n";
-  
+
   Models::const_iterator ti;
   int num_models = 0;
   for (ti = _models.begin(); ti != _models.end(); ++ti) {
     if ((*ti).second != NULL) {
       ++num_models;
       out << (*ti).first << "\n"
-          << "  (count = " << (*ti).second->get_model_ref_count() 
+          << "  (count = " << (*ti).second->get_model_ref_count()
           << ")\n";
     }
   }
-  
-  out << "total number of models: " << num_models << " (plus " 
+
+  out << "total number of models: " << num_models << " (plus "
       << _models.size() - num_models << " entries for nonexistent files)\n";
 }
 
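One detail in this file ties directly to the commit message: ns_load_model() used to strip LF_report_errors from the options it handed to the Loader, so a missing or broken model could fail silently. The new flag mask (reproduced from the hunk above, with interpretive comments added; the reasoning is inferred, not stated in the diff) only forces LF_no_ram_cache and clears LF_search:

LoaderOptions new_options(options);
// LF_no_ram_cache keeps the Loader from bouncing back into the ModelPool,
// which is already being consulted here; clearing LF_search presumably
// reflects that the pool is keyed by the exact filename it was given.
// LF_report_errors is no longer stripped, so a failed load is reported to
// the caller again -- the "error reporting" fix named in the commit title.
new_options.set_flags((new_options.get_flags() | LoaderOptions::LF_no_ram_cache) &
                      ~LoaderOptions::LF_search);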

+ 2 - 0
panda/src/pgraph/modelPool.h

@@ -49,6 +49,7 @@ class EXPCL_PANDA_PGRAPH ModelPool {
 PUBLISHED:
   INLINE static bool has_model(const Filename &filename);
   INLINE static bool verify_model(const Filename &filename);
+  INLINE static ModelRoot *get_model(const Filename &filename, bool verify);
   BLOCKING INLINE static ModelRoot *load_model(const Filename &filename,
                                                const LoaderOptions &options = LoaderOptions());
 
@@ -70,6 +71,7 @@ private:
   INLINE ModelPool();
 
   bool ns_has_model(const Filename &filename);
+  ModelRoot *ns_get_model(const Filename &filename, bool verify);
   ModelRoot *ns_load_model(const Filename &filename,
                            const LoaderOptions &options);
   void ns_add_model(const Filename &filename, ModelRoot *model);