Browse Source

Merge branch 'master' into mingw-patch

rdb 11 years ago
parent
commit
c62aea5cd7
100 changed files with 1151 additions and 6665 deletions
  1. 0 8
      direct/src/autorestart/Sources.pp
  2. 0 901
      direct/src/autorestart/autorestart.c
  3. 1 2
      direct/src/directscripts/Doxyfile.python
  4. 0 29
      direct/src/heapq/Sources.pp
  5. 0 240
      direct/src/heapq/heapq.cxx
  6. 1 1
      direct/src/http/LandingPage.py
  7. 1 1
      direct/src/http/LandingPageHTML.py
  8. 1 1
      direct/src/http/WebRequest.py
  9. 0 504
      direct/src/pyinst/Builder.py
  10. 0 0
      direct/src/pyinst/Sources.pp
  11. 0 0
      direct/src/pyinst/__init__.py
  12. 0 246
      direct/src/pyinst/archive.py
  13. 0 226
      direct/src/pyinst/archive_rt.py
  14. 0 81
      direct/src/pyinst/archivebuilder.py
  15. 0 169
      direct/src/pyinst/bindepend.py
  16. 0 204
      direct/src/pyinst/carchive.py
  17. 0 157
      direct/src/pyinst/carchive_rt.py
  18. 0 178
      direct/src/pyinst/finder.py
  19. 0 138
      direct/src/pyinst/icon.py
  20. 0 487
      direct/src/pyinst/imputil.py
  21. 0 91
      direct/src/pyinst/installutils.py
  22. 0 85
      direct/src/pyinst/ltoc.py
  23. 0 42
      direct/src/pyinst/mkarchive.py
  24. 0 436
      direct/src/pyinst/modulefinder.py
  25. 0 317
      direct/src/pyinst/resource.py
  26. 0 131
      direct/src/pyinst/tocfilter.py
  27. 0 1251
      direct/src/showbase/ElementTree.py
  28. 1 1
      direct/src/showbase/HTMLTree.py
  29. 0 1
      direct/src/showbase/PythonUtil.py
  30. 0 71
      direct/src/test/ModelScreenShot.py
  31. 0 6
      direct/src/test/ModelScreenShotGlobals.py
  32. 0 0
      direct/src/test/Sources.pp
  33. 0 0
      direct/src/test/__init__.py
  34. 0 12
      dtool/LocalSetup.pp
  35. 6 1
      dtool/src/dtoolbase/dtoolbase_cc.h
  36. 22 0
      dtool/src/dtoolbase/stl_compares.I
  37. 17 3
      dtool/src/dtoolbase/stl_compares.h
  38. 1 2
      dtool/src/dtoolbase/typeHandle.h
  39. 27 0
      dtool/src/dtoolbase/typeRegistry.I
  40. 7 12
      dtool/src/dtoolbase/typeRegistry.cxx
  41. 4 1
      dtool/src/dtoolbase/typeRegistry.h
  42. 0 5
      dtool/src/dtoolbase/typeRegistryNode.cxx
  43. 4 5
      makepanda/makepanda.py
  44. 2 2
      makepanda/makepandacore.py
  45. 0 3
      panda/src/audiotraits/config_fmodAudio.cxx
  46. 0 3
      panda/src/audiotraits/config_fmodAudio.h
  47. 0 4
      panda/src/audiotraits/config_openalAudio.cxx
  48. 0 3
      panda/src/audiotraits/config_openalAudio.h
  49. 0 4
      panda/src/audiotraits/fmodAudioManager.cxx
  50. 0 4
      panda/src/audiotraits/fmodAudioManager.h
  51. 0 5
      panda/src/audiotraits/fmodAudioSound.cxx
  52. 0 11
      panda/src/audiotraits/fmodAudioSound.h
  53. 0 1
      panda/src/audiotraits/fmod_audio_composite1.cxx
  54. 0 4
      panda/src/audiotraits/openalAudioManager.cxx
  55. 20 24
      panda/src/audiotraits/openalAudioManager.h
  56. 0 4
      panda/src/audiotraits/openalAudioSound.cxx
  57. 0 6
      panda/src/audiotraits/openalAudioSound.h
  58. 1 1
      panda/src/cull/drawCullHandler.cxx
  59. 1 3
      panda/src/cull/drawCullHandler.h
  60. 35 13
      panda/src/display/graphicsStateGuardian.cxx
  61. 17 0
      panda/src/express/memoryUsage.I
  62. 0 18
      panda/src/express/memoryUsage.cxx
  63. 1 1
      panda/src/express/memoryUsage.h
  64. 88 0
      panda/src/express/nodePointerTo.I
  65. 22 0
      panda/src/express/nodePointerTo.h
  66. 37 0
      panda/src/express/nodePointerToBase.I
  67. 5 0
      panda/src/express/nodePointerToBase.h
  68. 33 25
      panda/src/express/pointerTo.I
  69. 8 6
      panda/src/express/pointerTo.h
  70. 32 9
      panda/src/express/pointerToBase.I
  71. 2 1
      panda/src/express/pointerToBase.h
  72. 7 2
      panda/src/express/pointerToVoid.I
  73. 1 1
      panda/src/express/pointerToVoid.h
  74. 107 33
      panda/src/glstuff/glGraphicsStateGuardian_src.cxx
  75. 2 0
      panda/src/glstuff/glGraphicsStateGuardian_src.h
  76. 72 21
      panda/src/gobj/geom.I
  77. 23 15
      panda/src/gobj/geom.h
  78. 23 19
      panda/src/gobj/geomMunger.cxx
  79. 77 27
      panda/src/gobj/geomVertexData.I
  80. 108 104
      panda/src/gobj/geomVertexData.cxx
  81. 33 24
      panda/src/gobj/geomVertexData.h
  82. 12 1
      panda/src/linmath/lmatrix3_src.I
  83. 9 6
      panda/src/linmath/lmatrix3_src.cxx
  84. 7 6
      panda/src/linmath/lmatrix3_src.h
  85. 13 1
      panda/src/linmath/lmatrix4_src.I
  86. 9 4
      panda/src/linmath/lmatrix4_src.cxx
  87. 8 7
      panda/src/linmath/lmatrix4_src.h
  88. 2 10
      panda/src/pgraph/config_pgraph.cxx
  89. 0 1
      panda/src/pgraph/config_pgraph.h
  90. 0 25
      panda/src/pgraph/cullBin.I
  91. 0 34
      panda/src/pgraph/cullBin.cxx
  92. 0 3
      panda/src/pgraph/cullBin.h
  93. 73 0
      panda/src/pgraph/cullBinManager.I
  94. 34 25
      panda/src/pgraph/cullBinManager.cxx
  95. 13 2
      panda/src/pgraph/cullBinManager.h
  96. 1 1
      panda/src/pgraph/cullPlanes.cxx
  97. 41 21
      panda/src/pgraph/cullResult.cxx
  98. 1 1
      panda/src/pgraph/cullResult.h
  99. 60 0
      panda/src/pgraph/cullTraverser.I
  100. 18 70
      panda/src/pgraph/cullTraverser.cxx

+ 0 - 8
direct/src/autorestart/Sources.pp

@@ -1,8 +0,0 @@
-#begin bin_target
-  // This program only compiles on Unix.
-  #define BUILD_TARGET $[UNIX_PLATFORM]
-  #define C++FLAGS -DWITHIN_PANDA
-
-  #define TARGET autorestart
-  #define SOURCES autorestart.c
-#end bin_target

+ 0 - 901
direct/src/autorestart/autorestart.c

@@ -1,901 +0,0 @@
-/* Filename: autorestart.c
- * Created by:  drose (05Sep02)
- *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- *
- * PANDA 3D SOFTWARE
- * Copyright (c) Carnegie Mellon University.  All rights reserved.
- *
- * All use of this software is subject to the terms of the revised BSD
- * license.  You should have received a copy of this license along
- * with this source code in a file named "LICENSE."
- *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-#ifdef WITHIN_PANDA
-#include "dtoolbase.h"
-#endif
-
-#include <getopt.h>
-#include <stdio.h>
-#include <errno.h>
-#include <string.h>  /* for strerror */
-#include <unistd.h>
-#include <sys/types.h>
-#include <sys/wait.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <time.h>
-#include <signal.h>
-#include <stdlib.h>
-#include <assert.h>
-#include <pwd.h>
-#include <grp.h>
-
-#ifdef HAVE_LIBCURL
-#include <curl/curl.h>
-#endif
-
-/* The maximum number of seconds to wait for a process to go away
-   after issuing SIGTERM.  This is only used in watchdog mode, when -W
-   is provided on the command line. */
-#define MAX_WAITTERM_SEC 10
-
-char **params = NULL;
-char *logfile_name = NULL;
-char *pidfile_name = NULL;
-int dont_fork = 0;
-char *watchdog_url = NULL;
-int watchdog_start_sec = 0;
-int watchdog_cycle_sec = 0;
-int watchdog_timeout_sec = 0;
-char *startup_username = NULL;
-char *startup_groupname = NULL;
-char *startup_chdir = NULL;
-int logfile_fd = -1;
-int stop_on_terminate = 0;
-int stop_always = 0;
-char *respawn_script = NULL;
-int respawn_count_time = 0;
-
-/* If requested, delay these many seconds between restart attempts */
-int respawn_delay_time = 5;
-
-
-/* We shouldn't respawn more than (spam_respawn_count - 1) times over
-   spam_respawn_time seconds. */
-int spam_respawn_count = 5;
-int spam_respawn_time = 60;
-int spam_restart_delay_time = 600;  /* Optionally, do not exit if we spam too much; simply sleep for this many seconds*/
-
-
-
-pid_t child_pid = 0;
-pid_t watchdog_pid = 0;
-
-#define TIME_BUFFER_SIZE 128
-
-/* Keep track of the frequency with which we respawn, so we can report
-   this to our respawn script. */
-typedef struct respawn_record_struct {
-  time_t _time;
-  struct respawn_record_struct *_next;
-} respawn_record;
-
-respawn_record *respawns = NULL;
-
-int
-record_respawn(time_t now) {
-  /* Records the respawning event in the respawn_record, and returns
-     the number of respawns in the last respawn_count_time
-     interval. */
-  respawn_record *rec;
-  respawn_record *next;
-  int count;
-
-  if (respawn_count_time <= 0) {
-    /* We're not tracking respawns if respawn_count_time is 0. */
-    return 0;
-  }
-
-  rec = (respawn_record *)malloc(sizeof(respawn_record));
-  rec->_time = now;
-  rec->_next = respawns;
-  respawns = rec;
-
-  /* Now walk through the rest of the list and count up the number of
-     respawn events until we reach a record more than
-     respawn_count_time seconds old. */
-  count = 0;
-  while (rec->_next != NULL &&
-         (now - rec->_time) <= respawn_count_time) {
-    rec = rec->_next;
-    count++;
-  }
-
-  /* The remaining respawn records get removed. */
-  next = rec->_next;
-  rec->_next = NULL;
-  while (next != NULL) {
-    rec = next;
-    next = rec->_next;
-    free(rec);
-  }
-
-  return count;
-}
-
-void
-invoke_respawn_script(time_t now) {
-  char buffer[32];
-  char *new_command;
-  int new_command_length;
-
-  /* The process is about to be respawned; run the script that we were
-     given on the command line. */
-  if (respawn_count_time <= 0) {
-    /* We're not counting respawn times, so just run the script
-       directly. */
-    system(respawn_script);
-
-  } else {
-    /* We are counting respawn times, so append that information as a
-       parameter to the command. */
-    sprintf(buffer, " %d", record_respawn(now));
-    new_command_length = strlen(respawn_script) + strlen(buffer);
-    new_command = (char *)malloc(new_command_length + 1);
-    strcpy(new_command, respawn_script);
-    strcat(new_command, buffer);
-    assert(strlen(new_command) == new_command_length);
-
-    system(new_command);
-
-    free(new_command);
-  }
-}
-
-/* A callback function passed to libcurl that simply discards the data
-   retrieved from the server.  We only care about the HTTP status. */
-size_t 
-watchdog_bitbucket(void *ptr, size_t size, size_t nmemb, void *userdata) {
-  return size * nmemb;
-}
-
-/* Waits up to timeout_ms for a particular child to terminate.
-   Returns 0 if the timeout expires. */
-pid_t 
-waitpid_timeout(pid_t child_pid, int *status_ptr, int timeout_ms) {
-  pid_t result;
-  struct timeval now, tv;
-  int now_ms, start_ms, elapsed_ms;
-  
-  gettimeofday(&now, NULL);
-  start_ms = now.tv_sec * 1000 + now.tv_usec / 1000;
-    
-  result = waitpid(child_pid, status_ptr, WNOHANG);
-  while (result == 0) {
-    gettimeofday(&now, NULL);
-    now_ms = now.tv_sec * 1000 + now.tv_usec / 1000;
-    elapsed_ms = now_ms - start_ms;
-    
-    if (elapsed_ms > timeout_ms) {
-      /* Tired of waiting. */
-      return 0;
-    }
-    
-    /* Yield the timeslice and wait some more. */
-    tv.tv_sec = 0;
-    tv.tv_usec = 1;
-    select(0, NULL, NULL, NULL, &tv);
-    result = waitpid(child_pid, status_ptr, WNOHANG);
-  }
-  if (result == -1) {
-    perror("waitpid");
-  }
-
-  return result;
-}
-
-
-/* Poll the requested URL until a failure or timeout occurs, or until
-   the child terminates on its own.  Returns 1 on HTTP failure or
-   timeout, 0 on self-termination.  In either case, *status_ptr is
-   filled in with the status value returned by waitpid().*/
-int 
-do_watchdog(int *status_ptr) {
-#ifndef HAVE_LIBCURL
-  fprintf(stderr, "Cannot watchdog; no libcurl available.\n");
-  return 0;
-#else  /* HAVE_LIBCURL */
-
-  CURL *curl;
-  CURLcode res;
-  char error_buffer[CURL_ERROR_SIZE];
-  pid_t wresult;
-
-  // Before we start polling the URL, wait at least start milliseconds.
-  wresult = waitpid_timeout(child_pid, status_ptr, watchdog_start_sec * 1000);
-  if (wresult == child_pid) {
-    // The child terminated on its own before we got started.
-    return 0;
-  }
-
-  curl = curl_easy_init();
-  if (!curl) {
-    fprintf(stderr, "Cannot watchdog; curl failed to init.\n");
-    return 0;
-  }
-
-  curl_easy_setopt(curl, CURLOPT_URL, watchdog_url);
-  /*curl_easy_setopt(curl, CURLOPT_VERBOSE, 1);*/
-  curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, watchdog_timeout_sec * 1000);
-  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, watchdog_bitbucket);
-  curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, error_buffer);
-  curl_easy_setopt(curl, CURLOPT_USERAGENT, "autorestart");
-  curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
-  curl_easy_setopt(curl, CURLOPT_FRESH_CONNECT, 1);
-  curl_easy_setopt(curl, CURLOPT_FORBID_REUSE, 1);
-
-  res = curl_easy_perform(curl);
-  while (res == 0) {
-    /* 0: The HTTP request finished successfully (but might or might
-       not have returned an error code like a 404). */
-    long http_response = 0;
-    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &http_response);
-    if ((http_response / 100) != 2) {
-      /* Anything in the 200 range is deemed success.  Anything else
-         is deemed failure. */
-      fprintf(stderr, "%s returned %ld\n", watchdog_url, http_response);
-      break;
-    }
-
-    wresult = waitpid_timeout(child_pid, status_ptr, watchdog_cycle_sec * 1000);
-    if (wresult == child_pid) {
-      /* The process terminated on its own.  Return 0 to indicate this. */
-      return 0;
-    }
-
-    res = curl_easy_perform(curl);
-  }
-
-  curl_easy_cleanup(curl);
-
-  /* Failed to retrieve the watchdog URL. */
-  if (res != 0) {
-    fprintf(stderr, "Failed to contact %s: %s\n", watchdog_url, error_buffer);
-  }
-  
-  /* Kill the child process and wait for it to go away. */
-  kill(child_pid, SIGTERM);
-
-  pid_t result = waitpid_timeout(child_pid, status_ptr, MAX_WAITTERM_SEC * 1000);
-  if (result != child_pid) {
-    if (result == -1) {
-      perror("waitpid");
-    } else {
-      /* SIGTERM didn't make the process die.  Try SIGKILL. */
-      fprintf(stderr, "Force-killing child process\n");
-      kill(child_pid, SIGKILL);
-      result = waitpid_timeout(child_pid, status_ptr, MAX_WAITTERM_SEC * 1000);
-      if (result == -1) {
-        perror("waitpid");
-      }
-    }
-  }
-
-  /* Return 1 to indicate we killed the child due to an HTTP error. */
-  return 1;
-#endif  /* HAVE_LIBCURL */
-}
-
-void
-exec_process() {
-  /* First, output the command line to the log file. */
-  char **p;
-  for (p = params; *p != NULL; ++p) {
-    fprintf(stderr, "%s ", *p);
-  }
-  fprintf(stderr, "\n");
-  execvp(params[0], params);
-  fprintf(stderr, "Cannot exec %s: %s\n", params[0], strerror(errno));
-
-  /* Exit with a status of 0, to indicate to the parent process that
-     we should stop. */
-  exit(0); 
-}
-
-int
-spawn_process() {
-  /* Spawns the child process.  Returns true if the process terminated
-     by itself and should be respawned, false if it was explicitly
-     killed (or some other error condition exists), and it should not
-     respawn any more. */
-  pid_t wresult;
-  int status;
-  int error_exit;
-
-  child_pid = fork();
-  if (child_pid < 0) {
-    /* Fork error. */
-    perror("fork");
-    return 0;
-  }
-
-  if (child_pid == 0) {
-    /* Child.  Exec the process. */
-    fprintf(stderr, "Child pid is %d.\n", getpid());
-    exec_process();
-    /* Shouldn't get here. */
-    exit(1);
-  }
-
-  /* Parent. */
-
-  error_exit = 0;
-
-  if (watchdog_url != NULL) {
-    /* If we're watchdogging, then go check the URL.  This function
-       won't return until the URL fails or the child exits. */
-    error_exit = do_watchdog(&status);
-
-  } else {
-    /* If we're not watchdogging, then just wait for the child to
-       terminate, and diagnose the reason. */
-    wresult = waitpid(child_pid, &status, 0);
-    if (wresult < 0) {
-      perror("waitpid");
-      return 0;
-    }
-  }
-
-  /* Now that we've returned from waitpid, clear the child pid number
-     so our signal handler doesn't get too confused. */
-  child_pid = 0;
-
-  if (error_exit) {
-    /* An HTTP error exit is a reason to respawn. */
-    return 1;
-
-  } else if (WIFSIGNALED(status)) {
-    int signal = WTERMSIG(status);
-    fprintf(stderr, "\nprocess caught signal %d.\n\n", signal);
-    /* A signal exit is a reason to respawn unless the signal is TERM
-       or KILL. */
-    return !stop_on_terminate || (signal != SIGTERM && signal != SIGKILL);
-
-  } else {
-    int exit_status = WEXITSTATUS(status);
-    fprintf(stderr, "\nprocess exited with status %d.\n\n", WEXITSTATUS(status));
-    /* Normal exit is a reason to respawn if the status indicates failure. */
-    return !stop_on_terminate || (exit_status != 0);
-  }
-}
-
-void
-sigterm_handler() {
-  pid_t wresult;
-  int status;
-  time_t now;
-  char time_buffer[TIME_BUFFER_SIZE];
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-
-  fprintf(stderr, "\nsigterm caught at %s; shutting down.\n", time_buffer);
-  if (child_pid == 0) {
-    fprintf(stderr, "no child process.\n\n");
-
-  } else {
-    kill(child_pid, SIGTERM);
-
-    wresult = waitpid(child_pid, &status, 0);
-    if (wresult < 0) {
-      perror("waitpid");
-    } else {
-      fprintf(stderr, "child process terminated.\n\n");
-    }
-  }
-  exit(1);
-}
-
-void
-sighup_handler() {
-  time_t now;
-  char time_buffer[TIME_BUFFER_SIZE];
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-
-  fprintf(stderr, "\nsighup caught at %s.\n", time_buffer);
-  if (child_pid == 0) {
-    fprintf(stderr, "no child process.\n\n");
-
-  } else {
-    kill(child_pid, SIGHUP);
-  }
-}
-
-void 
-sigalarm_handler() {
-  fprintf(stderr, "sleep epoch was complete.\n");
-}
-
-void
-do_autorestart() {
-  char time_buffer[TIME_BUFFER_SIZE];
-  time_t now;
-  time_t *spam_respawn = NULL;
-  int sri, num_sri;
-  struct sigaction sa;
-
-  if (spam_respawn_count > 1) {
-    spam_respawn = (time_t *)malloc(sizeof(time_t) * spam_respawn_count);
-  }
-
-  /* Make our process its own process group. */
-  setpgid(0, 0);
-
-  /* Set up a signal handler to trap SIGTERM. */
-  sa.sa_handler = sigterm_handler;
-  sigemptyset(&sa.sa_mask);
-  sa.sa_flags = 0;
-  if (sigaction(SIGTERM, &sa, NULL) < 0) {
-    perror("sigaction");
-  }
-
-  /* Set up a signal handler to trap SIGHUP.  We pass this into the
-     child. */
-  sa.sa_handler = sighup_handler;
-  sigemptyset(&sa.sa_mask);
-  sa.sa_flags = 0;
-  if (sigaction(SIGHUP, &sa, NULL) < 0) {
-    perror("sigaction");
-  }
-
-  if (logfile_fd >= 0) {
-    /* If we have a logfile, dup it onto stdout and stderr. */
-    dup2(logfile_fd, STDOUT_FILENO);
-    dup2(logfile_fd, STDERR_FILENO);
-    close(logfile_fd);
-  }
-
-  /* Make sure stdin is closed. */
-  close(STDIN_FILENO);
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-  fprintf(stderr, "autorestart begun at %s.\n", time_buffer);
-
-  if (pidfile_name != NULL) {
-    unlink(pidfile_name);
-    FILE *pidfile = fopen(pidfile_name, "w");
-    if (pidfile == NULL) {
-      fprintf(stderr, "Could not write pidfile %s\n", pidfile_name);
-    } else {
-      fprintf(pidfile, "%d\n", getpid());
-      fclose(pidfile);
-    }
-  }
-
-  sri = 1;
-  num_sri = 1;
-  if (spam_respawn_count > 1) {
-    spam_respawn[1] = now;
-  }
-  
-  while (spawn_process()) {
-    now = time(NULL);
-
-    if (respawn_script != NULL) {
-      invoke_respawn_script(now);
-    }
-    
-    if (respawn_delay_time) {
-      sleep(respawn_delay_time);
-    }
-
-    /* Make sure we're not respawning too fast. */
-    if (spam_respawn_count > 1) {
-      sri = (sri + 1) % spam_respawn_count;
-      spam_respawn[sri] = now;
-      if (num_sri < spam_respawn_count) {
-        num_sri++;
-      } else {
-        time_t last = spam_respawn[(sri + 1) % spam_respawn_count];
-        if (now - last < spam_respawn_time) 
-        {
-          if(!spam_restart_delay_time) 
-          {
-            fprintf(stderr, "respawning too fast, giving up.\n");
-            break;
-          } 
-          else 
-          {
-            num_sri = 1; /* reset num_sri */
-            fprintf(stderr, "respawning too fast, will sleep for %d seconds.\n", spam_restart_delay_time);
-            signal (SIGALRM, sigalarm_handler);
-            alarm(spam_restart_delay_time);
-            pause();
-            signal (SIGALRM, SIG_IGN);
-          }
-        }
-      }
-    }
-    
-    if (stop_always) {
-      fprintf(stderr, "instructed to not autorestart, exiting.\n");
-      break;
-    }
-      
-    strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-    fprintf(stderr, "respawning at %s.\n", time_buffer);
-  }
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-  fprintf(stderr, "autorestart terminated at %s.\n", time_buffer);
-  exit(0);
-}
-
-void
-double_fork() {
-  pid_t child, grandchild, wresult;
-  int status;
-
-  /* Fork once, then again, to disassociate the child from the command
-     shell process group. */
-  child = fork();
-  if (child < 0) {
-    /* Failure to fork. */
-    perror("fork");
-    exit(1);
-  }
-
-  if (child == 0) {
-    /* Child.  Fork again. */
-    grandchild = fork();
-    if (grandchild < 0) {
-      perror("fork");
-      exit(1);
-    }
-
-    if (grandchild == 0) {
-      /* Grandchild.  Begin useful work. */
-      do_autorestart();
-      /* Shouldn't get here. */
-      exit(1);
-    }
-
-    /* Child.  Report the new pid, then terminate gracefully. */
-    fprintf(stderr, "Spawned, monitoring pid is %d.\n", grandchild);
-    exit(0);
-  }
-
-  /* Parent.  Wait for the child to terminate, then return. */
-  wresult = waitpid(child, &status, 0);
-  if (wresult < 0) {
-    perror("waitpid");
-    exit(1);
-  }
-
-  if (!WIFEXITED(status)) {
-    if (WIFSIGNALED(status)) {
-      fprintf(stderr, "child caught signal %d unexpectedly.\n", WTERMSIG(status));
-    } else {
-      fprintf(stderr, "child exited with status %d.\n", WEXITSTATUS(status));
-    }
-    exit(1);
-  }
-}
-
-void
-usage() {
-  fprintf(stderr,
-          "\n"
-          "autorestart [opts] program [args . . . ]\n"
-          "autorestart -h\n\n");
-}
-
-void
-help() {
-  usage();
-  fprintf(stderr,
-          "This program is used to run a program as a background task and\n"
-          "automatically restart it should it terminate for any reason other\n"
-          "than normal exit or explicit user kill.\n\n"
-
-          "If the program exits with a status of 0, indicating successful\n"
-          "completion, it is not restarted.\n\n"
-
-          "If the program is terminated via a TERM or KILL signal (e.g. via\n"
-          "kill [pid] or kill -9 [pid]), it is assumed the user meant for the\n"
-          "process to stop, and it is not restarted.\n\n"
-
-          "Options:\n\n"
-
-          "  -l logfilename\n"
-          "     Route stdout and stderr from the child process into the indicated\n"
-          "     log file.\n\n"
-
-          "  -p pidfilename\n"
-          "     Write the pid of the monitoring process to the indicated pidfile.\n\n"
-          "  -f\n"
-          "     Don't fork autorestart itself; run it as a foreground process. \n"
-          "     (Normally, autorestart forks itself to run as a background process.)\n"
-          "     In this case, the file named by -p is not used.\n\n"
-          
-          "  -n\n"
-          "     Do not attempt to restart the process under any circumstance.\n"
-          "     The program can still be used to execute a script on abnormal\n"
-          "     process termination.\n\n"
-
-          "  -t\n"
-          "     Stop on terminate: don't restart if the child process exits\n"
-          "     normally or is killed with a SIGTERM.  With this flag, the\n"
-          "     child process will be restarted only if it exits with a\n"
-          "     non-zero exit status, or if it is killed with a signal other\n"
-          "     than SIGTERM.  Without this flag, the default behavior is to\n"
-          "     restart the child process if it exits for any reason.\n\n"
-
-          "  -r count,secs,sleep\n"
-          "     Sleep 'sleep' seconds if the process respawns 'count' times\n"
-          "     within 'secs' seconds.  This is designed to prevent respawning\n"
-          "     from using too many system resources if something is wrong with\n"
-          "     the child process.  The default value is %d,%d,%d. Use -r 0,0,0\n"
-          "     to disable this feature.\n\n"
-
-          "  -s \"command\"\n"
-          "     Run the indicated command or script each time the process is\n"
-          "     respawned, using the system() call.  This may be useful, for\n"
-          "     instance, to notify an operator via email each time a respawn\n"
-          "     occurs.  If -c is also specified, an additional parameter will\n"
-          "     be appended to the command, indicating the number of times the\n"
-          "     respawn has occurred in the given time interval.\n\n"
-
-          "  -c secs\n"
-          "     Specifies the number of seconds over which to count respawn events\n"
-          "     for the purposes of passing an argument to the script named with\n"
-          "     -s.\n\n"
-
-          "  -d secs\n"
-          "     Specifies the number of seconds to delay for between restarts.\n"
-          "     The default is %d.\n\n"
-
-#ifdef HAVE_LIBCURL
-          "  -W watchdog_url,start,cycle,timeout\n"
-          "     Specifies an optional URL to watch while waiting for the process\n"
-          "     to terminate.  If this is specified, autorestart will start the process,\n"
-          "     wait start seconds, and then repeatedly poll the indicated URL\n"
-          "     every cycle seconds.  If a HTTP failure code is detected,\n"
-          "     or no response is received within timeout seconds, then the\n"
-          "     child is terminated and restarted.  The start, cycle, and timeout\n"
-          "     parameters are all required.\n\n"
-#endif  /* HAVE_LIBCURL */
-
-          "  -U username\n"
-          "     Change to the indicated user upon startup.  The logfile is still\n"
-          "     created as the initial user.\n\n"
-
-          "  -G groupname\n"
-          "     Change to the indicated group upon startup.\n\n"
-
-          "  -D dirname\n"
-          "     Change to the indicated working directory upon startup.  The logfile\n"
-          "     is still created relative to the initial startup directory.\n\n"
-
-          "  -h\n"
-          "     Output this help information.\n\n",
-          spam_respawn_count, spam_respawn_time, spam_restart_delay_time, respawn_delay_time);
-}
-
-void
-parse_int_triplet(char *param, int *a, int *b, int *c) {
-  char *comma;
-  char *comma2;
-  
-  comma = strchr(param, ',');
-  if (comma == NULL) {
-    fprintf(stderr, "Comma required: %s\n", param);
-    exit(1);
-  }
-
-  comma2 = strchr(comma+1, ',');
-  if (comma2 == NULL) {
-    fprintf(stderr, "Second comma required: %s\n", param);
-    exit(1);
-  }
-
-  *comma = '\0';
-  *comma2 = '\0';
-  
-  *a = atoi(param);
-  *b = atoi(comma + 1);
-  *c = atoi(comma2 + 1);
-}
-
-void 
-parse_watchdog(char *param) {
-  char *comma;
-  char *comma2;
-  char *comma3;
-
-#ifndef HAVE_LIBCURL
-  fprintf(stderr, "-W requires autorestart to have been compiled with libcurl support.\n");
-  exit(1);
-#endif  /* HAVE_LIBCURL */
-
-  comma = strrchr(param, ',');
-  if (comma == NULL) {
-    fprintf(stderr, "Comma required: %s\n", param);
-    exit(1);
-  }
-  *comma = '\0';
-
-  comma2 = strrchr(param, ',');
-  if (comma2 == NULL) {
-    *comma = ',';
-    fprintf(stderr, "Second comma required: %s\n", param);
-    exit(1);
-  }
-  *comma2 = '\0';
-
-  comma3 = strrchr(param, ',');
-  if (comma3 == NULL) {
-    *comma = ',';
-    *comma2 = ',';
-    fprintf(stderr, "Third comma required: %s\n", param);
-    exit(1);
-  }
-  *comma3 = '\0';
-
-  watchdog_url = param;
-  watchdog_start_sec = atoi(comma3 + 1);
-  watchdog_cycle_sec = atoi(comma2 + 1);
-  watchdog_timeout_sec = atoi(comma + 1);
-}
-
-
-int 
-main(int argc, char *argv[]) {
-  extern char *optarg;
-  extern int optind;
-  /* The initial '+' instructs GNU getopt not to reorder switches. */
-  static const char *optflags = "+l:p:fntr:s:c:d:W:U:G:D:h";
-  int flag;
-
-  flag = getopt(argc, argv, optflags);
-  while (flag != EOF) {
-    switch (flag) {
-    case 'l':
-      logfile_name = optarg;
-      break;
-
-    case 'p':
-      pidfile_name = optarg;
-      break;
-
-    case 'f':
-      dont_fork = 1;
-      break;
-
-    case 'n':
-      stop_always = 1;
-      break;
-
-    case 't':
-      stop_on_terminate = 1;
-      break;
-
-    case 'r':
-      parse_int_triplet(optarg, &spam_respawn_count, &spam_respawn_time, &spam_restart_delay_time);
-      break;
-
-    case 's':
-      respawn_script = optarg;
-      break;
-
-    case 'c':
-      respawn_count_time = atoi(optarg);
-      break;
-
-    case 'd':
-      respawn_delay_time = atoi(optarg);
-      break;
-
-    case 'W':
-      parse_watchdog(optarg);
-      break;
-
-    case 'U':
-      startup_username = optarg;
-      break;
-
-    case 'G':
-      startup_groupname = optarg;
-      break;
-
-    case 'D':
-      startup_chdir = optarg;
-      break;
-      
-    case 'h':
-      help();
-      return 1;
-
-    case '?':
-    case '+':
-      usage();
-      return 1;
-
-    default:
-      fprintf(stderr, "Unhandled switch: -%c\n", flag);
-      return 1;
-    }
-    flag = getopt(argc, argv, optflags);
-  }
-
-  argc -= (optind - 1);
-  argv += (optind - 1);
-
-  if (argc < 2) {
-    fprintf(stderr, "No program to execute given.\n");
-    usage();
-    return 1;
-  }
-
-  params = &argv[1];
-
-  if (logfile_name != NULL) {
-    logfile_fd = open(logfile_name, O_WRONLY | O_CREAT | O_TRUNC, 0666);
-    if (logfile_fd < 0) {
-      fprintf(stderr, "Cannot write to logfile %s: %s\n", 
-              logfile_name, strerror(errno));
-      return 1;
-    }
-    fprintf(stderr, "Generating output to %s.\n", logfile_name);
-  }
-
-  if (startup_chdir != NULL) {
-    if (chdir(startup_chdir) != 0) {
-      perror(startup_chdir);
-      return 1;
-    }
-  }
-
-  if (startup_groupname != NULL) {
-    struct group *grp;
-    grp = getgrnam(startup_groupname);
-    if (grp == NULL) {
-      perror(startup_groupname);
-      return 1;
-    }
-
-    if (setgid(grp->gr_gid) != 0) {
-      perror(startup_groupname);
-      return 1;
-    }
-  }
-
-  if (startup_username != NULL) {
-    struct passwd *pwd;
-    pwd = getpwnam(startup_username);
-    if (pwd == NULL) {
-      perror(startup_username);
-      return 1;
-    }
-
-    if (setuid(pwd->pw_uid) != 0) {
-      perror(startup_username);
-      return 1;
-    }
-  }
-
-  if (dont_fork) {
-    do_autorestart();
-  } else {
-    double_fork();
-  }
-
-  return 0;
-}
-

+ 1 - 2
direct/src/directscripts/Doxyfile.python

@@ -643,8 +643,7 @@ RECURSIVE              = YES
 # excluded from the INPUT source files. This way you can easily exclude a 
 # subdirectory from a directory tree whose root is specified with the INPUT tag.
 
-EXCLUDE                = built/direct/test \
-                         built/direct/plugin \
+EXCLUDE                = built/direct/plugin \
                          built/direct/plugin_npapi \
                          built/direct/plugin_activex \
                          built/direct/plugin_installer \

+ 0 - 29
direct/src/heapq/Sources.pp

@@ -1,29 +0,0 @@
-// DIR_TYPE "metalib" indicates we are building a shared library that
-// consists mostly of references to other shared libraries.  Under
-// Windows, this directly produces a DLL (as opposed to the regular
-// src libraries, which don't produce anything but a pile of OBJ files
-// under Windows).
-
-#define DIR_TYPE metalib
-
-// This directory strictly contains a Python utility; therefore, only
-// build it if we actually have Python.
-#define BUILD_DIRECTORY $[HAVE_PYTHON]
-
-
-#define OTHER_LIBS \
-  pandaexpress:m \
-  p3dconfig:c p3dtoolconfig:m \
-  p3dtoolutil:c p3dtoolbase:c p3prc:c p3dtool:m
-
-#begin metalib_target
-  #define TARGET p3heapq
-
-  // Tell ppremake to treat this file as if it had been generated via
-  // interrogate.  On OSX, this will move it into the .so, instead of
-  // the .dylib, so that it can be imported into Python.
-  #define PYTHON_MODULE_ONLY 1
-
-  #define SOURCES heapq.cxx
-#end metalib_target
-

+ 0 - 240
direct/src/heapq/heapq.cxx

@@ -1,240 +0,0 @@
-
-/* Note: This module can probably go away when we upgrade to Python 2.4.
-   Python 2.3 has a heapq implementation, but it is in Python. This is
-   reported to be about 20x faster. In 2.4 they reimplemented heapq in C so
-   it should be comparable to this. At this time though, Python 2.4 is
-   still in alpha.
-   
-   Note: This code has been bastardized to only work on Tasks temporarily.
-
-*/
-
-#include <Python.h>
-
-/* Prototypes */
-static PyObject * heappush(PyObject *self, PyObject *args);
-static PyObject * heappop(PyObject *self, PyObject *args);
-static PyObject * heapreplace(PyObject *self, PyObject *args);
-static PyObject * heapify(PyObject *self, PyObject *args);
-static int _siftdown(PyObject *list, int startpos, int pos);
-static int _siftup(PyObject *list, int pos);
-
-#ifdef _WIN32
-extern "C" __declspec(dllexport) void initlibheapq(void);
-extern "C" __declspec(dllexport) void initlibp3heapq(void);
-#else
-extern "C" void initlibheapq();
-extern "C" void initlibp3heapq();
-#endif
-
-static PyObject *
-heappush(PyObject *self, PyObject *args) {
-    int len;
-    PyObject *list = NULL;
-    PyObject *node = NULL;
-    
-    if (!PyArg_ParseTuple(args,"O!O",&PyList_Type,&list,&node))
-        return NULL;
-
-    len = PyList_Size(list);
-    if (PyList_Append(list,node))
-        return NULL;
-    
-    if (_siftdown(list,0,len))
-        return NULL;
-    
-    Py_INCREF(Py_None);
-    return Py_None;
-}
-
-static PyObject *
-heappop(PyObject *self, PyObject *args) {
-    PyObject *list = NULL;
-    PyObject *node = NULL;
-    PyObject *returnNode = NULL;
-    int len;
-    
-    if (!PyArg_ParseTuple(args,"O!",&PyList_Type,&list))
-        return NULL;
-
-    len = PyList_Size(list);
-    if (len == 0) {
-        /* Special-case most common failure cause */
-        PyErr_SetString(PyExc_IndexError, "pop from empty list");
-        return NULL;
-    }
-
-    node = PySequence_GetItem(list,-1);
-    PySequence_DelItem(list,-1);
-
-    len -= 1;
-    if (len > 0) {
-        returnNode = PySequence_GetItem(list,0);
-        PyList_SetItem(list,0,node);
-        if (_siftup(list,0))
-            return NULL;
-    } else {
-        returnNode = node;
-    }
-    
-    return returnNode;
-}
-
-static PyObject * 
-heapreplace(PyObject *self, PyObject *args) {
-    PyObject *list = NULL;
-    PyObject *node = NULL;
-    PyObject *returnNode = NULL;
-    int len;
-    
-    if (!PyArg_ParseTuple(args,"O!O",&PyList_Type,&list,&node))
-        return NULL;
-
-    len = PyList_Size(list);
-    if (len == 0) {
-        /* Special-case most common failure cause */
-        PyErr_SetString(PyExc_IndexError, "replace on an empty list");
-        return NULL;
-    }
-
-    returnNode = PySequence_GetItem(list,0);
-    PySequence_SetItem(list,0,node);
-    if (_siftup(list,0))
-        return NULL;
-
-    return returnNode;
-}
-
-static PyObject *
-heapify(PyObject *self, PyObject *args) {
-    int n, i;
-    PyObject *list;
-
-    if (!PyArg_ParseTuple(args,"O!",&PyList_Type,&list))
-        return NULL;
-    n = (PyList_Size(list)/2)-1;
-    
-    for (i=n;i>=0;i--) {
-        if (_siftup(list,i))
-            return NULL;
-    }
-
-    Py_INCREF(Py_None);
-    return Py_None;
-}
-
-static int
-_siftdown(PyObject *list, int startpos, int pos) {
-    PyObject *newitem, *parent;
-    int parentpos;
-
-    newitem = PySequence_GetItem(list,pos);
-
-    PyObject *newitem_wakeTime_obj = PyObject_GetAttrString(newitem, "wakeTime");
-    double newitem_wakeTime = 0.0;
-    if (newitem_wakeTime_obj != NULL) {
-      newitem_wakeTime = PyFloat_AS_DOUBLE(newitem_wakeTime_obj);
-      Py_DECREF(newitem_wakeTime_obj);
-    }
-
-    while (pos > startpos) {
-        parentpos = (pos - 1) >> 1;
-        parent = PyList_GetItem(list,parentpos);
-
-        /*
-        cmp = PyObject_RichCompareBool(parent,newitem,Py_LE);
-        if (cmp > 0)
-            break;
-        else if (cmp < 0)
-            return -1;
-        */
-
-        PyObject *parent_wakeTime_obj = PyObject_GetAttrString(parent, "wakeTime");
-        double parent_wakeTime = 0.0;
-        if (parent_wakeTime_obj != NULL) {
-          parent_wakeTime = PyFloat_AS_DOUBLE(parent_wakeTime_obj);
-          Py_DECREF(parent_wakeTime_obj);
-        }
-
-        if (parent_wakeTime <= newitem_wakeTime) {
-          break;
-        }
-
-        Py_INCREF(parent);
-        PyList_SetItem(list,pos,parent);
-        pos = parentpos;
-    }
-    PyList_SetItem(list,pos,newitem);
-    return 0;
-}
-
-static int
-_siftup(PyObject *list, int pos) {
-    PyObject *newitem, *right, *child;
-    int endpos, rightpos, childpos;
-    int startpos = pos;
-    
-    endpos = PyList_Size(list);
-    newitem = PySequence_GetItem(list,pos);
-    
-    childpos = (2*pos)+1;
-    while (childpos < endpos) {
-        rightpos = childpos + 1;
-        child = PySequence_Fast_GET_ITEM(list,childpos);
-
-        PyObject *child_wakeTime_obj = PyObject_GetAttrString(child, "wakeTime");
-        double child_wakeTime = 0.0;
-        if (child_wakeTime_obj != NULL) {
-          child_wakeTime = PyFloat_AS_DOUBLE(child_wakeTime_obj);
-          Py_DECREF(child_wakeTime_obj);
-        }
-
-
-        if (rightpos < endpos) {
-            right = PySequence_Fast_GET_ITEM(list,rightpos);
-
-            PyObject *right_wakeTime_obj = PyObject_GetAttrString(right, "wakeTime");
-            double right_wakeTime = 0.0;
-            if (right_wakeTime_obj != NULL) {
-              right_wakeTime = PyFloat_AS_DOUBLE(right_wakeTime_obj);
-              Py_DECREF(right_wakeTime_obj);
-            }
-
-            /*
-            cmp = PyObject_RichCompareBool(right,child,Py_LE);
-            if (cmp > 0)
-              childpos = rightpos;
-            else if (cmp < 0)
-              return -1;
-            */
-
-            if (right_wakeTime <= child_wakeTime) {
-              childpos = rightpos;
-            }
-        }
-        child = PySequence_GetItem(list,childpos);
-        PyList_SetItem(list,pos,child);
-        pos = childpos;
-        childpos = (2*pos)+1;
-    }
-    PyList_SetItem(list,pos,newitem);
-
-    return _siftdown(list,startpos,pos);
-}
-
-static PyMethodDef heapqcMethods[] = {
-    {"heappush",heappush,METH_VARARGS},
-    {"heappop",heappop,METH_VARARGS},
-    {"heapreplace",heapreplace,METH_VARARGS},
-    {"heapify",heapify,METH_VARARGS},
-    {NULL, NULL} /* Sentinel */
-};
-
-void initlibheapq(void) {
-    (void) Py_InitModule("libheapq", heapqcMethods);
-};
-
-void initlibp3heapq(void) {
-    (void) Py_InitModule("libp3heapq", heapqcMethods);
-};
-

+ 1 - 1
direct/src/http/LandingPage.py

@@ -3,7 +3,7 @@ from direct.directnotify.DirectNotifyGlobal import directNotify
 from pandac.PandaModules import VirtualFileSystem
 from pandac.PandaModules import Filename
 from pandac.PandaModules import DSearchPath
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 import LandingPageHTML
 from StringIO import StringIO
 

+ 1 - 1
direct/src/http/LandingPageHTML.py

@@ -1,6 +1,6 @@
 # -- Text content for the landing page.  You should change these for yours! --
 
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 
 title = "Landing Page"
 defaultTitle = title

+ 1 - 1
direct/src/http/WebRequest.py

@@ -4,7 +4,7 @@ from direct.directnotify.DirectNotifyGlobal import directNotify
 from direct.task.TaskManagerGlobal import taskMgr
 from direct.task import Task
 from LandingPage import LandingPage
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 
 notify = directNotify.newCategory('WebRequestDispatcher')
 

+ 0 - 504
direct/src/pyinst/Builder.py

@@ -1,504 +0,0 @@
-import string
-import pprint
-import sys
-import os
-import ConfigParser
-import pprint
-import shutil
-import tempfile
-import ltoc
-import tocfilter
-import resource
-import archive
-import archivebuilder
-import carchive
-
-logfile = None
-autopath = []
-built = {}
-copyFile = None
-
-class Target:
-    def __init__(self, cfg, sectnm, cnvrts):
-        self.children = []
-        self._dependencies = ltoc.lTOC() # the stuff an outer package will need to use me
-        self.cfg = cfg
-        self.__name__ = 'joe'
-        for optnm in cfg.options(sectnm):
-            cnvrt = cnvrts.get(optnm, 'getstringlist')
-            if cnvrt:
-                f = getattr(self, cnvrt, None)
-                if f:
-                    self.__dict__[optnm] = f(cfg.get(sectnm, optnm))
-        if not hasattr(self, 'name'):
-            self.name = self.__name__
-        print "Initializing", self.__name__
-        self.pathprefix = autopath + self.pathprefix
-        self.pathprefix.append(os.path.join(pyinsthome, 'support'))
-        for z in self.zlib:
-            if z in self.cfg.sections():
-                self.children.append(z)
-            else:
-                raise ValueError, "%s - zlib '%s' does not refer to a sections" \
-                      % (self.name, z)
-        for i in range(len(self.misc)):
-            x = self.misc[i]
-            if x in self.cfg.sections():
-                if self.cfg.get(x, "type") == 'PYZ':
-                    self.zlib.append(x)
-                    self.misc[i] = None
-                self.children.append(x)
-        self.misc = filter(None, self.misc)
-        self.edit()
-        self.toc = ltoc.lTOC()
-        for thingie in self.excludes:
-            try:
-                fltr = tocfilter.makefilter(thingie, self.pathprefix)
-            except ValueError:
-                print "Warning: '%s' not found - no filter created" % thingie
-            else:
-                self.toc.addFilter(fltr)
-        if self.exstdlib:
-            self.toc.addFilter(tocfilter.StdLibFilter())
-        if self.extypes:
-            self.toc.addFilter(tocfilter.ExtFilter(self.extypes))
-        if self.expatterns:
-            self.toc.addFilter(tocfilter.PatternFilter(self.expatterns))
-
-        ##------utilities------##
-    def dump(self):
-        logfile.write("---- %s: %s -----\n" % (self.__class__.__name__, self.name))
-        pprint.pprint(self.__dict__, logfile)
-    def getstringlist(self, opt):
-        tmp = string.split(opt, ',')
-        return filter(None, map(string.strip, tmp))
-    def getstring(self, opt):
-        return opt
-    def getbool(self, opt):
-        if opt in ('0','f','F','n','N'):
-            return 0
-        return 1
-        ##-----framework-----##
-    def build(self):
-        print "Gathering components of %s" % self.name
-        self.gather()
-        logfile.write("Final Table of Contents for %s:\n" % self.name)
-        pprint.pprint(self.toc.toList(), logfile)
-        print "Creating %s" % self.name
-        self.assemble()
-        ##-----overrideables-----##
-    def edit(self):
-        pass
-    def gather(self):
-        pass
-    def assemble(self):
-        pass
-
-class PYZTarget(Target):
-    def __init__(self, cfg, sectnm, cnvrts):
-        Target.__init__(self, cfg, sectnm, cnvrts)
-        # to use a PYZTarget, you'll need imputil and archive
-        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'imputil.py')])
-        print "pyinsthome:", pyinsthome
-        imputil = resource.makeresource('imputil.py', [pyinsthome])
-        self._dependencies.append(imputil)
-        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'archive_rt.py')])
-        archmodule = resource.makeresource('archive_rt.py', [pyinsthome])
-        self._dependencies.merge(archmodule.dependencies())
-        self._dependencies.append(archmodule)
-        self.toc.addFilter(archmodule)
-        self.toc.addFilter(imputil)
-        for mod in archmodule.modules:
-            self.toc.addFilter(mod)
-    def edit(self):
-        if self.extypes:
-            print "PYZ target %s ignoring extypes = %s" % (self.__name__, self.extypes)
-
-    def gather(self):
-        for script in self.dependencies:
-            rsrc = resource.makeresource(script, self.pathprefix)
-            if not isinstance(rsrc, resource.scriptresource):
-                print "Bug alert - Made %s from %s!" % (rsrc, script)
-            self.toc.merge(rsrc.modules)
-        logfile.write("lTOC after expanding 'depends':\n")
-        pprint.pprint(self.toc.toList(), logfile)
-        for thingie in self.includes + self.directories + self.packages:
-            rsrc = resource.makeresource(thingie, self.pathprefix)
-##            if not isinstance(rsrc, resource.pythonresource):
-##                print "PYZ target %s ignoring include %s" % (self.name, thingie)
-##            else:
-            self.toc.merge(rsrc.contents())
-        logfile.write("lTOC after includes, dir, pkgs:\n")
-        pprint.pprint(self.toc.toList(), logfile)
-        self.toc.addFilter(tocfilter.ExtFilter(['.py', '.pyc', '.pyo'], 1))
-        logfile.write("Applying the following filters:\n")
-        pprint.pprint(self.toc.filters, logfile)
-        self.toc.filter()
-
-    def assemble(self):
-        contents = self.toc.toList()
-        if contents:
-            lib = archive.ZlibArchive()
-            lib.build(self.name, archivebuilder.GetCompiled(self.toc.toList()))
-
-class CollectTarget(Target):
-    def __init__(self, cfg, sectnm, cnvrts):
-        Target.__init__(self, cfg, sectnm, cnvrts)
-
-    _rsrcdict = {'COLLECT': resource.dirresource, 'PYZ': resource.zlibresource, 'CARCHIVE': resource.archiveresource}
-
-    def gather(self):
-        if self.support:
-            # the bare minimum
-            self.toc.merge([resource.makeresource('python20.dll')])
-            self.toc.merge([resource.makeresource('exceptions.pyc').asBinary()])
-        # zlib, bindepends, misc, trees, destdir
-        for i in range(len(self.zlib)):
-            # z refers to the section name
-            z = self.zlib[i]
-            nm = self.cfg.get(z, 'name')
-            try:
-                self.toc.merge([resource.makeresource(nm, ['.'])])
-            except ValueError:
-                # zlibs aren't written if they turn out to be empty
-                self.zlib[i] = None
-        self.zlib = filter(None, self.zlib)
-        if self.zlib:
-            target = built.get(self.zlib[0], None)
-            if target:
-                self.toc.merge(target._dependencies)
-        for script in self.bindepends:
-            rsrc = resource.makeresource(script, self.pathprefix)
-            self.toc.merge(rsrc.binaries)
-        logfile.write('ltoc after bindepends:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        for thingie in self.misc:
-            if thingie in self.cfg.sections():
-                name = self.cfg.get(thingie, "name")
-                typ = self.cfg.get(thingie, "type")
-                klass = self._rsrcdict.get(typ, resource.dataresource)
-                rsrc = apply(klass, (name, name))
-                #now make sure we have the stuff the resource requires
-                target = built.get(thingie, None)
-                if target:
-                    self.toc.merge(target._dependencies)
-            else:
-                rsrc = resource.makeresource(thingie, self.pathprefix)
-            self.toc.merge(rsrc.contents())
-        logfile.write('ltoc after misc:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        for script in self.script:
-            if string.find(script, '.') == -1:
-                script = script + '.py'
-            rsrc = resource.makeresource(script, self.pathprefix)
-            if rsrc.typ == 'm':
-                rsrc.typ = 's'
-            self.toc.merge([rsrc])
-        logfile.write('ltoc after scripts:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        for tree in self.trees:
-            try:
-                rsrc = resource.treeresource('.', tree)
-            except ValueError:
-                print "tree %s not found" % tree
-            else:
-                self.toc.merge(rsrc.contents())
-        logfile.write('ltoc after trees:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        self.toc.addFilter(tocfilter.TypeFilter(['d']))
-        logfile.write("Applying the following filters:\n")
-        pprint.pprint(self.toc.filters, logfile)
-        self.toc.filter()
-        #don't dupe stuff in a zlib that's part of this target
-        if self.zlib:
-           ztoc = ltoc.lTOC()
-           for zlibnm in self.zlib:
-               target = built.get(zlibnm, None)
-               if target:
-                   ztoc.merge(target.toc)
-           for i in range(len(self.toc)-1, -1, -1):
-               rsrc = self.toc[i]
-               if isinstance(rsrc, resource.moduleresource) and rsrc in ztoc:
-                   del self.toc[i]
-
-    def assemble(self):
-        if os.path.exists(self.name):
-            if os.path.isdir(self.name):
-                for fnm in os.listdir(self.name):
-                    try:
-                        os.remove(os.path.join(self.name, fnm))
-                    except:
-                        print "Could not delete file %s" % os.path.join(self.name, fnm)
-        else:
-            os.makedirs(self.name)
-        mysite = []
-        for nm, path, typ in self.toc.toList():
-            shutil.copy2(path, self.name)
-            if typ == 'z':
-                mysite.append('imputil.FuncImporter(archive.ZlibArchive("%s", 0).get_code).install()' % nm)
-        if mysite:
-            mysite.insert(0, 'import archive, imputil')
-            open(os.path.join(self.name, 'site.py'),'w').write(string.join(mysite, '\n'))
-
-
-class ArchiveTarget(CollectTarget):
-    usefullname = 1
-    def __init__(self, cfg, sectnm, cnvrts):
-        CollectTarget.__init__(self, cfg, sectnm, cnvrts)
-        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'carchive_rt.py')])
-        carchmodule = resource.makeresource('carchive_rt.py', [pyinsthome])
-        self._dependencies.merge(carchmodule.dependencies())
-        self._dependencies.append(carchmodule)
-
-    def edit(self):
-        if self.destdir:
-            print "Warning 'destdir = %s' ignored for %s" % (self.destdir, self.name)
-
-    def gather(self):
-        CollectTarget.gather(self)
-
-    _cdict = {'s':2,'m':1,'b':1,'x':1,'a':0,'z':0, 'p':1}
-
-    def assemble(self, pkgnm=None):
-        if pkgnm is None:
-            pkgnm = self.name
-        arch = carchive.CArchive()
-        toc = []
-        pytoc = []
-        for nm, path, typ in self.toc.toList():
-            compress = self._cdict[typ]
-            if typ == 'b' or (self.usefullname and typ in 'ms'):
-                nm = os.path.basename(path)
-            if typ == 'm':
-                pytoc.append((nm, path, compress, typ))
-            else:
-                toc.append((nm, path, compress, typ))
-        toc = toc + archivebuilder.GetCompiled(pytoc)
-        arch.build(pkgnm, toc)
-        return arch
-
-class FullExeTarget(ArchiveTarget):
-    usefullname = 0
-    def __init__(self, cfg, sectnm, cnvrts):
-        ArchiveTarget.__init__(self, cfg, sectnm, cnvrts)
-
-    def gather(self):
-        for script in self.script:
-            #print "FullExeTarget.gather: script is", repr(script)
-            rsrc = resource.makeresource(script, self.pathprefix)
-            rsrc = resource.scriptresource(rsrc.name, rsrc.path)
-            #print " resource is", repr(rsrc)
-            self.toc.merge(rsrc.binaries)
-        ArchiveTarget.gather(self)
-        if not self.zlib:
-            self.toc.merge(rsrc.modules)
-        self._dependencies = ltoc.lTOC()
-
-    _cdict = {'s':2,'m':0,'b':1,'x':0,'a':0,'z':0}
-    _edict = { (1, 1):'Runw_d.exe', (1, 0):'Runw.exe', (0, 1):'Run_d.exe', (0, 0):'Run.exe'}
-
-    def assemble(self):
-        pkgname = tempfile.mktemp()
-        arch = ArchiveTarget.assemble(self, pkgname)
-        exe = self._edict[(self.userunw, self.debug)]
-        exe = os.path.normpath(os.path.join(pyinsthome, 'support', exe))
-##        copyFile([exe, pkgname], self.name)
-##        os.remove(pkgname)
-        # Thomas Heller's icon code
-        # my version
-        if self.icon:
-            myexe = tempfile.mktemp()
-            copyFile (exe, myexe)
-            try:
-                from icon import CopyIcons
-                CopyIcons(myexe, self.icon)
-            except ImportError:
-                print "win32api is required for updating icons"
-                print "You should have win32api.pyd and PyWinTypes20.dll"
-                print "in the installation directory."
-                print "Please copy them to Python's DLLS subdirectory"
-                print "(or install Mark Hammond's Win32 extensions)."
-##        iconfile = None
-##        for name in self.cfg.sections():
-##            if self.cfg.get (name, "type") == "STANDALONE":
-##                try:
-##                    iconfile = self.cfg.get (name, "iconfile")
-##                except:
-##                    pass
-##        if iconfile:
-##            from icon import CopyIcons
-##            CopyIcons (myexe, iconfile)
-            copyFile ([myexe, pkgname], self.name)
-            os.remove(myexe)
-        else:
-            copyFile([exe, pkgname], self.name)
-        #os.remove(pkgname)
-
-class ExeTarget(FullExeTarget):
-    def __init__(self, cfg, sectnm, cnvrts):
-        FullExeTarget.__init__(self, cfg, sectnm, cnvrts)
-
-    def edit(self):
-        if not self.script:
-            raise ValueError, "EXE target %s requires 'script= <script>'" % self.__name__
-
-    def gather(self):
-        FullExeTarget.gather(self)
-        for i in range(len(self.toc)-1, -1, -1):
-            rsrc = self.toc[i]
-            if rsrc.typ == 'b':
-                self._dependencies.append(rsrc)
-                del self.toc[i]
-
-installpreamble = """\
-import sys, os
-import installutils
-import carchive_rt
-idir = installutils.getinstalldir()
-me = sys.argv[0]
-if me[:-4] != '.exe':
-    me = me + '.exe'
-this = carchive_rt.CArchive(sys.argv[0])
-here = sys.path[0]
-"""
-mvfile = "installutils.copyFile(os.path.join(here, '%s'), os.path.join(idir, '%s'))\n"
-extractfile = "open(os.path.join(idir, '%s'), 'wb').write(this.extract('%s')[1])\n"
-sitepreamble = """\
-import archive_rt
-import imputil
-import sys
-"""
-importzlib = "imputil.FuncImporter(archive_rt.ZlibArchive(sys.path[0]+'/%s').get_code).install()\n"
-
-class InstallTarget(FullExeTarget):
-    def __init__(self, cfg, sectnm, cnvrts):
-        FullExeTarget.__init__(self, cfg, sectnm, cnvrts)
-
-    def edit(self):
-        if not self.script:
-            open('gen_install.py', 'w').write(installpreamble)
-            self.script = ['gen_install.py']
-
-    def gather(self):
-        FullExeTarget.gather(self)
-        if self.script[0] == 'gen_install.py':
-            f = open(self.script[0], 'a')
-            for rsrc in self.toc:
-                if isinstance(rsrc, resource.binaryresource):
-                    nm = os.path.basename(rsrc.path)
-                    f.write(mvfile % (nm, nm))
-                elif isinstance(rsrc, resource.pythonresource):
-                    pass
-                elif isinstance(rsrc, resource.zlibresource):
-                    pass
-                else:
-                    f.write(extractfile % (rsrc.name, rsrc.name))
-                    if isinstance(rsrc, resource.archiveresource):
-                        #did it come with an install script?
-                        target = built.get(rsrc.name, None)
-                        if target:
-                           if hasattr(target, "installscript"):
-                               for script in target.installscript:
-                                   s = resource.makeresource(script, self.pathprefix)
-                                   txt = open(s.path, 'r').read()
-                                   f.write(txt)
-            f.close()
-
-dispatch = {
-                'PYZ': PYZTarget,
-                'CARCHIVE': ArchiveTarget,
-                'COLLECT': CollectTarget,
-                'STANDALONE': ExeTarget,
-                'INSTALL': InstallTarget,
-                'FULLEXE': FullExeTarget,
-}
-
-
-def makeTarget(cfg, section):
-    return dispatch[cfg.get(section, 'type')](cfg, section, optcnvrts)
-
-optdefaults = { 'type':'PYZ',
-                'script':'',            # INSTALL (opt) & STANDALONE (required)
-                'zlib':'',              # INSTALL, STANDALONE, COLLECT
-                'bindepends':'',        # INSTALL, COLLECT
-                'misc':'',              # INSTALL. COLLECT
-                'includetk': '0',       # INSTALL, COLLECT
-        'userunw': '0',         # STANDALONE
-                'dependencies':'',      # PYZ
-                'directories':'',       # PYZ
-                'excludes':'',          # PYZ, INSTALL, COLLECT
-                'expatterns': '',
-                'exstdlib': '0',
-                'extypes': '',
-                'includes':'',          # PYZ
-                'packages':'',          # PYZ
-                'destdir':'',           # COLLECT
-                'pathprefix': '',
-                'trees': '',
-                'debug': '0',
-                'support': '1', # include python20.dll & exceptons.pyc at a minimum
-                'icon': '',
-}
-
-optcnvrts = {   'type':'',
-                'name': 'getstring',
-                'exstdlib': 'getbool',
-                'console': 'getbool',
-                'analyze': 'getbool',
-                'debug': 'getbool',
-                'includetk': 'getbool',
-                'userunw': 'getbool',
-                'destdir': 'getstring',
-                'support': 'getbool',
-                '__name__': 'getstring',
-                'icon': 'getstring',
-}
-def main(opts, args):
-    global pyinsthome
-    global copyFile
-    pyinsthome = os.path.abspath(os.path.dirname(sys.argv[0]))
-    # sys.path.insert(0, os.path.join(pyinsthome, 'support'))
-    import installutils
-    copyFile = installutils.copyFile
-    global logfile
-    logfile = open('Builder.log','w')
-    targets = []
-    xref = {}
-    cfg = ConfigParser.ConfigParser(optdefaults)
-    for arg in args:
-        dirnm = os.path.dirname(arg)
-        if dirnm == '':
-            dirnm = '.'
-        autopath.append(os.path.abspath(dirnm))
-    cfg.read(args)
-    for section in cfg.sections():
-        target = makeTarget(cfg, section)
-        targets.append(target)
-        xref[section] = target
-    while targets:
-        for i in range(len(targets)):
-            target = targets[i]
-            for child in target.children:
-                if xref[child] in targets:
-                    break
-            else:       #no break - ready to build
-                target.dump()
-                target.build()
-                built[target.__name__] = target
-                built[target.name] = target
-                targets[i] = None
-                break
-        else:       #no break - couldn't find anything to build
-            names = map(lambda x: getattr(x, 'name'), targets)
-            raise RuntimeError, "circular dependencies in %s" % repr(names)
-        targets = filter(None, targets)
-
-def run(file):
-    main ([], file)
-
-if __name__ == '__main__':
-    import getopt
-    (opts, args) = getopt.getopt(sys.argv[1:], 'dv')
-    print "opts:", opts
-    print "args:", args
-    main(opts, args)

+ 0 - 0
direct/src/pyinst/Sources.pp


+ 0 - 0
direct/src/pyinst/__init__.py


+ 0 - 246
direct/src/pyinst/archive.py

@@ -1,246 +0,0 @@
-#
-# Gordon McMillan (as inspired and influenced by Greg Stein)
-#
-
-# subclasses may not need marshal or struct, but since they're
-# builtin, importing is safe.
-#
-# While an Archive is really an abstraction for any "filesystem
-# within a file", it is tuned for use with imputil.FuncImporter.
-# This assumes it contains python code objects, indexed by the
-# the internal name (ie, no '.py').
-# See carchive.py for a more general archive (contains anything)
-# that can be understood by a C program.
-
-import marshal
-import struct
-
-class Archive:
-  """ A base class for a repository of python code objects.
-
-      The get_code method is used by imputil.FuntionImporter
-      to get code objects by name.
-      Archives are flat namespaces, so conflict between module
-      names in different packages are possible. Use a different
-      Archive for each package.
-  """
-  MAGIC = 'PYL\0'
-  HDRLEN = 12        # default is MAGIC followed by python's magic, int pos of toc
-  TOCPOS = 8
-  TRLLEN = 0        # default - no trailer
-  TOCTMPLT = {}     #
-  os = None
-  def __init__(self, path=None, start=0):
-    """
-         Initialize an Archive. If path is omitted, it will be an empty Archive.
-         start is the seek position within path where the Archive starts."""
-    self.toc = None
-    self.path = path
-    self.start = start
-    import imp
-    self.pymagic = imp.get_magic()
-    if path is not None:
-      self.lib = open(self.path, 'rb')
-      self.checkmagic()
-      self.loadtoc()
-
-  ####### Sub-methods of __init__ - override as needed #############
-  def checkmagic(self):
-    """Verify version and validity of file.
-
-        Overridable.
-        Check to see if the file object self.lib actually has a file
-        we understand.
-    """
-    self.lib.seek(self.start)   #default - magic is at start of file
-    if self.lib.read(len(self.MAGIC)) != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    if self.lib.read(len(self.pymagic)) != self.pymagic:
-      raise RuntimeError, "%s has version mismatch to dll" % (self.path)
-
-  def loadtoc(self):
-    """Load the table of contents.
-
-        Overridable.
-        Default: After magic comes an int (4 byte native) giving the
-        position of the TOC within self.lib.
-        Default: The TOC is a marshal-able string.
-    """
-    self.lib.seek(self.start + self.TOCPOS)
-    (offset,) = struct.unpack('=i', self.lib.read(4))
-    self.lib.seek(self.start + offset)
-    self.toc = marshal.load(self.lib)
-
-  ######## This is what is called by FuncImporter #######
-  ## Since an Archive is flat, we ignore parent and modname.
-
-  def get_code(self, parent, modname, fqname):
-    """The import hook.
-
-       Called by imputil.FunctionImporter.
-       Override extract to tune getting code from the Archive."""
-    rslt = self.extract(fqname) # None if not found, (ispkg, code) otherwise
-    if rslt is None:
-      return None
-    ispkg, code = rslt
-    if ispkg:
-      return ispkg, code, {'__path__': []}
-    return rslt
-
-  ####### Core method - Override as needed  #########
-  def extract(self, name):
-    """ Get the object corresponding to name, or None.
-
-        NAME is the name as specified in an 'import name'.
-        'import a.b' will become:
-        extract('a') (return None because 'a' is not a code object)
-        extract('a.__init__') (return a code object)
-        extract('a.b') (return a code object)
-        Default implementation:
-          self.toc is a dict
-          self.toc[name] is pos
-          self.lib has the code object marshal-ed at pos
-    """
-    ispkg, pos = self.toc.get(name, (0, None))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.load(self.lib)
-
-  ########################################################################
-  # Informational methods
-
-  def contents(self):
-    """Return a list of the contents.
-
-       Default implementation assumes self.toc is a dict like object.
-    """
-    return self.toc.keys()
-
-  ########################################################################
-  # Building
-
-  ####### Top level method - shouldn't need overriding #######
-  def build(self, path, lTOC):
-    """Create an archive file of name PATH from LTOC.
-
-       lTOC is a 'logical TOC' - a list of (name, path, ...)
-       where name is the internal (import) name,
-       and path is a file to get the object from, eg './a.pyc'.
-    """
-    self.path = path
-    self.lib = open(path, 'wb')
-    #reserve space for the header
-    if self.HDRLEN:
-      self.lib.write('\0'*self.HDRLEN)
-
-    #create an empty toc
-
-    if type(self.TOCTMPLT) == type({}):
-      self.toc = {}
-    else:       # assume callable
-      self.toc = self.TOCTMPLT()
-
-    for tocentry in lTOC:
-      self.add(tocentry)   # the guts of the archive
-
-    tocpos = self.lib.tell()
-    self.save_toc(tocpos)
-    if self.TRLLEN:
-      self.save_trailer(tocpos)
-    if self.HDRLEN:
-      self.update_headers(tocpos)
-    self.lib.close()
-
-
-  ####### manages keeping the internal TOC and the guts in sync #######
-  def add(self, entry):
-    """Add an entry to the archive.
-
-      Override this to influence the mechanics of the Archive.
-       Assumes entry is a seq beginning with (nm, pth, ...) where
-       nm is the key by which we'll be asked for the object.
-       pth is the name of where we find the object.
-    """
-    if self.os is None:
-      import os
-      self.os = os
-    nm = entry[0]
-    pth = entry[1]
-    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-    self.toc[nm] = (ispkg, self.lib.tell())
-    f = open(entry[1], 'rb')
-    f.seek(8)   #skip magic and timestamp
-    self.lib.write(f.read())
-
-  def save_toc(self, tocpos):
-    """Save the table of contents.
-
-       Default - toc is a dict
-       Gets marshaled to self.lib
-    """
-    marshal.dump(self.toc, self.lib)
-
-  def save_trailer(self, tocpos):
-    """Placeholder for Archives with trailers."""
-    pass
-
-  def update_headers(self, tocpos):
-    """Update any header data.
-
-       Default header is  MAGIC + Python's magic + tocpos"""
-    self.lib.seek(self.start)
-    self.lib.write(self.MAGIC)
-    self.lib.write(self.pymagic)
-    self.lib.write(struct.pack('=i', tocpos))
-
-##############################################################
-#
-# ZlibArchive - an archive with compressed entries
-#
-
-class ZlibArchive(Archive):
-  """A subclass of Archive that compresses entries with zlib
-     and uses a (marshalled) dict as a table of contents"""
-  MAGIC = 'PYZ\0'
-  TOCPOS = 8
-  HDRLEN = 12
-  TRLLEN = 0
-  TOCTMPLT = {}
-  LEVEL = 9
-
-  def __init__(self, path=None, offset=0):
-    Archive.__init__(self, path, offset)
-    # dynamic import so not imported if not needed
-    global zlib
-    import zlib
-
-  def extract(self, name):
-    """Get the code object for NAME.
-
-       Return None if name is not in the table of contents.
-       Otherwise, return a tuple (ispkg, code)"""
-    (ispkg, pos, lngth) = self.toc.get(name, (0, None, 0))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.loads(zlib.decompress(self.lib.read(lngth)))
-
-  def add(self, entry):
-    """Add an entry.
-
-       ENTRY is a sequence where entry[0] is name and entry[1] is full path name.
-       zlib compress the code object, and build a toc entry"""
-    if self.os is None:
-      import os
-      self.os = os
-    nm = entry[0]
-    pth = entry[1]
-    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-    f = open(pth, 'rb')
-    f.seek(8)   #skip magic and timestamp
-    obj = zlib.compress(f.read(), self.LEVEL)
-    self.toc[nm] = (ispkg, self.lib.tell(), len(obj))
-    self.lib.write(obj)
-

+ 0 - 226
direct/src/pyinst/archive_rt.py

@@ -1,226 +0,0 @@
-#
-# Gordon McMillan (as inspired and influenced by Greg Stein)
-#
-
-# subclasses may not need marshal or struct, but since they're
-# builtin, importing is safe.
-#
-# While an Archive is really an abstraction for any "filesystem
-# within a file", it is tuned for use with imputil.FuncImporter.
-# This assumes it contains python code objects, indexed by the
-# the internal name (ie, no '.py').
-# See carchive.py for a more general archive (contains anything)
-# that can be understood by a C program.
-
-#archive_rt is a stripped down version of MEInc.Dist.archive.
-#It has had all building logic removed.
-#It's purpose is to bootstrap the Python installation.
-
-import marshal
-import struct
-
-class Archive:
-  """ A base class for a repository of python code objects.
-      The extract method is used by imputil.ArchiveImporter
-      to get code objects by name (fully qualified name), so
-      an enduser "import a.b" would become
-        extract('a.__init__')
-        extract('a.b')
-  """
-  MAGIC = 'PYL\0'
-  HDRLEN = 12        # default is MAGIC followed by python's magic, int pos of toc
-  TOCPOS = 8
-  TRLLEN = 0        # default - no trailer
-  TOCTMPLT = {}     #
-  os = None
-  def __init__(self, path=None, start=0):
-    "Initialize an Archive. If path is omitted, it will be an empty Archive."
-    self.toc = None
-    self.path = path
-    self.start = start
-    import imp
-    self.pymagic = imp.get_magic()
-    if path is not None:
-      self.lib = open(self.path, 'rb')
-      self.checkmagic()
-      self.loadtoc()
-
-  ####### Sub-methods of __init__ - override as needed #############
-  def checkmagic(self):
-    """ Overridable.
-        Check to see if the file object self.lib actually has a file
-        we understand.
-    """
-    self.lib.seek(self.start)   #default - magic is at start of file
-    if self.lib.read(len(self.MAGIC)) != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    if self.lib.read(len(self.pymagic)) != self.pymagic:
-      raise RuntimeError, "%s has version mismatch to dll" % (self.path)
-
-  def loadtoc(self):
-    """ Overridable.
-        Default: After magic comes an int (4 byte native) giving the
-        position of the TOC within self.lib.
-        Default: The TOC is a marshal-able string.
-    """
-    self.lib.seek(self.start + self.TOCPOS)
-    (offset,) = struct.unpack('=i', self.lib.read(4))
-    self.lib.seek(self.start + offset)
-    self.toc = marshal.load(self.lib)
-
-  ######## This is what is called by FuncImporter #######
-  ## Since an Archive is flat, we ignore parent and modname.
-
-  def get_code(self, parent, modname, fqname):
-    print "parent: ", parent
-    print "modname: ", modname
-    print "fqname: ", fqname
-    return self.extract(fqname) # None if not found, (ispkg, code) otherwise
-    if rslt is None:
-      return None
-    ispkg, code = rslt
-    if ispkg:
-      return ispkg, code, {'__path__': []}
-    return rslt
-
-  ####### Core method - Override as needed  #########
-  def extract(self, name):
-    """ Get the object corresponding to name, or None.
-        For use with imputil ArchiveImporter, object is a python code object.
-        'name' is the name as specified in an 'import name'.
-        'import a.b' will become:
-        extract('a') (return None because 'a' is not a code object)
-        extract('a.__init__') (return a code object)
-        extract('a.b') (return a code object)
-        Default implementation:
-          self.toc is a dict
-          self.toc[name] is pos
-          self.lib has the code object marshal-ed at pos
-    """
-    ispkg, pos = self.toc.get(name, (0, None))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.load(self.lib)
-
-  ########################################################################
-  # Informational methods
-
-  def contents(self):
-    """Return a list of the contents
-       Default implementation assumes self.toc is a dict like object.
-       Not required by ArchiveImporter.
-    """
-    return self.toc.keys()
-
-  ########################################################################
-  # Building
-
-  ####### Top level method - shouldn't need overriding #######
-##  def build(self, path, lTOC):
-##    """Create an archive file of name 'path'.
-##       lTOC is a 'logical TOC' - a list of (name, path, ...)
-##       where name is the internal name, eg 'a'
-##       and path is a file to get the object from, eg './a.pyc'.
-##    """
-##    self.path = path
-##    self.lib = open(path, 'wb')
-##    #reserve space for the header
-##    if self.HDRLEN:
-##      self.lib.write('\0'*self.HDRLEN)
-##
-##    #create an empty toc
-##
-##    if type(self.TOCTMPLT) == type({}):
-##      self.toc = {}
-##    else:       # assume callable
-##      self.toc = self.TOCTMPLT()
-##
-##    for tocentry in lTOC:
-##      self.add(tocentry)   # the guts of the archive
-##
-##    tocpos = self.lib.tell()
-##    self.save_toc(tocpos)
-##    if self.TRLLEN:
-##      self.save_trailer(tocpos)
-##    if self.HDRLEN:
-##      self.update_headers(tocpos)
-##    self.lib.close()
-##
-##
-##  ####### manages keeping the internal TOC and the guts in sync #######
-##  def add(self, entry):
-##    """Override this to influence the mechanics of the Archive.
-##       Assumes entry is a seq beginning with (nm, pth, ...) where
-##       nm is the key by which we'll be asked for the object.
-##       pth is the name of where we find the object. Overrides of
-##       get_obj_from can make use of further elements in entry.
-##    """
-##    if self.os is None:
-##      import os
-##      self.os = os
-##    nm = entry[0]
-##    pth = entry[1]
-##    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-##    self.toc[nm] = (ispkg, self.lib.tell())
-##    f = open(entry[1], 'rb')
-##    f.seek(8) #skip magic and timestamp
-##    self.lib.write(f.read())
-##
-##  def save_toc(self, tocpos):
-##    """Default - toc is a dict
-##       Gets marshaled to self.lib
-##    """
-##    marshal.dump(self.toc, self.lib)
-##
-##  def save_trailer(self, tocpos):
-##    """Default - not used"""
-##    pass
-##
-##  def update_headers(self, tocpos):
-##    """Default - MAGIC + Python's magic + tocpos"""
-##    self.lib.seek(self.start)
-##    self.lib.write(self.MAGIC)
-##    self.lib.write(self.pymagic)
-##    self.lib.write(struct.pack('=i', tocpos))
-
-##############################################################
-#
-# ZlibArchive - an archive with compressed entries
-#
-
-class ZlibArchive(Archive):
-  MAGIC = 'PYZ\0'
-  TOCPOS = 8
-  HDRLEN = 12
-  TRLLEN = 0
-  TOCTMPLT = {}
-  LEVEL = 9
-
-  def __init__(self, path=None, offset=0):
-    Archive.__init__(self, path, offset)
-    # dynamic import so not imported if not needed
-    global zlib
-    import zlib
-
-  def extract(self, name):
-    (ispkg, pos, lngth) = self.toc.get(name, (0, None, 0))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.loads(zlib.decompress(self.lib.read(lngth)))
-
-##  def add(self, entry):
-##    if self.os is None:
-##      import os
-##      self.os = os
-##    nm = entry[0]
-##    pth = entry[1]
-##    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-##    f = open(pth, 'rb')
-##    f.seek(8) #skip magic and timestamp
-##    obj = zlib.compress(f.read(), self.LEVEL)
-##    self.toc[nm] = (ispkg, self.lib.tell(), len(obj))
-##    self.lib.write(obj)
-##

+ 0 - 81
direct/src/pyinst/archivebuilder.py

@@ -1,81 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-# Gordon McMillan [email protected]
-#
-# A collection of routines for building a logical Table Of Contents
-# that Archive (subclasses) use to build themselves.
-# A logical Table of Contents is a sequence, each element of which is
-# a sequence, with at least 2 entries - "name" and "path".
-
-import os
-
-import string
-
-import py_compile
-
-def GetCompiled(seq, lvl='c'):
-  """SEQ is a list of .py files, or a logical TOC.
-     Return as .pyc or .pyo files (LVL) after ensuring their existence"""
-  if len(seq) == 0:
-    return seq
-  rslt = []
-  isTOC = 0
-  if type(seq[0]) == type(()):
-    isTOC = 1
-  for py in seq:
-    if isTOC:
-      (nm, fnm), rest = py[:2], py[2:]
-    else:
-      fnm = py
-    fnm = os.path.splitext(fnm)[0] + '.py'
-    cmpl = 1
-    pyc = fnm + lvl
-    if os.path.exists(pyc):
-      pytm = long(os.stat(fnm)[8])
-      ctm = long(os.stat(pyc)[8])
-      if pytm < ctm:
-        cmpl = 0
-    if cmpl:
-      py_compile.compile(fnm, pyc)
-    if isTOC:
-      rslt.append((nm, pyc)+rest)
-    else:
-      rslt.append(pyc)
-  return rslt
-
-import modulefinder
-MF = modulefinder
-import sys
-
-def Dependencies(script):
-  """Get a logical TOC directly from the dependencies of a script.
-  
-     The returned TOC does NOT contain the script.
-     It does contain extension modules. Uses modulefinder."""
-  rslt = []
-  (dir, name) = os.path.split(script)
-  if dir:
-    ppath = [os.path.normpath(dir)] + sys.path
-  else:
-    ppath = sys.path[:]
-  mf = MF.ModuleFinder(ppath, 0)
-  try:
-    mf.run_script(script)
-  except IOError:
-    print " Script not found:", script
-    return []
-  del mf.modules['__main__']
-  for (k, v) in mf.modules.items():
-    if v.__file__ is None:
-      del mf.modules[k]  # a builtin
-  for (k, v) in mf.modules.items():
-    #ispkg = os.path.basename(v.__file__) == '__init__.py'
-    d = os.path.dirname(v.__file__)
-    if not d:
-      v.__file__ = os.path.join(os.getcwd(), v.__file__)
-    #if ispkg:
-    #    rslt.append(k+'.__init__', v.__file__)
-    #else:
-    rslt.append((k, v.__file__))
-  return rslt
-

+ 0 - 169
direct/src/pyinst/bindepend.py

@@ -1,169 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-#
-# use dumpbin.exe (if present) to find the binary
-# dependencies of an extension module.
-# if dumpbin not available, pick apart the PE hdr of the binary
-# while this appears to work well, it is complex and subject to
-# problems with changes to PE hdrs (ie, this works only on 32 bit Intel
-# Windows format binaries)
-#
-# Note also that you should check the results to make sure that the
-# dlls are redistributable. I've listed most of the common MS dlls
-# under "excludes" below; add to this list as necessary (or use the
-# "excludes" option in the INSTALL section of the config file).
-
-import os
-import time
-import string
-import sys
-import tempfile
-import finder
-
-seen = {}
-excludes = {'KERNEL32.DLL':1,
-      'ADVAPI.DLL':1,
-      'MSVCRT.DLL':1,
-      'ADVAPI32.DLL':1,
-      'COMCTL32.DLL':1,
-      'CRTDLL.DLL':1,
-      'GDI32.DLL':1,
-      'MFC42.DLL':1,
-      'NTDLL.DLL':1,
-      'OLE32.DLL':1,
-      'OLEAUT32.DLL':1,
-      'RPCRT4.DLL':1,
-      'SHELL32.DLL':1,
-      'USER32.DLL':1,
-      'WINSPOOL.DRV':1,
-      'WS2HELP.DLL':1,
-      'WS2_32.DLL':1,
-      'WSOCK32.DLL':1,
-      'WINMM.DLL':1,
-      'COMDLG32.DLL':1,
-      'ZLIB.DLL':1,
-      'ODBC32.DLL':1,
-      'VERSION.DLL':1}
-
-def getfullnameof(mod, xtrapath = None):
-  """Return the full path name of MOD.
-
-      MOD is the basename of a dll or pyd.
-      XTRAPATH is a path or list of paths to search first.
-      Return the full path name of MOD.
-      Will search the full Windows search path, as well as sys.path"""
-  epath = finder.getpath()
-  if mod[-4:] in ('.pyd', '.PYD'):
-    epath = epath + sys.path
-  if xtrapath is not None:
-    if type(xtrapath) == type(''):
-      epath.insert(0, xtrapath)
-    else:
-      epath = xtrapath + epath
-  for p in epath:
-    npth = os.path.join(p, mod)
-    if os.path.exists(npth):
-      return npth
-  return ''
-
-def getImports1(pth):
-    """Find the binary dependencies of PTH.
-
-        This implementation (not used right now) uses the MSVC utility dumpbin"""
-    rslt = []
-    tmpf = tempfile.mktemp()
-    os.system('dumpbin /IMPORTS "%s" >%s' %(pth, tmpf))
-    time.sleep(0.1)
-    txt = open(tmpf,'r').readlines()
-    os.remove(tmpf)
-    i = 0
-    while i < len(txt):
-        tokens = string.split(txt[i])
-        if len(tokens) == 1 and string.find(tokens[0], '.') > 0:
-            rslt.append(string.strip(tokens[0]))
-        i = i + 1
-    return rslt
-
-def getImports2(pth):
-    """Find the binary dependencies of PTH.
-
-        This implementation walks through the PE header"""
-    import struct
-    rslt = []
-    try:
-      f = open(pth, 'rb').read()
-      pehdrd = struct.unpack('l', f[60:64])[0]
-      magic = struct.unpack('l', f[pehdrd:pehdrd+4])[0]
-      numsecs = struct.unpack('h', f[pehdrd+6:pehdrd+8])[0]
-      numdirs = struct.unpack('l', f[pehdrd+116:pehdrd+120])[0]
-      idata = ''
-      if magic == 17744:
-          importsec, sz = struct.unpack('2l', f[pehdrd+128:pehdrd+136])
-          secttbl = pehdrd + 120 + 8*numdirs
-          secttblfmt = '8s7l2h'
-          seclist = []
-          for i in range(numsecs):
-              seclist.append(struct.unpack(secttblfmt, f[secttbl+i*40:secttbl+(i+1)*40]))
-              #nm, vsz, va, rsz, praw, preloc, plnnums, qrelocs, qlnnums, flags \
-              # = seclist[-1]
-          for i in range(len(seclist)-1):
-              if seclist[i][2] <= importsec < seclist[i+1][2]:
-                  break
-          vbase = seclist[i][2]
-          raw = seclist[i][4]
-          idatastart = raw + importsec - vbase
-          idata = f[idatastart:idatastart+seclist[i][1]]
-          i = 0
-          while 1:
-              vsa =  struct.unpack('5l', idata[i*20:i*20+20])[3]
-              if vsa == 0:
-                  break
-              sa = raw + vsa - vbase
-              end = string.find(f, '\000', sa)
-              rslt.append(f[sa:end])
-              i = i + 1
-    except IOError:
-      print "bindepend cannot analyze %s - file not found!"
-    except struct.error:
-      print "bindepend cannot analyze %s - error walking thru pehdr"
-    return rslt
-
-def Dependencies(lTOC):
-  """Expand LTOC to include all the closure of binary dependencies.
-
-     LTOC is a logical table of contents, ie, a seq of tuples (name, path).
-     Return LTOC expanded by all the binary dependencies of the entries
-     in LTOC, except those listed in the module global EXCLUDES"""
-  for (nm, pth) in lTOC:
-    fullnm = string.upper(os.path.basename(pth))
-    if seen.get(string.upper(nm), 0):
-      continue
-    print "analyzing", nm
-    seen[string.upper(nm)] = 1
-    dlls = getImports(pth)
-    for lib in dlls:
-        print " found", lib
-        if excludes.get(string.upper(lib), 0):
-          continue
-        if seen.get(string.upper(lib), 0):
-          continue
-        npth = getfullnameof(lib)
-        if npth:
-          lTOC.append((lib, npth))
-        else:
-          print " lib not found:", lib, "dependency of",
-  return lTOC
-
-
-##if getfullnameof('dumpbin.exe') == '':
-##    def getImports(pth):
-##        return getImports2(pth)
-##else:
-##    def getImports(pth):
-##        return getImports1(pth)
-
-def getImports(pth):
-    """Forwards to either getImports1 or getImports2
-    """
-    return getImports2(pth)
-

+ 0 - 204
direct/src/pyinst/carchive.py

@@ -1,204 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-#
-# A subclass of Archive that can be understood
-# by a C program. See uplaunch.cpp for unpacking
-# from C.
-import archive
-import struct
-import zlib
-import strop
-
-class CTOC:
-  """A class encapsulating the table of contents of a CArchive.
-  
-     When written to disk, it is easily read from C."""
-  ENTRYSTRUCT = 'iiiibc' #(structlen, dpos, dlen, ulen, flag, typcd) followed by name
-  def __init__(self):
-    self.data = []
-  
-  def frombinary(self, s):
-    """Decode the binary string into an in memory list.
-    
-        S is a binary string."""
-    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-    p = 0
-    while p<len(s):
-      (slen, dpos, dlen, ulen, flag, typcd) = struct.unpack(self.ENTRYSTRUCT, 
-                                                  s[p:p+entrylen]) 
-      nmlen = slen - entrylen 
-      p = p + entrylen
-      (nm,) = struct.unpack(repr(nmlen)+'s', s[p:p+nmlen])
-      p = p + nmlen 
-      self.data.append((dpos, dlen, ulen, flag, typcd, nm[:-1]))
-
-  def tobinary(self):
-    """Return self as a binary string."""
-    import string
-    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-    rslt = []
-    for (dpos, dlen, ulen, flag, typcd, nm) in self.data:
-      nmlen = len(nm) + 1       # add 1 for a '\0'
-      rslt.append(struct.pack(self.ENTRYSTRUCT+repr(nmlen)+'s',
-        nmlen+entrylen, dpos, dlen, ulen, flag, typcd, nm+'\0'))
-    return string.join(rslt, '')
-
-  def add(self, dpos, dlen, ulen, flag, typcd, nm):
-    """Add an entry to the table of contents.
-    
-       DPOS is data position.
-       DLEN is data length.
-       ULEN is the uncompressed data len.
-       FLAG says if the data is compressed.
-       TYPCD is the "type" of the entry (used by the C code)
-       NM is the entry's name."""
-    self.data.append((dpos, dlen, ulen, flag, typcd, nm))
-
-  def get(self, ndx):
-    """return the toc entry (tuple) at index NDX"""
-    return self.data[ndx]
-
-  def __getitem__(self, ndx):
-    return self.data[ndx]
-
-  def find(self, name):
-    """Return the index of the toc entry with name NAME.
-    
-       Return -1 for failure."""
-    for i in range(len(self.data)):
-      if self.data[i][-1] == name:
-        return i
-    return -1
-
-class CArchive(archive.Archive):
-  """An Archive subclass that an hold arbitrary data.
-  
-     Easily handled from C or from Python."""
-  MAGIC = 'MEI\014\013\012\013\015'
-  HDRLEN = 0
-  TOCTMPLT = CTOC
-  TRLSTRUCT = '8siii'
-  TRLLEN = 20
-  LEVEL = 9
-  def __init__(self, path=None, start=0, len=0):
-    """Constructor.
-    
-       PATH is path name of file (create an empty CArchive if path is None).
-       START is the seekposition within PATH.
-       LEN is the length of the CArchive (if 0, then read till EOF). """
-    self.len = len
-    archive.Archive.__init__(self, path, start)
-
-  def checkmagic(self):
-    """Verify that self is a valid CArchive.
-    
-        Magic signature is at end of the archive."""
-    #magic is at EOF; if we're embedded, we need to figure where that is
-    if self.len:
-      self.lib.seek(self.start+self.len, 0)
-    else:
-      self.lib.seek(0, 2)
-    filelen = self.lib.tell()
-    if self.len:
-      self.lib.seek(self.start+self.len-self.TRLLEN, 0)
-    else:
-      self.lib.seek(-self.TRLLEN, 2)
-    (magic, totallen, tocpos, toclen) = struct.unpack(self.TRLSTRUCT, 
-                                                self.lib.read(self.TRLLEN))
-    if magic != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    self.pkgstart = filelen - totallen
-    if self.len:
-      if totallen != self.len or self.pkgstart != self.start:
-        raise RuntimeError, "Problem with embedded archive in %s" % self.path
-    self.tocpos, self.toclen = tocpos, toclen
-
-  def loadtoc(self):
-    """Load the table of contents into memory."""
-    self.toc = self.TOCTMPLT()
-    self.lib.seek(self.pkgstart+self.tocpos)
-    tocstr = self.lib.read(self.toclen)
-    self.toc.frombinary(tocstr)
-
-  def extract(self, name):
-    """Get the contents of an entry.
-    
-       NAME is an entry name.
-       Return the tuple (ispkg, contents).
-       For non-Python resoures, ispkg is meaningless (and 0).
-       Used by the import mechanism."""
-    if type(name) == type(''):
-      ndx = self.toc.find(name)
-      if ndx == -1:
-        return None
-    else:
-      ndx = name
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    self.lib.seek(self.pkgstart+dpos)
-    rslt = self.lib.read(dlen)
-    if flag == 1:
-      rslt = zlib.decompress(rslt)
-    if typcd == 'M':
-      return (1, rslt)
-    return (0, rslt)
-
-  def contents(self):
-    """Return the names of the entries"""
-    rslt = []
-    for (dpos, dlen, ulen, flag, typcd, nm) in self.toc:
-      rslt.append(nm)
-    return rslt
-
-  def add(self, entry):
-    """Add an ENTRY to the CArchive.
-    
-       ENTRY must have:
-         entry[0] is name (under which it will be saved).
-         entry[1] is fullpathname of the file.
-         entry[2] is a flag for it's storage format (0==uncompressed,
-         1==compressed, 2==Python source format)
-         entry[3] is the entry's type code."""
-    (nm, pathnm, flag, typcd) = entry[:4]
-    if flag == 2:
-        s = open(pathnm, 'r').read()
-        s = s + '\n\0'
-    else:
-        s = open(pathnm, 'rb').read()
-    ulen = len(s)
-    if flag == 1:
-      s = zlib.compress(s, self.LEVEL)
-    dlen = len(s)
-    where = self.lib.tell()
-    if typcd == 'm':
-      if strop.find(pathnm, '.__init__.py') > -1:
-        typcd = 'M'
-    self.toc.add(where, dlen, ulen, flag, typcd, nm)
-    self.lib.write(s)
-
-  def save_toc(self, tocpos):
-    """Save the table of contents to disk."""
-    self.tocpos = tocpos
-    tocstr = self.toc.tobinary()
-    self.toclen = len(tocstr)
-    self.lib.write(tocstr)
-
-  def save_trailer(self, tocpos):
-    """Save the trailer to disk.
-    
-       CArchives can be opened from the end - the trailer points
-       back to the start. """
-    totallen = tocpos + self.toclen + self.TRLLEN
-    trl = struct.pack(self.TRLSTRUCT, self.MAGIC, totallen, 
-                      tocpos, self.toclen)
-    self.lib.write(trl)
-
-  def openEmbedded(self, name):
-    """Open a CArchive of name NAME embedded within this CArchive."""
-    ndx = self.toc.find(name)
-    if ndx == -1:
-      raise KeyError, "Member '%s' not found in %s" % (name, self.path)
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    if flag:
-      raise ValueError, "Cannot open compressed archive %s in place"
-    return CArchive(self.path, self.pkgstart+dpos, dlen)

+ 0 - 157
direct/src/pyinst/carchive_rt.py

@@ -1,157 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-#
-# A subclass of Archive that can be understood
-# by a C program. See uplaunch.cpp for unpacking
-# from C.
-
-#carchive_rt is a stripped down version of MEInc.Dist.carchive.
-#It has had all building logic removed.
-#It's purpose is to bootstrap the Python installation.
-
-import archive_rt
-import struct
-import zlib
-import strop
-
-class CTOC:
-  ENTRYSTRUCT = 'iiiibc' #(structlen, dpos, dlen, ulen, flag, typcd) followed by name
-  def __init__(self):
-    self.data = []
-  
-  def frombinary(self, s):
-    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-    p = 0
-    while p<len(s):
-      (slen, dpos, dlen, ulen, flag, typcd) = struct.unpack(self.ENTRYSTRUCT, 
-                                                  s[p:p+entrylen]) 
-      nmlen = slen - entrylen 
-      p = p + entrylen
-      (nm,) = struct.unpack(repr(nmlen)+'s', s[p:p+nmlen])
-      p = p + nmlen 
-      self.data.append((dpos, dlen, ulen, flag, typcd, nm[:-1]))
-
-##  def tobinary(self):
-##    import string
-##    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-##    rslt = []
-##    for (dpos, dlen, ulen, flag, typcd, nm) in self.data:
-##      nmlen = len(nm) + 1     # add 1 for a '\0'
-##      rslt.append(struct.pack(self.ENTRYSTRUCT+repr(nmlen)+'s',
-##        nmlen+entrylen, dpos, dlen, ulen, flag, typcd, nm+'\0'))
-##    return string.join(rslt, '')
-##
-##  def add(self, dpos, dlen, ulen, flag, typcd, nm):
-##    self.data.append(dpos, dlen, ulen, flag, typcd, nm)
-
-  def get(self, ndx):
-    return self.data[ndx]
-
-  def __getitem__(self, ndx):
-    return self.data[ndx]
-
-  def find(self, name):
-    for i in range(len(self.data)):
-      if self.data[i][-1] == name:
-        return i
-    return -1
-
-class CArchive(archive_rt.Archive):
-  MAGIC = 'MEI\014\013\012\013\015'
-  HDRLEN = 0
-  TOCTMPLT = CTOC
-  TRLSTRUCT = '8siii'
-  TRLLEN = 20
-  LEVEL = 9
-  def __init__(self, path=None, start=0, len=0):
-    self.len = len
-    archive_rt.Archive.__init__(self, path, start)
-
-  def checkmagic(self):
-    #magic is at EOF; if we're embedded, we need to figure where that is
-    if self.len:
-      self.lib.seek(self.start+self.len, 0)
-    else:
-      self.lib.seek(0, 2)
-    filelen = self.lib.tell()
-    if self.len:
-      self.lib.seek(self.start+self.len-self.TRLLEN, 0)
-    else:
-      self.lib.seek(-self.TRLLEN, 2)
-    (magic, totallen, tocpos, toclen) = struct.unpack(self.TRLSTRUCT, 
-                                                self.lib.read(self.TRLLEN))
-    if magic != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    self.pkgstart = filelen - totallen
-    if self.len:
-      if totallen != self.len or self.pkgstart != self.start:
-        raise RuntimeError, "Problem with embedded archive in %s" % self.path
-    self.tocpos, self.toclen = tocpos, toclen
-
-  def loadtoc(self):
-    self.toc = self.TOCTMPLT()
-    self.lib.seek(self.pkgstart+self.tocpos)
-    tocstr = self.lib.read(self.toclen)
-    self.toc.frombinary(tocstr)
-
-  def extract(self, name):
-    if type(name) == type(''):
-      ndx = self.toc.find(name)
-      if ndx == -1:
-        return None
-    else:
-      ndx = name
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    self.lib.seek(self.pkgstart+dpos)
-    rslt = self.lib.read(dlen)
-    if flag == 1:
-      rslt = zlib.decompress(rslt)
-    if typcd == 'M':
-      return (1, rslt)
-    return (0, rslt)
-
-  def contents(self):
-    rslt = []
-    for (dpos, dlen, ulen, flag, typcd, nm) in self.toc:
-      rslt.append(nm)
-    return rslt
-
-##  def add(self, entry):
-##    (nm, pathnm, flag, typcd) = entry[:4]
-##    if flag == 2:
-##        s = open(pathnm, 'r').read()
-##        s = s + '\0'
-##    else:
-##        s = open(pathnm, 'rb').read()
-##    ulen = len(s)
-##    if flag == 1:
-##      s = zlib.compress(s, self.LEVEL)
-##    dlen = len(s)
-##    where = self.lib.tell()
-##    if typcd == 'm':
-##      if strop.find(pathnm, '.__init__.py') > -1:
-##        typcd = 'M'
-##    self.toc.add(where, dlen, ulen, flag, typcd, nm)
-##    self.lib.write(s)
-##
-##  def save_toc(self, tocpos):
-##    self.tocpos = tocpos
-##    tocstr = self.toc.tobinary()
-##    self.toclen = len(tocstr)
-##    self.lib.write(tocstr)
-##
-##  def save_trailer(self, tocpos):
-##    totallen = tocpos + self.toclen + self.TRLLEN
-##    trl = struct.pack(self.TRLSTRUCT, self.MAGIC, totallen, 
-##                      tocpos, self.toclen)
-##    self.lib.write(trl)
-
-  def openEmbedded(self, name):
-    ndx = self.toc.find(name)
-    if ndx == -1:
-      raise KeyError, "Member '%s' not found in %s" % (name, self.path)
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    if flag:
-      raise ValueError, "Cannot open compressed archive %s in place"
-    return CArchive(self.path, self.pkgstart+dpos, dlen)

+ 0 - 178
direct/src/pyinst/finder.py

@@ -1,178 +0,0 @@
-# copyright McMillan Enterprises, 1999
-import os, sys
-import string
-
-SCRIPT = 1
-GSCRIPT = 2
-MODULE = 3
-PACKAGE = 4
-PBINARY = 5
-BINARY = 6
-ZLIB = 7
-DIRECTORY = 8
-DATA = 9
-
-_bpath = None
-_ppath = None
-_pcache = {}
-
-def _locate(nm, xtrapath=None, base=None):
-    """Find a file / directory named NM in likely places.
-    
-       XTRAPATH is a list of paths to prepend to BASE.
-       If BASE is None, sys.path (as extended by packages) is used."""
-    ppath = base
-    if base is None:
-        ppath = _ppath
-    if xtrapath:
-        ppath = xtrapath + ppath
-    for pth in ppath:
-        fullnm = os.path.join(pth, nm)
-        #print " _locate trying", fullnm
-        if os.path.exists(fullnm):
-            break
-    else:
-        return ''
-    return fullnm
-
-def _locatepython(name, xtrapath=None):
-    """Locate a Python resource named NAME.
-    
-       All of the standard file extensions will be tried.
-       XTRAPATH is prepended to sys.path."""
-    for ext in ('.py', '.pyc', '.pyw', '.pyo', '.pyd', '.dll'):
-        fullnm = _locate(name+ext, xtrapath)
-        if fullnm:
-            break
-    else:
-        for ext in ('.pyd', '.dll'):
-            fullnm = _locate(name+ext, [], _bpath)
-            if fullnm:
-                break
-    return fullnm
-
-def ispackage(name):
-    """Determine if NAME is the name of a package."""
-    if os.path.exists(os.path.join(name, '__init__.py')):
-        return 1
-    if os.path.exists(os.path.join(name, '__init__.pyc')):
-        return 1
-    if os.path.exists(os.path.join(name, '__init__.pyo')):
-        return 1
-    return 0
-        
-def idtype(fullnm):
-    """Figure out what type of resource FULLNM refers to."""
-    if os.path.isdir(fullnm):
-        if ispackage(fullnm):
-            return PACKAGE
-        return DIRECTORY
-    ext = os.path.splitext(fullnm)[1]
-    if ext:
-        if ext == '.pyd':
-            return PBINARY
-        if ext == '.dll':
-            return BINARY
-        if ext in ('.pyc', '.pyo'):
-            return MODULE
-        if ext == '.py':
-            return SCRIPT
-        if ext == '.pyw':
-            return GSCRIPT
-        if ext == '.pyz':
-            return ZLIB
-    return DATA
-
-def identify(name, xtrapath=None):
-    """Find, and identify the type of NAME, using XTRAPATH as the
-       first place to look.
-
-       Return type, name and full path name.
-       NAME can be a logical or physical name. However, the logical
-       name of a Python module can easily conflict with the physical
-       name of something else, so beware."""
-    if os.path.exists(name):
-        fullnm = name
-    else:
-        if xtrapath is None:
-            xtra = []
-        elif id(xtrapath) in _pcache:
-            xtra = _pcache[id(xtrapath)]
-        else:
-            xtra = expand(xtrapath)
-            _pcache[id(xtrapath)] = xtra 
-        fullnm = _locate(name, xtra)
-        if not fullnm:
-            fullnm =  _locate(name, [], _bpath)
-            if not fullnm:
-                ext = os.path.splitext(name)[1]
-                if not ext:
-                    fullnm = _locatepython(name, xtra)
-                    if not fullnm:
-                        raise ValueError, "%s not found" % name
-                else:
-                    nm = name
-                    while string.count(nm, '.'):
-                        nm = string.replace(nm, '.', '/', 1)
-                        fullnm = _locatepython(nm, xtra)
-                        if fullnm:
-                            break
-                    else:
-                        raise ValueError, "%s not found" % name
-                    
-    typ = idtype(fullnm)
-    nm = name
-    if typ in (GSCRIPT, SCRIPT, MODULE, PACKAGE, PBINARY):
-        dir, nm = os.path.split(fullnm)
-        nm = os.path.splitext(nm)[0]
-    if typ == SCRIPT:
-        if os.path.exists(fullnm+'c') or os.path.exists(fullnm+'o'):
-            typ = MODULE
-    if typ in (MODULE, PACKAGE):
-        while idtype(dir) == PACKAGE:
-            dir, lnode = os.path.split(dir)
-            nm = lnode+'.'+nm
-    elif typ == BINARY:
-        nm = os.path.basename(fullnm)
-    return typ, nm, fullnm
- 
-def expand(plist):
-    """ expand a list of paths (like sys.path) to include all the 
-        directories that qualify as packages """
-    pkgdirs = []
-    for pth in plist:
-        os.path.walk(pth, pkgfinder, pkgdirs)
-    return plist + pkgdirs
-
-def pkgfinder(pkgdirs, dir, fnms):
-    i = 0
-    while i < len(fnms):
-        fnm = os.path.join(dir, fnms[i])
-        if os.path.isdir(fnm):
-            if ispackage(fnm):
-                pkgdirs.append(fnm)
-                i = i + 1
-            else:
-                del fnms[i]
-        else:
-            i = i + 1
-
-if _bpath is None:
-    try:
-        import win32api
-    except ImportError:
-        print "Cannot determine your Windows or System directories"
-        print "Please add them to your PATH if .dlls are not found"
-        _bpath = []
-    else:
-        sysdir = win32api.GetSystemDirectory()
-        sysdir2 = os.path.join(sysdir, '../SYSTEM')
-        windir = win32api.GetWindowsDirectory()
-        _bpath = [sysdir, sysdir2, windir]
-    _bpath.extend(string.split(os.environ.get('PATH', ''), ';'))
-if _ppath is None:
-    _ppath = expand(sys.path)
-        
-def getpath():
-    """Return the path that Windows will search for dlls."""
-    return _bpath

+ 0 - 138
direct/src/pyinst/icon.py

@@ -1,138 +0,0 @@
-# This code is courtesy of Thomas Heller, who
-# has kindly donated it to this project.
-RT_ICON = 3
-RT_GROUP_ICON = 14
-LOAD_LIBRARY_AS_DATAFILE = 2
-
-import struct
-
-class Structure:
-    def __init__ (self):
-        size = self._sizeInBytes = struct.calcsize (self._format_)
-        self._fields_ = list (struct.unpack (self._format_, '\000' * size))
-        indexes = self._indexes_ = {}
-        for i in range (len (self._names_)):
-            indexes[self._names_[i]] = i
-    def dump (self):
-        print "DUMP of", self
-        for name in self._names_:
-            if name[0] != '_':
-                print "%20s = %s" % (name, getattr (self, name))
-        print
-    def __getattr__ (self, name):
-        if name in self._names_:
-            index = self._indexes_[name]
-            return self._fields_[index]
-        try:
-            return self.__dict__[name]
-        except KeyError:
-            raise AttributeError, name
-    def __setattr__ (self, name, value):
-        if name in self._names_:
-            index = self._indexes_[name]
-            self._fields_[index] = value
-        else:
-            self.__dict__[name] = value
-    def tostring (self):
-        return apply (struct.pack, [self._format_,] + self._fields_)
-    def fromfile (self, file):
-        data = file.read (self._sizeInBytes)
-        self._fields_ = list (struct.unpack (self._format_, data))
-
-class ICONDIRHEADER (Structure):
-    _names_ = "idReserved", "idType", "idCount"
-    _format_ = "hhh"
-
-class ICONDIRENTRY (Structure):
-    _names_ = "bWidth", "bHeight", "bColorCount", "bReserved", "wPlanes", "wBitCount", "dwBytesInRes", "dwImageOffset"
-    _format_ = "bbbbhhii"
-
-class GRPICONDIR (Structure):
-    _names_ = "idReserved", "idType", "idCount"
-    _format_ = "hhh"
-
-class GRPICONDIRENTRY (Structure):
-    _names_ = "bWidth", "bHeight", "bColorCount", "bReserved", "wPlanes", "wBitCount", "dwBytesInRes", "nID"
-    _format_ = "bbbbhhih"
-
-class IconFile:
-    def __init__ (self, path):
-        self.path = path
-        file = open (path, "rb")
-        self.entries = []
-        self.images = []
-        header = self.header = ICONDIRHEADER()
-        header.fromfile (file)
-        for i in range (header.idCount):
-            entry = ICONDIRENTRY()
-            entry.fromfile (file)
-            self.entries.append (entry)
-        for e in self.entries:
-            file.seek (e.dwImageOffset, 0)
-            self.images.append (file.read (e.dwBytesInRes))
-
-    def grp_icon_dir (self):
-        return self.header.tostring()
-
-    def grp_icondir_entries (self):
-        data = ""
-        i = 1
-        for entry in self.entries:
-            e = GRPICONDIRENTRY()
-            for n in e._names_[:-1]:
-                setattr(e, n, getattr (entry, n))
-            e.nID = i
-            i = i + 1
-            data = data + e.tostring()
-        return data
-            
-
-def CopyIcons_FromIco (dstpath, srcpath):
-    f = IconFile (srcpath)
-    print "Updating icons from", srcpath, "to", dstpath
-    import win32api #, win32con
-    hdst = win32api.BeginUpdateResource (dstpath, 0)
-    data = f.grp_icon_dir()
-    data = data + f.grp_icondir_entries()
-    win32api.UpdateResource (hdst, RT_GROUP_ICON, 1, data)
-    print "Writing RT_GROUP_ICON resource with %d bytes" % len (data)
-    i = 1
-    for data in f.images:
-        win32api.UpdateResource (hdst, RT_ICON, i, data)
-        print "Writing RT_ICON resource with %d bytes" % len (data)
-        i = i + 1
-    win32api.EndUpdateResource (hdst, 0)
-
-def CopyIcons (dstpath, srcpath):
-    import os.path, string
-    index = None
-    try:
-        srcpath, index = map (string.strip, string.split (srcpath, ','))
-        index = int (index)
-    except:
-        pass
-    print "PATH, INDEX", srcpath, index
-    srcext = os.path.splitext (srcpath)[1]
-    if string.lower (srcext) == '.ico':
-        return CopyIcons_FromIco (dstpath, srcpath)
-    if index is not None:
-        print "Updating icons from", srcpath, ", %d to" % index, dstpath
-    else:
-        print "Updating icons from", srcpath, "to", dstpath
-    import win32api #, win32con
-    hdst = win32api.BeginUpdateResource (dstpath, 0)
-    hsrc = win32api.LoadLibraryEx (srcpath, 0, LOAD_LIBRARY_AS_DATAFILE)
-    if index is None:
-        grpname = win32api.EnumResourceNames (hsrc, RT_GROUP_ICON)[0]
-    elif index >= 0:
-        grpname = win32api.EnumResourceNames (hsrc, RT_GROUP_ICON)[index]
-    else:
-        grpname = -index
-    data = win32api.LoadResource (hsrc, RT_GROUP_ICON, grpname)
-    win32api.UpdateResource (hdst, RT_GROUP_ICON, grpname, data)
-    for iconname in win32api.EnumResourceNames (hsrc, RT_ICON):
-        data = win32api.LoadResource (hsrc, RT_ICON, iconname)
-        win32api.UpdateResource (hdst, RT_ICON, iconname, data)
-    win32api.FreeLibrary (hsrc)
-    win32api.EndUpdateResource (hdst, 0)
-

+ 0 - 487
direct/src/pyinst/imputil.py

@@ -1,487 +0,0 @@
-#
-# imputil.py
-#
-# Written by Greg Stein. Public Domain.
-# No Copyright, no Rights Reserved, and no Warranties.
-#
-# Utilities to help out with custom import mechanisms.
-#
-# Additional modifications were contribed by Marc-Andre Lemburg and
-# Gordon McMillan.
-#
-
-__version__ = '0.3'
-
-# note: avoid importing non-builtin modules
-import imp
-import sys
-import strop
-import __builtin__      ### why this instead of just using __builtins__ ??
-
-# for the DirectoryImporter
-import struct
-import marshal
-
-class Importer:
-  "Base class for replacing standard import functions."
-
-  def install(self):
-    self.__chain_import = __builtin__.__import__
-    self.__chain_reload = __builtin__.reload
-    __builtin__.__import__ = self._import_hook
-    __builtin__.reload = self._reload_hook
-
-  ######################################################################
-  #
-  # PRIVATE METHODS
-  #
-  def _import_hook(self, name, globals=None, locals=None, fromlist=None):
-    """Python calls this hook to locate and import a module.
-
-    This method attempts to load the (dotted) module name. If it cannot
-    find it, then it delegates the import to the next import hook in the
-    chain (where "next" is defined as the import hook that was in place
-    at the time this Importer instance was installed).
-    """
-
-    # determine the context of this import
-    parent = self._determine_import_context(globals)
-
-    # import the module within the context, or from the default context
-    top, tail = self._import_top_module(parent, name)
-    if top is None:
-      # the module was not found; delegate to the next import hook
-      return self.__chain_import(name, globals, locals, fromlist)
-
-    # the top module may be under the control of a different importer.
-    # if so, then defer to that importer for completion of the import.
-    # note it may be self, or is undefined so we (self) may as well
-    # finish the import.
-    importer = top.__dict__.get('__importer__', self)
-    return importer._finish_import(top, tail, fromlist)
-
-  def _finish_import(self, top, tail, fromlist):
-    # if "a.b.c" was provided, then load the ".b.c" portion down from
-    # below the top-level module.
-    bottom = self._load_tail(top, tail)
-
-    # if the form is "import a.b.c", then return "a"
-    if not fromlist:
-      # no fromlist: return the top of the import tree
-      return top
-
-    # the top module was imported by self, or it was not imported through
-    # the Importer mechanism and self is simply handling the import of
-    # the sub-modules and fromlist.
-    #
-    # this means that the bottom module was also imported by self, or we
-    # are handling things in the absence of a prior Importer
-    #
-    # ### why the heck are we handling it? what is the example scenario
-    # ### where this happens? note that we can't determine is_package()
-    # ### for non-Importer modules.
-    #
-    # since we imported/handled the bottom module, this means that we can
-    # also handle its fromlist (and reliably determine is_package()).
-
-    # if the bottom node is a package, then (potentially) import some modules.
-    #
-    # note: if it is not a package, then "fromlist" refers to names in
-    #       the bottom module rather than modules.
-    # note: for a mix of names and modules in the fromlist, we will
-    #       import all modules and insert those into the namespace of
-    #       the package module. Python will pick up all fromlist names
-    #       from the bottom (package) module; some will be modules that
-    #       we imported and stored in the namespace, others are expected
-    #       to be present already.
-    if self._is_package(bottom.__dict__):
-      self._import_fromlist(bottom, fromlist)
-
-    # if the form is "from a.b import c, d" then return "b"
-    return bottom
-
-  def _reload_hook(self, module):
-    "Python calls this hook to reload a module."
-
-    # reloading of a module may or may not be possible (depending on the
-    # importer), but at least we can validate that it's ours to reload
-    importer = module.__dict__.get('__importer__', None)
-    if importer is not self:
-      return self.__chain_reload(module)
-
-    # okay. it is ours, but we don't know what to do (yet)
-    ### we should blast the module dict and do another get_code(). need to
-    ### flesh this out and add proper docco...
-    raise SystemError, "reload not yet implemented"
-
-  def _determine_import_context(self, globals):
-    """Returns the context in which a module should be imported.
-
-    The context could be a loaded (package) module and the imported module
-    will be looked for within that package. The context could also be None,
-    meaning there is no context -- the module should be looked for as a
-    "top-level" module.
-    """
-
-    if not globals or \
-       globals.get('__importer__', None) is not self:
-      # globals does not refer to one of our modules or packages.
-      # That implies there is no relative import context, and it
-      # should just pick it off the standard path.
-      return None
-
-    # The globals refer to a module or package of ours. It will define
-    # the context of the new import. Get the module/package fqname.
-    parent_fqname = globals['__name__']
-
-    # for a package, return itself (imports refer to pkg contents)
-    if self._is_package(globals):
-      parent = sys.modules[parent_fqname]
-      assert globals is parent.__dict__
-      return parent
-
-    i = strop.rfind(parent_fqname, '.')
-
-    # a module outside of a package has no particular import context
-    if i == -1:
-      return None
-
-    # for a module in a package, return the package (imports refer to siblings)
-    parent_fqname = parent_fqname[:i]
-    parent = sys.modules[parent_fqname]
-    assert parent.__name__ == parent_fqname
-    return parent
-
-  def _import_top_module(self, parent, name):
-    """Locate the top of the import tree (relative or absolute).
-
-    parent defines the context in which the import should occur. See
-    _determine_import_context() for details.
-
-    Returns a tuple (module, tail). module is the loaded (top-level) module,
-    or None if the module is not found. tail is the remaining portion of
-    the dotted name.
-    """
-    i = strop.find(name, '.')
-    if i == -1:
-      head = name
-      tail = ""
-    else:
-      head = name[:i]
-      tail = name[i+1:]
-    if parent:
-      fqname = "%s.%s" % (parent.__name__, head)
-    else:
-      fqname = head
-    module = self._import_one(parent, head, fqname)
-    if module:
-      # the module was relative, or no context existed (the module was
-      # simply found on the path).
-      return module, tail
-    if parent:
-      # we tried relative, now try an absolute import (from the path)
-      module = self._import_one(None, head, head)
-      if module:
-        return module, tail
-
-    # the module wasn't found
-    return None, None
-
-  def _import_one(self, parent, modname, fqname):
-    "Import a single module."
-
-    # has the module already been imported?
-    try:
-      return sys.modules[fqname]
-    except KeyError:
-      pass
-
-    # load the module's code, or fetch the module itself
-    result = self.get_code(parent, modname, fqname)
-    if result is None:
-      return None
-
-    # did get_code() return an actual module? (rather than a code object)
-    is_module = type(result[1]) is type(sys)
-
-    # use the returned module, or create a new one to exec code into
-    if is_module:
-      module = result[1]
-    else:
-      module = imp.new_module(fqname)
-
-    ### record packages a bit differently??
-    module.__importer__ = self
-    module.__ispkg__ = result[0]
-
-    # if present, the third item is a set of values to insert into the module
-    if len(result) > 2:
-      module.__dict__.update(result[2])
-
-    # the module is almost ready... make it visible
-    sys.modules[fqname] = module
-
-    # execute the code within the module's namespace
-    if not is_module:
-      exec(result[1], module.__dict__)
-
-    # insert the module into its parent
-    if parent:
-      setattr(parent, modname, module)
-    return module
-
-  def _load_tail(self, m, tail):
-    """Import the rest of the modules, down from the top-level module.
-
-    Returns the last module in the dotted list of modules.
-    """
-    if tail:
-      for part in strop.splitfields(tail, '.'):
-        fqname = "%s.%s" % (m.__name__, part)
-        m = self._import_one(m, part, fqname)
-        if not m:
-          raise ImportError, "No module named " + fqname
-    return m
-
-  def _import_fromlist(self, package, fromlist):
-    'Import any sub-modules in the "from" list.'
-
-    # if '*' is present in the fromlist, then look for the '__all__' variable
-    # to find additional items (modules) to import.
-    if '*' in fromlist:
-      fromlist = list(fromlist) + list(package.__dict__.get('__all__', []))
-
-    for sub in fromlist:
-      # if the name is already present, then don't try to import it (it
-      # might not be a module!).
-      if sub != '*' and not hasattr(package, sub):
-        subname = "%s.%s" % (package.__name__, sub)
-        submod = self._import_one(package, sub, subname)
-        if not submod:
-          raise ImportError, "cannot import name " + subname
-
-  def _is_package(self, module_dict):
-    """Determine if a given module (dictionary) specifies a package.
-
-    The package status is in the module-level name __ispkg__. The module
-    must also have been imported by self, so that we can reliably apply
-    semantic meaning to __ispkg__.
-
-    ### weaken the test to issubclass(Importer)?
-    """
-    return module_dict.get('__importer__', None) is self and \
-           module_dict['__ispkg__']
-
-  ######################################################################
-  #
-  # METHODS TO OVERRIDE
-  #
-  def get_code(self, parent, modname, fqname):
-    """Find and retrieve the code for the given module.
-
-    parent specifies a parent module to define a context for importing. It
-    may be None, indicating no particular context for the search.
-
-    modname specifies a single module (not dotted) within the parent.
-
-    fqname specifies the fully-qualified module name. This is a (potentially)
-    dotted name from the "root" of the module namespace down to the modname.
-    If there is no parent, then modname==fqname.
-
-    This method should return None, a 2-tuple, or a 3-tuple.
-
-    * If the module was not found, then None should be returned.
-
-    * The first item of the 2- or 3-tuple should be the integer 0 or 1,
-      specifying whether the module that was found is a package or not.
-
-    * The second item is the code object for the module (it will be
-      executed within the new module's namespace). This item can also
-      be a fully-loaded module object (e.g. loaded from a shared lib).
-
-    * If present, the third item is a dictionary of name/value pairs that
-      will be inserted into new module before the code object is executed.
-      This provided in case the module's code expects certain values (such
-      as where the module was found). When the second item is a module
-      object, then these names/values will be inserted *after* the module
-      has been loaded/initialized.
-    """
-    raise RuntimeError, "get_code not implemented"
-
-
-######################################################################
-#
-# Simple function-based importer
-#
-class FuncImporter(Importer):
-  "Importer subclass to use a supplied function rather than method overrides."
-  def __init__(self, func):
-    self.func = func
-  def get_code(self, parent, modname, fqname):
-    return self.func(parent, modname, fqname)
-
-def install_with(func):
-  FuncImporter(func).install()
-
-
-######################################################################
-#
-# Base class for archive-based importing
-#
-class PackageArchiveImporter(Importer):
-  "Importer subclass to import from (file) archives."
-
-  def get_code(self, parent, modname, fqname):
-    if parent:
-      # if a parent "package" is provided, then we are importing a sub-file
-      # from the archive.
-      result = self.get_subfile(parent.__archive__, modname)
-      if result is None:
-        return None
-      if type(result) == type(()):
-        return (0,) + result
-      return 0, result
-
-    # no parent was provided, so the archive should exist somewhere on the
-    # default "path".
-    archive = self.get_archive(modname)
-    if archive is None:
-      return None
-    return 1, "", {'__archive__':archive}
-
-  def get_archive(self, modname):
-    """Get an archive of modules.
-
-    This method should locate an archive and return a value which can be
-    used by get_subfile to load modules from it. The value may be a simple
-    pathname, an open file, or a complex object that caches information
-    for future imports.
-
-    Return None if the archive was not found.
-    """
-    raise RuntimeError, "get_archive not implemented"
-
-  def get_subfile(self, archive, modname):
-    """Get code from a subfile in the specified archive.
-
-    Given the specified archive (as returned by get_archive()), locate
-    and return a code object for the specified module name.
-
-    A 2-tuple may be returned, consisting of a code object and a dict
-    of name/values to place into the target module.
-
-    Return None if the subfile was not found.
-    """
-    raise RuntimeError, "get_subfile not implemented"
-
-
-class PackageArchive(PackageArchiveImporter):
-  "PackageArchiveImporter subclass that refers to a specific archive."
-
-  def __init__(self, modname, archive_pathname):
-    self.__modname = modname
-    self.__path = archive_pathname
-
-  def get_archive(self, modname):
-    if modname == self.__modname:
-      return self.__path
-    return None
-
-  # get_subfile is passed the full pathname of the archive
-
-
-######################################################################
-#
-# Emulate the standard directory-based import mechanism
-#
-
-class DirectoryImporter(Importer):
-  "Importer subclass to emulate the standard importer."
-
-  def __init__(self, dir):
-    self.dir = dir
-    self.ext_char = __debug__ and 'c' or 'o'
-    self.ext = '.py' + self.ext_char
-
-  def get_code(self, parent, modname, fqname):
-    if parent:
-      dir = parent.__pkgdir__
-    else:
-      dir = self.dir
-
-    # pull the os module from our instance data. we don't do this at the
-    # top-level, because it isn't a builtin module (and we want to defer
-    # loading non-builtins until as late as possible).
-    try:
-      os = self.os
-    except AttributeError:
-      import os
-      self.os = os
-
-    pathname = os.path.join(dir, modname)
-    if os.path.isdir(pathname):
-      values = { '__pkgdir__': pathname }
-      ispkg = 1
-      pathname = os.path.join(pathname, '__init__')
-    else:
-      values = { }
-      ispkg = 0
-
-    t_py = self._timestamp(pathname + '.py')
-    t_pyc = self._timestamp(pathname + self.ext)
-    if t_py is None and t_pyc is None:
-      return None
-    code = None
-    if t_py is None or (t_pyc is not None and t_pyc >= t_py):
-      f = open(pathname + self.ext, 'rb')
-      if f.read(4) == imp.get_magic():
-        t = struct.unpack('<I', f.read(4))[0]
-        if t == t_py:
-          code = marshal.load(f)
-      f.close()
-    if code is None:
-      code = self._compile(pathname + '.py', t_py)
-    return ispkg, code, values
-
-  def _timestamp(self, pathname):
-    try:
-      s = self.os.stat(pathname)
-    except OSError:
-      return None
-    return long(s[8])
-
-  def _compile(self, pathname, timestamp):
-    codestring = open(pathname, 'r').read()
-    if codestring and codestring[-1] != '\n':
-      codestring = codestring + '\n'
-    code = __builtin__.compile(codestring, pathname, 'exec')
-
-    # try to cache the compiled code
-    try:
-      f = open(pathname + self.ext_char, 'wb')
-      f.write('\0\0\0\0')
-      f.write(struct.pack('<I', timestamp))
-      marshal.dump(code, f)
-      f.flush()
-      f.seek(0, 0)
-      f.write(imp.get_magic())
-      f.close()
-    except OSError:
-      pass
-
-    return code
-
-  def __repr__(self):
-    return '<%s.%s for "%s" at 0x%x>' % (self.__class__.__module__,
-                                         self.__class__.__name__,
-                                         self.dir,
-                                         id(self))
-
-def _test_dir():
-  "Debug/test function to create DirectoryImporters from sys.path."
-  path = sys.path[:]
-  path.reverse()
-  for d in path:
-    DirectoryImporter(d).install()
-
-######################################################################

+ 0 - 91
direct/src/pyinst/installutils.py

@@ -1,91 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# demo code - use as you please.
-import os
-import stat
-
-def copyFile(srcFiles, destFile, append=0):
-    '''
-    Copy one or more files to another file.  If srcFiles is a list, then all
-    will be concatenated together to destFile.  The append flag is also valid
-    for single file copies.
-
-    destFile will have the mode, ownership and timestamp of the last file
-    copied/appended.
-    '''
-    if type(srcFiles) == type([]):
-        # in case we need to overwrite on the first file...
-        copyFile(srcFiles[0], destFile, append)
-        for file in srcFiles[1:]:
-            copyFile(file, destFile, 1)
-        return
-
-    mode = 'wb'
-    if append:
-        mode = 'ab'
-    print " ", srcFiles, "->",
-    input = open(srcFiles, 'rb')
-    if input:
-        print destFile
-        output = open(destFile, mode)
-        while 1:
-            bytesRead = input.read(8192)
-            if bytesRead:
-                output.write(bytesRead)
-            else:
-                break
-
-        input.close()
-        output.close()
-
-        stats = os.stat(srcFiles)
-        os.chmod(destFile, stats[stat.ST_MODE])
-        try:        # FAT16 file systems have only one file time
-            os.utime(destFile, (stats[stat.ST_ATIME], stats[stat.ST_MTIME]))
-        except:
-            pass
-        try:        
-            os.chown(destFile, stats[stat.ST_UID], stats[stat.ST_GID])
-        except:
-            pass
-
-def ensure(dirct):
-    dirnm = dirct
-    plist = []
-    try:
-        while not os.path.exists(dirnm):
-            dirnm, base = os.path.split(dirnm)
-            if base == '':
-                break
-            plist.insert(0, base)
-        for d in plist:
-            dirnm = os.path.join(dirnm, d)
-            os.mkdir(dirnm)
-    except:
-        return 0
-    return 1
-
-def getinstalldir(prompt="Enter an installation directory: "):
-    while 1:
-        installdir = raw_input("Enter an installation directory: ")
-        installdir = os.path.normpath(installdir)
-        if ensure(installdir):
-            break
-        else:
-            print installdir, "is not a valid pathname"
-            r = raw_input("Try again (y/n)?: ")
-            if r in 'nN':
-                sys.exit(0)
-    return installdir
-
-def installCArchive(nm, basedir, suffixdir):
-    import carchive_rt
-    fulldir = os.path.join(basedir, suffixdir)
-    if ensure(fulldir):
-        pkg = carchive_rt.CArchive(nm)
-        for fnm in pkg.contents():
-            stuff = pkg.extract(fnm)[1]
-            outnm = os.path.join(fulldir, fnm)
-            if ensure(os.path.dirname(outnm)):
-                open(outnm, 'wb').write(stuff)
-        pkg = None
-        os.remove(nm)

+ 0 - 85
direct/src/pyinst/ltoc.py

@@ -1,85 +0,0 @@
-import os, sys, UserList
-import finder, tocfilter, resource
-
-class lTOC(UserList.UserList):
-    """ A class for managing lists of resources.
-        Should be a UserList subclass. Doh. 
-        Like a list, but has merge(other) and filter() methods """
-    def __init__(self, reslist=None, filters=None):
-        UserList.UserList.__init__(self, reslist)
-        self.filters = []
-        if filters is not None:
-            self.filters = filters[:]
-    def prepend(self, res):
-        self.resources.insert(0, res)
-    def merge(self, other):
-        ' merge in another ltoc, discarding dups and preserving order '
-        tmp = {}
-        for res in self.data:
-            tmp[res.name] = 0
-        for res in other:
-            if tmp.get(res.name, 1):
-                self.data.append(res)
-                tmp[res.name] = 0
-    def filter(self):
-        ' invoke all filters '
-        for i in range(len(self.data)):
-            res = self.data[i]
-            if res:
-                for f in self.filters:
-                    if f.matches(res):
-                        self.data[i] = None
-                        break
-        self.data = filter(None, self.data)
-        return self
-    def unique(self):
-        ' remove all duplicate entries, preserving order '
-        new = self.__class__()
-        new.merge(self)
-        self.data = new.data
-    def toList(self):
-        ' return self as a list of (name, path, typ) '
-        tmp = []
-        for res in self.data:
-            tmp.append((res.name, res.path, res.typ))
-        return tmp
-    def addFilter(self, filter):
-        if type(filter) == type(''):
-            self.filters.append(finder.makeresource(filter).asFilter())
-        else:
-            if type(filter) == type(self):
-                if isinstance(filter, tocfilter._Filter):
-                    self.filters.append(filter)
-                elif isinstance(filter, resource.resource):
-                    self.filters.append(filter.asFilter())
-                else:
-                    raise ValueError, "can't make filter from %s", repr(filter)
-            else:
-                raise ValueError, "can't make filter from %s", repr(filter)
-        print " added filter", repr(self.filters[-1])             
-            
-   
-if __name__ == '__main__':
-    sys.path.insert(0, '.')
-    import finder
-    import pprint
-    s = finder.scriptresource('finder.py', './finder.py')
-    ##    pyltoc = lTOC(s.modules)
-    ##    l1 = pyltoc.toList()
-    ##    print "Raw py ltoc:", pprint.pprint(l1)
-    ##    f1 = ModFilter(['dospath', 'macpath', 'posixpath'])
-    ##    l2 = lTOC(s.modules).filter(f1).toList()
-    ##    print "Filter out dospath, macpath, posixpath:", pprint.pprint(l2)
-    ##    f2 = DirFilter(['.'])
-    ##    l3 = lTOC(s.modules).filter(f2).toList()
-    ##    print "Filter out current dir:", pprint.pprint(l3)
-    ##    f3 = StdLibFilter()
-    ##    l4 = lTOC(s.modules).filter(f3).toList()
-    ##    print "Filter out stdlib:", pprint.pprint(l4)
-    ##    #print "Filter out current dir and stdlib:", lTOC(s.modules).filter(f2, f3).toList()
-    binltoc = lTOC(s.binaries)
-    print "Raw bin ltoc:", pprint.pprint(binltoc.toList())
-    binltoc.addFilter('c:/winnt/system32')
-    pprint.pprint(binltoc.filter().toList())
-    
-    

+ 0 - 42
direct/src/pyinst/mkarchive.py

@@ -1,42 +0,0 @@
-#import MkWrap
-import imputil
-import strop
-import zlib
-import os
-import marshal
-
-class MkImporter:
-    def __init__(self, db, viewnm='pylib'):
-        self.db = db
-        self.view = db.getas(viewnm+'[name:S, ispkg:I, code:M]') # an MkWrap view object
-    def setImportHooks(self):
-        imputil.FuncImporter(self.get_code).install()
-    def get_code(self, parent, modname, fqname):
-        if self.view is None:
-            return None
-        ndx = self.view.search(name=fqname)
-        if ndx < len(self.view):
-            row = self.view[ndx]
-            if row.name == fqname:
-                return (row.ispkg, marshal.loads(zlib.decompress(row.code)))
-        return None
-    def build(self, lTOC):
-        for entry in lTOC:
-            nm, fnm = entry[0], entry[1]
-            ispkg = os.path.splitext(os.path.basename(fnm))[0] == '__init__'
-            ndx = self.view.search(name=nm)
-            if ndx < len(self.view):
-                row = self.view[ndx]
-                if row.name != nm:
-                    self.view.insert(ndx, {})
-                    row = self.view[ndx]
-            else:
-                ndx = self.view.append({})
-                row = self.view[ndx]
-            row.name = nm
-            row.ispkg = ispkg
-            f = open(fnm, 'rb')
-            f.seek(8)
-            obj = zlib.compress(f.read(), 9)
-            row.code = obj
-        self.db.commit()

+ 0 - 436
direct/src/pyinst/modulefinder.py

@@ -1,436 +0,0 @@
-"""Find modules used by a script, using introspection."""
-
-import dis
-import imp
-import marshal
-import os
-import re
-import string
-import sys
-
-if sys.platform=="win32":
-    # On Windows, we can locate modules in the registry with
-    # the help of the win32api package.
-    try:
-        import win32api
-    except ImportError:
-        print "The win32api module is not available - modules listed"
-        print "in the registry will not be found."
-        win32api = None
-
-
-IMPORT_NAME = dis.opname.index('IMPORT_NAME')
-IMPORT_FROM = dis.opname.index('IMPORT_FROM')
-
-# Modulefinder does a good job at simulating Python's, but it can not
-# handle __path__ modifications packages make at runtime.  Therefore there
-# is a mechanism whereby you can register extra paths in this map for a
-# package, and it will be honoured.
-
-# Note this is a mapping is lists of paths.
-packagePathMap = {}
-
-# A Public interface
-def AddPackagePath(packagename, path):
-    paths = packagePathMap.get(packagename, [])
-    paths.append(path)
-    packagePathMap[packagename] = paths
-
-class Module:
-
-    def __init__(self, name, file=None, path=None):
-        self.__name__ = name
-        self.__file__ = file
-        self.__path__ = path
-        self.__code__ = None
-
-    def __repr__(self):
-        s = "Module(%s" % repr(self.__name__)
-        if self.__file__ is not None:
-            s = s + ", %s" % repr(self.__file__)
-        if self.__path__ is not None:
-            s = s + ", %s" % repr(self.__path__)
-        s = s + ")"
-        return s
-
-
-class ModuleFinder:
-
-    def __init__(self, path=None, debug=0, excludes = []):
-        if path is None:
-            path = sys.path
-        self.path = path
-        self.modules = {}
-        self.badmodules = {}
-        self.debug = debug
-        self.indent = 0
-        self.excludes = excludes
-
-    def msg(self, level, str, *args):
-        if level <= self.debug:
-            for i in range(self.indent):
-                print "   ",
-            print str,
-            for arg in args:
-                print repr(arg),
-            print
-
-    def msgin(self, *args):
-        level = args[0]
-        if level <= self.debug:
-            self.indent = self.indent + 1
-            apply(self.msg, args)
-
-    def msgout(self, *args):
-        level = args[0]
-        if level <= self.debug:
-            self.indent = self.indent - 1
-            apply(self.msg, args)
-
-    def run_script(self, pathname):
-        self.msg(2, "run_script", pathname)
-        fp = open(pathname)
-        stuff = ("", "r", imp.PY_SOURCE)
-        self.load_module('__main__', fp, pathname, stuff)
-
-    def load_file(self, pathname):
-        dir, name = os.path.split(pathname)
-        name, ext = os.path.splitext(name)
-        fp = open(pathname)
-        stuff = (ext, "r", imp.PY_SOURCE)
-        self.load_module(name, fp, pathname, stuff)
-
-    def import_hook(self, name, caller=None, fromlist=None):
-        self.msg(3, "import_hook", name, caller, fromlist)
-        parent = self.determine_parent(caller)
-        q, tail = self.find_head_package(parent, name)
-        m = self.load_tail(q, tail)
-        if not fromlist:
-            return q
-        if m.__path__:
-            self.ensure_fromlist(m, fromlist)
-
-    def determine_parent(self, caller):
-        self.msgin(4, "determine_parent", caller)
-        if not caller:
-            self.msgout(4, "determine_parent -> None")
-            return None
-        pname = caller.__name__
-        if caller.__path__:
-            parent = self.modules[pname]
-            assert caller is parent
-            self.msgout(4, "determine_parent ->", parent)
-            return parent
-        if '.' in pname:
-            i = string.rfind(pname, '.')
-            pname = pname[:i]
-            parent = self.modules[pname]
-            assert parent.__name__ == pname
-            self.msgout(4, "determine_parent ->", parent)
-            return parent
-        self.msgout(4, "determine_parent -> None")
-        return None
-
-    def find_head_package(self, parent, name):
-        self.msgin(4, "find_head_package", parent, name)
-        if '.' in name:
-            i = string.find(name, '.')
-            head = name[:i]
-            tail = name[i+1:]
-        else:
-            head = name
-            tail = ""
-        if parent:
-            qname = "%s.%s" % (parent.__name__, head)
-        else:
-            qname = head
-        q = self.import_module(head, qname, parent)
-        if q:
-            self.msgout(4, "find_head_package ->", (q, tail))
-            return q, tail
-        if parent:
-            qname = head
-            parent = None
-            q = self.import_module(head, qname, parent)
-            if q:
-                self.msgout(4, "find_head_package ->", (q, tail))
-                return q, tail
-        self.msgout(4, "raise ImportError: No module named", qname)
-        raise ImportError, "No module named " + qname
-
-    def load_tail(self, q, tail):
-        self.msgin(4, "load_tail", q, tail)
-        m = q
-        while tail:
-            i = string.find(tail, '.')
-            if i < 0: i = len(tail)
-            head, tail = tail[:i], tail[i+1:]
-            mname = "%s.%s" % (m.__name__, head)
-            m = self.import_module(head, mname, m)
-            if not m:
-                self.msgout(4, "raise ImportError: No module named", mname)
-                raise ImportError, "No module named " + mname
-        self.msgout(4, "load_tail ->", m)
-        return m
-
-    def ensure_fromlist(self, m, fromlist, recursive=0):
-        self.msg(4, "ensure_fromlist", m, fromlist, recursive)
-        for sub in fromlist:
-            if sub == "*":
-                if not recursive:
-                    all = self.find_all_submodules(m)
-                    if all:
-                        self.ensure_fromlist(m, all, 1)
-            elif not hasattr(m, sub):
-                subname = "%s.%s" % (m.__name__, sub)
-                submod = self.import_module(sub, subname, m)
-                if not submod:
-                    raise ImportError, "No module named " + subname
-
-    def find_all_submodules(self, m):
-        if not m.__path__:
-            return
-        modules = {}
-        suffixes = [".py", ".pyc", ".pyo"]
-        for dir in m.__path__:
-            try:
-                names = os.listdir(dir)
-            except os.error:
-                self.msg(2, "can't list directory", dir)
-                continue
-            for name in names:
-                mod = None
-                for suff in suffixes:
-                    n = len(suff)
-                    if name[-n:] == suff:
-                        mod = name[:-n]
-                        break
-                if mod and mod != "__init__":
-                    modules[mod] = mod
-        return modules.keys()
-
-    def import_module(self, partname, fqname, parent):
-        self.msgin(3, "import_module", partname, fqname, parent)
-        try:
-            m = self.modules[fqname]
-        except KeyError:
-            pass
-        else:
-            self.msgout(3, "import_module ->", m)
-            return m
-        if fqname in self.badmodules:
-            self.msgout(3, "import_module -> None")
-            self.badmodules[fqname][parent.__name__] = None
-            return None
-        try:
-            fp, pathname, stuff = self.find_module(partname,
-                                                   parent and parent.__path__)
-        except ImportError:
-            self.msgout(3, "import_module ->", None)
-            return None
-        try:
-            m = self.load_module(fqname, fp, pathname, stuff)
-        finally:
-            if fp: fp.close()
-        if parent:
-            setattr(parent, partname, m)
-        self.msgout(3, "import_module ->", m)
-        return m
-
-    def load_module(self, fqname, fp, pathname, (suffix, mode, type)):
-        self.msgin(2, "load_module", fqname, fp and "fp", pathname)
-        if type == imp.PKG_DIRECTORY:
-            m = self.load_package(fqname, pathname)
-            self.msgout(2, "load_module ->", m)
-            return m
-        if type == imp.PY_SOURCE:
-            co = compile(fp.read()+'\n', pathname, 'exec')
-        elif type == imp.PY_COMPILED:
-            if fp.read(4) != imp.get_magic():
-                self.msgout(2, "raise ImportError: Bad magic number", pathname)
-                raise ImportError, "Bad magic number in %s", pathname
-            fp.read(4)
-            co = marshal.load(fp)
-        else:
-            co = None
-        m = self.add_module(fqname)
-        m.__file__ = pathname
-        if co:
-            m.__code__ = co
-            self.scan_code(co, m)
-        self.msgout(2, "load_module ->", m)
-        return m
-
-    def scan_code(self, co, m):
-        code = co.co_code
-        n = len(code)
-        i = 0
-        lastname = None
-        while i < n:
-            c = code[i]
-            i = i+1
-            op = ord(c)
-            if op >= dis.HAVE_ARGUMENT:
-                oparg = ord(code[i]) + ord(code[i+1])*256
-                i = i+2
-            if op == IMPORT_NAME:
-                name = lastname = co.co_names[oparg]
-                if lastname not in self.badmodules:
-                    try:
-                        self.import_hook(name, m)
-                    except ImportError, msg:
-                        self.msg(2, "ImportError:", str(msg))
-                        if name not in self.badmodules:
-                            self.badmodules[name] = {}
-                        self.badmodules[name][m.__name__] = None
-            elif op == IMPORT_FROM:
-                name = co.co_names[oparg]
-                assert lastname is not None
-                if lastname not in self.badmodules:
-                    try:
-                        self.import_hook(lastname, m, [name])
-                    except ImportError, msg:
-                        self.msg(2, "ImportError:", str(msg))
-                        fullname = lastname + "." + name
-                        if fullname not in self.badmodules:
-                            self.badmodules[fullname] = {}
-                        self.badmodules[fullname][m.__name__] = None
-            else:
-                lastname = None
-        for c in co.co_consts:
-            if isinstance(c, type(co)):
-                self.scan_code(c, m)
-
-    def load_package(self, fqname, pathname):
-        self.msgin(2, "load_package", fqname, pathname)
-        m = self.add_module(fqname)
-        m.__file__ = pathname
-        m.__path__ = [pathname]
-
-        # As per comment at top of file, simulate runtime __path__ additions.
-        m.__path__ = m.__path__ + packagePathMap.get(fqname, [])
-
-        fp, buf, stuff = self.find_module("__init__", m.__path__)
-        self.load_module(fqname, fp, buf, stuff)
-        self.msgout(2, "load_package ->", m)
-        return m
-
-    def add_module(self, fqname):
-        if fqname in self.modules:
-            return self.modules[fqname]
-        self.modules[fqname] = m = Module(fqname)
-        return m
-
-    def find_module(self, name, path):
-        if name in self.excludes:
-            self.msgout(3, "find_module -> Excluded")
-            raise ImportError, name
-
-        if path is None:
-            if name in sys.builtin_module_names:
-                return (None, None, ("", "", imp.C_BUILTIN))
-
-            # Emulate the Registered Module support on Windows.
-            if sys.platform=="win32" and win32api is not None:
-                HKEY_LOCAL_MACHINE = 0x80000002
-                try:
-                    pathname = win32api.RegQueryValue(HKEY_LOCAL_MACHINE, "Software\\Python\\PythonCore\\%s\\Modules\\%s" % (sys.winver, name))
-                    fp = open(pathname, "rb")
-                    # XXX - To do - remove the hard code of C_EXTENSION.
-                    stuff = "", "rb", imp.C_EXTENSION
-                    return fp, pathname, stuff
-                except win32api.error:
-                    pass
-
-            path = self.path
-        return imp.find_module(name, path)
-
-    def report(self):
-        print
-        print "  %-25s %s" % ("Name", "File")
-        print "  %-25s %s" % ("----", "----")
-        # Print modules found
-        keys = self.modules.keys()
-        keys.sort()
-        for key in keys:
-            m = self.modules[key]
-            if m.__path__:
-                print "P",
-            else:
-                print "m",
-            print "%-25s" % key, m.__file__ or ""
-
-        # Print missing modules
-        keys = self.badmodules.keys()
-        keys.sort()
-        for key in keys:
-            # ... but not if they were explicitely excluded.
-            if key not in self.excludes:
-                mods = self.badmodules[key].keys()
-                mods.sort()
-                print "?", key, "from", string.join(mods, ', ')
-
-
-def test():
-    # Parse command line
-    import getopt
-    try:
-        opts, args = getopt.getopt(sys.argv[1:], "dmp:qx:")
-    except getopt.error, msg:
-        print msg
-        return
-
-    # Process options
-    debug = 1
-    domods = 0
-    addpath = []
-    exclude = []
-    for o, a in opts:
-        if o == '-d':
-            debug = debug + 1
-        if o == '-m':
-            domods = 1
-        if o == '-p':
-            addpath = addpath + string.split(a, os.pathsep)
-        if o == '-q':
-            debug = 0
-        if o == '-x':
-            exclude.append(a)
-
-    # Provide default arguments
-    if not args:
-        script = "hello.py"
-    else:
-        script = args[0]
-
-    # Set the path based on sys.path and the script directory
-    path = sys.path[:]
-    path[0] = os.path.dirname(script)
-    path = addpath + path
-    if debug > 1:
-        print "path:"
-        for item in path:
-            print "   ", repr(item)
-
-    # Create the module finder and turn its crank
-    mf = ModuleFinder(path, debug, exclude)
-    for arg in args[1:]:
-        if arg == '-m':
-            domods = 1
-            continue
-        if domods:
-            if arg[-2:] == '.*':
-                mf.import_hook(arg[:-2], None, ["*"])
-            else:
-                mf.import_hook(arg)
-        else:
-            mf.load_file(arg)
-    mf.run_script(script)
-    mf.report()
-
-
-if __name__ == '__main__':
-    try:
-        test()
-    except KeyboardInterrupt:
-        print "\n[interrupt]"

+ 0 - 317
direct/src/pyinst/resource.py

@@ -1,317 +0,0 @@
-import os
-import string
-import archivebuilder
-import carchive
-import tocfilter
-import bindepend
-import finder
-
-_cache = {}
-
-def makeresource(name, xtrapath=None):
-    """Factory function that returns a resource subclass.
-
-       NAME is the logical or physical name of a resource.
-       XTRAPTH is a path or list of paths to search first.
-       return one of the resource subclasses.
-       Warning - logical names can conflict; archive might return a directory,
-       when the module archive.py was desired."""
-    typ, nm, fullname = finder.identify(name, xtrapath)
-    fullname = os.path.normpath(fullname)
-    if fullname in _cache:
-        return _cache[fullname]
-    elif typ in (finder.SCRIPT, finder.GSCRIPT):
-        rsrc = scriptresource(nm, fullname)
-    elif typ == finder.MODULE:
-        rsrc = moduleresource(nm, fullname)
-    elif typ == finder.PACKAGE:
-        rsrc = pkgresource(nm, fullname)
-    elif typ in (finder.PBINARY, finder.BINARY):
-        rsrc = binaryresource(nm, fullname)
-    elif typ == finder.ZLIB:
-        rsrc = zlibresource(nm, fullname)
-    elif typ == finder.DIRECTORY:
-        rsrc = dirresource(nm, fullname)
-    else:
-        try:
-            carchive.CArchive(fullname)
-        except:
-            rsrc = dataresource(nm, fullname)
-        else:
-            rsrc = archiveresource(nm, fullname)
-    _cache[fullname] = rsrc
-    return rsrc
-
-class resource:
-    """ Base class for all resources.
-
-        contents() returns of list of what's contained (eg files in dirs)
-        dependencies() for Python resources returns a list of moduleresources
-         and binaryresources """
-    def __init__(self, name, path, typ):
-        """NAME is the logical name of the resource.
-           PATH is the full path to the resource.
-           TYP is the type code.
-           No editting or sanity checks."""
-        self.name = name
-        self.path = path
-        self.typ = typ
-    def __repr__(self):
-        return "(%(name)s, %(path)s, %(typ)s)" % self.__dict__
-    def contents(self):
-        """A list of resources within this resource.
-
-           Overridable.
-           Base implementation returns [self]"""
-        return [self]
-    def dependencies(self):
-        """A list of resources this resource requires.
-
-           Overridable.
-           Base implementation returns []"""
-        return []
-    def __cmp__(self, other):
-        if not isinstance(other, self.__class__):
-            return -1
-        return cmp((self.typ, self.name), (other.typ, other.name))
-    def asFilter(self):
-        """Create a tocfilter based on self.
-
-           Pure virtual"""
-        raise NotImplementedError
-    def asSource(self):
-        """Return self in source form.
-
-           Base implementation returns self"""
-        return self
-    def asBinary(self):
-        """Return self in binary form.
-
-           Base implementation returns self"""
-        return self
-
-class pythonresource(resource):
-    """An empty base class.
-
-       Used to classify resources."""
-    pass
-
-
-class scriptresource(pythonresource):
-    """ A top-level python resource.
-
-        Has (lazily computed) attributes, modules and binaries, which together
-        are the scripts dependencies() """
-    def __init__(self, name, fullname):
-        resource.__init__(self, name, fullname, 's')
-    def __getattr__(self, name):
-        if name == 'modules':
-            print "Analyzing python dependencies of", self.name, self.path
-            self.modules = []
-            self._binaries = []
-            nodes = string.split(self.name, '.')[:-1] # MEInc.Dist.archive -> ['MEInc', 'Dist']
-            for i in range(len(nodes)):
-                nm = string.join(nodes[:i+1], '.')
-                rsrc = makeresource(nm+'.__init__')
-                rsrc.name = nm
-                self.modules.append(rsrc)
-            for (nm, path) in archivebuilder.Dependencies(self.path):
-                path = os.path.normcase(os.path.abspath(path))
-                if os.path.splitext(path)[1] == '.py':
-                    self.modules.append(moduleresource(nm, path))
-                else:
-                    self._binaries.append(binaryresource(nm, path))
-            return self.modules
-        elif name == 'binaries':
-            x = self.modules
-            tmp = {}
-            for br in self._binaries:
-                tmp[br.name] = br
-                for br2 in br.dependencies():
-                    tmp[br2.name] = br2
-            self.binaries = tmp.values()
-            return self.binaries
-        else:
-            raise AttributeError, "%s" % name
-    def dependencies(self):
-        """Return all dependencies (Python and binary) of self."""
-        return self.modules + self.binaries
-    def asFilter(self):
-        """Return a ModFilter based on self."""
-        return tocfilter.ModFilter([self.name])
-    def asSource(self):
-        """Return self as a dataresource (ie, a text file wrapper)."""
-        r = dataresource(self.path)
-        r.name = apply(os.path.join, string.split(self.name, '.')[:-1]+[r.name])
-        return r
-
-class moduleresource(scriptresource):
-    """ A module resource (differs from script in that it will generally
-        be worked with as a .pyc instead of in source form) """
-    def __init__(self, name, fullname):
-        resource.__init__(self, name, fullname, 'm')
-    def asBinary(self):
-        """Return self as a dataresource (ie, a binary file wrapper)."""
-        r = dataresource(self.path)
-        r.name = os.path.basename(r.name)
-        r.typ = 'b'
-        return r
-    def asSource(self):
-        """Return self as a scriptresource (ie, uncompiled form)."""
-        return scriptresource(self.name, self.path[:-1]).asSource()
-
-class binaryresource(resource):
-    """A .dll or .pyd.
-
-       dependencies() yields more binaryresources """
-    def __init__(self, name, fullname):
-        if string.find(name, '.') == -1:
-            pth, bnm = os.path.split(fullname)
-            junk, ext = os.path.splitext(bnm)
-            fullname = os.path.join(pth, name + ext)
-        resource.__init__(self, name, fullname, 'b')
-        self._depends = None
-    def dependencies(self):
-        """Return a list of binary dependencies."""
-        if self._depends is not None:
-            return self._depends
-        self._depends = []
-        for (lib, path) in bindepend.Dependencies([(self.name, self.path)]):
-            self._depends.append(binaryresource(lib, path))
-        return self._depends
-    def asFilter(self):
-        """Create a FileFilter from self."""
-        return tocfilter.FileFilter([self.name])
-
-class dataresource(resource):
-    """A subclass for arbitrary files. """
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'x')
-    def asFilter(self):
-        """Create a FileFilter from self."""
-        return tocfilter.FileFilter([self.name])
-
-class archiveresource(dataresource):
-    """A sublcass for CArchives. """
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'a')
-
-class zlibresource(dataresource):
-    """A subclass for ZlibArchives. """
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'z')
-
-class dirresource(resource):
-    """A sublcass for a directory.
-
-       Generally transformed to a list of files through
-        contents() and filtered by file extensions or resource type.
-        Note that contents() is smart enough to regard a .py and .pyc
-        as the same resource. """
-    RECURSIVE = 0
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'd')
-        self._contents = None
-    def contents(self, prefix=''):
-        """Return the list of (typed) resources in self.name"""
-        if self._contents is not None:
-            return self._contents
-        self._contents = []
-        flist = os.listdir(self.path)
-        for fnm in flist:
-            try:
-                bnm, ext = os.path.splitext(fnm)
-                if ext == '.py' and (bnm+'.pyc' in flist or bnm+'.pyo' in flist):
-                    pass
-                elif ext == '.pyo' and (bnm + '.pyc' in flist):
-                    pass
-                else:
-                    rsrc = makeresource(os.path.join(self.path, fnm))
-                    if isinstance(rsrc, pkgresource):
-                        rsrc = self.__class__(rsrc.path)
-                    if self.RECURSIVE:
-                        if isinstance(rsrc, moduleresource) or isinstance(rsrc, scriptresource):
-                            rsrc = rsrc.asSource()
-                            fnm = os.path.basename(rsrc.path)
-                        rsrc.name = os.path.join(prefix, fnm)
-                        if rsrc.typ == 'd':
-                            rsrc.RECURSIVE = 1
-                            self._contents.extend(rsrc.contents(rsrc.name))
-                        else:
-                            self._contents.append(rsrc)
-                    else:
-                        self._contents.append(rsrc)
-            except ValueError, e:
-                raise RuntimeError, "Can't make resource from %s\n ValueError: %s" \
-                      % (os.path.join(self.path, fnm), repr(e.args))
-        return self._contents
-    def asFilter(self):
-        return tocfilter.DirFilter([self.path])
-
-class treeresource(dirresource):
-    """A subclass for a directory and subdirectories."""
-    RECURSIVE = 1
-    def __init__(self, name, fullname=None):
-        dirresource.__init__(self, name, fullname)
-
-class pkgresource(pythonresource):
-    """A Python package.
-
-        Note that contents() can be fooled by fancy __path__ statements. """
-    def __init__(self, nm, fullname):
-        resource.__init__(self, nm, fullname, 'p')
-        self._contents = None
-        self._depends = None
-    def contents(self, parent=None):
-        """Return a list of subpackages and modules in self."""
-        if self._contents is not None:
-            return self._contents
-        if parent is None:
-            parent = self.name
-        self._contents = []
-        cheat = treeresource(self.path)
-        for rsrc in cheat.contents():
-            if os.path.splitext(rsrc.path)[1] == '.py':
-                rsrc = moduleresource(string.replace(rsrc.name[:-3], os.sep, '.'),
-                                      rsrc.path)
-                if rsrc.name[-8:] == '__init__':
-                    rsrc.name = rsrc.name[:-9]
-            elif os.path.isdir(rsrc.path):
-                rsrc = makeresource(rsrc.path)
-            else:
-                continue
-            if rsrc.name:
-                rsrc.name = parent + '.' + rsrc.name
-            else:
-                rsrc.name = parent
-            if rsrc.typ == 'm':
-                self._contents.append(rsrc)
-            elif rsrc.typ == 'p':
-                self._contents.extend(rsrc.contents(rsrc.name))
-        return self._contents
-    def dependencies(self):
-        """Return the list of accumulated dependencies of all modules in self."""
-        if self._depends is not None:
-            return self._depends
-        self._depends = []
-        tmp = {}
-        for rsrc in self.contents():
-            for r in rsrc.dependencies():
-                tmp[r.name] = r
-        self._depends = tmp.values()
-        return self._depends
-    def asFilter(self):
-        """Create a PkgFilter from self."""
-        return tocfilter.PkgFilter([os.path.dirname(self.path)])
-
-
-
-
-
-
-
-if __name__ == '__main__':
-    s = scriptresource('finder.py', './finder.py')
-    print "s.modules:", s.modules
-    print "s.binaries:", s.binaries
-

+ 0 - 131
direct/src/pyinst/tocfilter.py

@@ -1,131 +0,0 @@
-import os
-import finder
-import re
-import sys
-
-def makefilter(name, xtrapath=None):
-    typ, nm, fullname = finder.identify(name, xtrapath)
-    if typ in (finder.SCRIPT, finder.GSCRIPT, finder.MODULE):
-        return ModFilter([os.path.splitext(nm)[0]])
-    if typ == finder.PACKAGE:
-        return PkgFilter([fullname])
-    if typ == finder.DIRECTORY:
-        return DirFilter([fullname])
-    if typ in (finder.BINARY, finder.PBINARY):
-        return FileFilter([nm])
-    return FileFilter([fullname])
-  
-class _Filter:
-    def __repr__(self):
-        return '<'+self.__class__.__name__+' '+repr(self.elements)+'>'
-    
-class _NameFilter(_Filter):
-    """ A filter mixin that matches (exactly) on name """
-    def matches(self, res):
-        return self.elements.get(res.name, 0)
-        
-class _PathFilter(_Filter):
-    """ A filter mixin that matches if the resource is below any of the paths"""
-    def matches(self, res):
-        p = os.path.normcase(os.path.abspath(res.path))
-        while len(p) > 3:
-            p = os.path.dirname(p)
-            if self.elements.get(p, 0):
-                return 1
-        return 0
-        
-class _ExtFilter(_Filter):
-    """ A filter mixin that matches based on file extensions (either way) """
-    include = 0
-    def matches(self, res):
-        fnd = self.elements.get(os.path.splitext(res.path)[1], 0)
-        if self.include:
-            return not fnd
-        return fnd
-    
-class _TypeFilter(_Filter):
-    """ A filter mixin that matches on resource type (either way) """
-    include = 0
-    def matches(self, res):
-        fnd = self.elements.get(res.typ, 0)
-        if self.include:
-            return not fnd
-        return fnd
-
-class _PatternFilter(_Filter):
-    """ A filter that matches if re.search succeeds on the resource path """
-    def matches(self, res):
-        for regex in self.elements:
-            if regex.search(res.path):
-                return 1
-        return 0
-    
-class ExtFilter(_ExtFilter):
-    """ A file extension filter.
-        ExtFilter(extlist, include=0)
-        where extlist is a list of file extensions """
-    def __init__(self, extlist, include=0):
-        self.elements = {}
-        for ext in extlist:
-            if ext[0:1] != '.':
-                ext = '.'+ext
-            self.elements[ext] = 1
-        self.include = include
-
-class TypeFilter(_TypeFilter):
-    """ A filter for resource types.
-        TypeFilter(typlist, include=0)
-        where typlist is a subset of ['a','b','d','m','p','s','x','z'] """
-    def __init__(self, typlist, include=0):
-        self.elements = {}
-        for typ in typlist:
-            self.elements[typ] = 1
-        self.include = include
-
-class FileFilter(_NameFilter):
-    """ A filter for data files """
-    def __init__(self, filelist):
-        self.elements = {}
-        for f in filelist:
-            self.elements[f] = 1
-              
-class ModFilter(_NameFilter):
-    """ A filter for Python modules.
-        ModFilter(modlist) where modlist is eg ['macpath', 'dospath'] """
-    def __init__(self, modlist):
-        self.elements = {}
-        for mod in modlist:
-            self.elements[mod] = 1
-            
-class DirFilter(_PathFilter):
-    """ A filter based on directories.
-        DirFilter(dirlist)
-        dirs may be relative and will be normalized.
-        Subdirectories of dirs will be excluded. """
-    def __init__(self, dirlist):
-        self.elements = {}
-        for pth in dirlist:
-            pth = os.path.normcase(os.path.abspath(pth))
-            self.elements[pth] = 1
-            
-class PkgFilter(_PathFilter):
-    """At this time, identical to a DirFilter (being lazy) """
-    def __init__(self, pkglist):
-        #warning - pkgs are expected to be full directories
-        self.elements = {}
-        for pkg in pkglist:
-            pth = os.path.normcase(os.path.abspath(pkg))
-            self.elements[pth] = 1
-            
-class StdLibFilter(_PathFilter):
-    """ A filter that excludes anything found in the standard library """
-    def __init__(self):
-        pth = os.path.normcase(os.path.join(sys.exec_prefix, 'lib'))
-        self.elements = {pth:1}
-     
-class PatternFilter(_PatternFilter):
-    """ A filter that excludes if any pattern is found in resource's path """
-    def __init__(self, patterns):
-        self.elements = []
-        for pat in patterns:
-            self.elements.append(re.compile(pat))

+ 0 - 1251
direct/src/showbase/ElementTree.py

@@ -1,1251 +0,0 @@
-#
-# ElementTree
-# $Id$
-#
-# light-weight XML support for Python 1.5.2 and later.
-#
-# history:
-# 2001-10-20 fl   created (from various sources)
-# 2001-11-01 fl   return root from parse method
-# 2002-02-16 fl   sort attributes in lexical order
-# 2002-04-06 fl   TreeBuilder refactoring, added PythonDoc markup
-# 2002-05-01 fl   finished TreeBuilder refactoring
-# 2002-07-14 fl   added basic namespace support to ElementTree.write
-# 2002-07-25 fl   added QName attribute support
-# 2002-10-20 fl   fixed encoding in write
-# 2002-11-24 fl   changed default encoding to ascii; fixed attribute encoding
-# 2002-11-27 fl   accept file objects or file names for parse/write
-# 2002-12-04 fl   moved XMLTreeBuilder back to this module
-# 2003-01-11 fl   fixed entity encoding glitch for us-ascii
-# 2003-02-13 fl   added XML literal factory
-# 2003-02-21 fl   added ProcessingInstruction/PI factory
-# 2003-05-11 fl   added tostring/fromstring helpers
-# 2003-05-26 fl   added ElementPath support
-# 2003-07-05 fl   added makeelement factory method
-# 2003-07-28 fl   added more well-known namespace prefixes
-# 2003-08-15 fl   fixed typo in ElementTree.findtext (Thomas Dartsch)
-# 2003-09-04 fl   fall back on emulator if ElementPath is not installed
-# 2003-10-31 fl   markup updates
-# 2003-11-15 fl   fixed nested namespace bug
-# 2004-03-28 fl   added XMLID helper
-# 2004-06-02 fl   added default support to findtext
-# 2004-06-08 fl   fixed encoding of non-ascii element/attribute names
-# 2004-08-23 fl   take advantage of post-2.1 expat features
-# 2005-02-01 fl   added iterparse implementation
-# 2005-03-02 fl   fixed iterparse support for pre-2.2 versions
-#
-# Copyright (c) 1999-2005 by Fredrik Lundh.  All rights reserved.
-#
-# [email protected]
-# http://www.pythonware.com
-#
-# --------------------------------------------------------------------
-# The ElementTree toolkit is
-#
-# Copyright (c) 1999-2005 by Fredrik Lundh
-#
-# By obtaining, using, and/or copying this software and/or its
-# associated documentation, you agree that you have read, understood,
-# and will comply with the following terms and conditions:
-#
-# Permission to use, copy, modify, and distribute this software and
-# its associated documentation for any purpose and without fee is
-# hereby granted, provided that the above copyright notice appears in
-# all copies, and that both that copyright notice and this permission
-# notice appear in supporting documentation, and that the name of
-# Secret Labs AB or the author not be used in advertising or publicity
-# pertaining to distribution of the software without specific, written
-# prior permission.
-#
-# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
-# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
-# ABILITY AND FITNESS.  IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
-# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
-# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
-# OF THIS SOFTWARE.
-# --------------------------------------------------------------------
-
-__all__ = [
-    # public symbols
-    "Comment",
-    "dump",
-    "Element", "ElementTree",
-    "fromstring",
-    "iselement", "iterparse",
-    "parse",
-    "PI", "ProcessingInstruction",
-    "QName",
-    "SubElement",
-    "tostring",
-    "TreeBuilder",
-    "VERSION", "XML",
-    "XMLTreeBuilder",
-    ]
-
-##
-# The <b>Element</b> type is a flexible container object, designed to
-# store hierarchical data structures in memory. The type can be
-# described as a cross between a list and a dictionary.
-# <p>
-# Each element has a number of properties associated with it:
-# <ul>
-# <li>a <i>tag</i>. This is a string identifying what kind of data
-# this element represents (the element type, in other words).</li>
-# <li>a number of <i>attributes</i>, stored in a Python dictionary.</li>
-# <li>a <i>text</i> string.</li>
-# <li>an optional <i>tail</i> string.</li>
-# <li>a number of <i>child elements</i>, stored in a Python sequence</li>
-# </ul>
-#
-# To create an element instance, use the {@link #Element} or {@link
-# #SubElement} factory functions.
-# <p>
-# The {@link #ElementTree} class can be used to wrap an element
-# structure, and convert it from and to XML.
-##
-
-import string, sys, re
-
-class _SimpleElementPath:
-    # emulate pre-1.2 find/findtext/findall behaviour
-    def find(self, element, tag):
-        for elem in element:
-            if elem.tag == tag:
-                return elem
-        return None
-    def findtext(self, element, tag, default=None):
-        for elem in element:
-            if elem.tag == tag:
-                return elem.text or ""
-        return default
-    def findall(self, element, tag):
-        if tag[:3] == ".//":
-            return element.getiterator(tag[3:])
-        result = []
-        for elem in element:
-            if elem.tag == tag:
-                result.append(elem)
-        return result
-
-try:
-    import ElementPath
-except ImportError:
-    # FIXME: issue warning in this case?
-    ElementPath = _SimpleElementPath()
-
-# TODO: add support for custom namespace resolvers/default namespaces
-# TODO: add improved support for incremental parsing
-
-VERSION = "1.2.6"
-
-##
-# Internal element class.  This class defines the Element interface,
-# and provides a reference implementation of this interface.
-# <p>
-# You should not create instances of this class directly.  Use the
-# appropriate factory functions instead, such as {@link #Element}
-# and {@link #SubElement}.
-#
-# @see Element
-# @see SubElement
-# @see Comment
-# @see ProcessingInstruction
-
-class _ElementInterface:
-    # <tag attrib>text<child/>...</tag>tail
-
-    ##
-    # (Attribute) Element tag.
-
-    tag = None
-
-    ##
-    # (Attribute) Element attribute dictionary.  Where possible, use
-    # {@link #_ElementInterface.get},
-    # {@link #_ElementInterface.set},
-    # {@link #_ElementInterface.keys}, and
-    # {@link #_ElementInterface.items} to access
-    # element attributes.
-
-    attrib = None
-
-    ##
-    # (Attribute) Text before first subelement.  This is either a
-    # string or the value None, if there was no text.
-
-    text = None
-
-    ##
-    # (Attribute) Text after this element's end tag, but before the
-    # next sibling element's start tag.  This is either a string or
-    # the value None, if there was no text.
-
-    tail = None # text after end tag, if any
-
-    def __init__(self, tag, attrib):
-        self.tag = tag
-        self.attrib = attrib
-        self._children = []
-
-    def __repr__(self):
-        return "<Element %s at %x>" % (self.tag, id(self))
-
-    ##
-    # Creates a new element object of the same type as this element.
-    #
-    # @param tag Element tag.
-    # @param attrib Element attributes, given as a dictionary.
-    # @return A new element instance.
-
-    def makeelement(self, tag, attrib):
-        return Element(tag, attrib)
-
-    ##
-    # Returns the number of subelements.
-    #
-    # @return The number of subelements.
-
-    def __len__(self):
-        return len(self._children)
-
-    ##
-    # Returns the given subelement.
-    #
-    # @param index What subelement to return.
-    # @return The given subelement.
-    # @exception IndexError If the given element does not exist.
-
-    def __getitem__(self, index):
-        return self._children[index]
-
-    ##
-    # Replaces the given subelement.
-    #
-    # @param index What subelement to replace.
-    # @param element The new element value.
-    # @exception IndexError If the given element does not exist.
-    # @exception AssertionError If element is not a valid object.
-
-    def __setitem__(self, index, element):
-        assert iselement(element)
-        self._children[index] = element
-
-    ##
-    # Deletes the given subelement.
-    #
-    # @param index What subelement to delete.
-    # @exception IndexError If the given element does not exist.
-
-    def __delitem__(self, index):
-        del self._children[index]
-
-    ##
-    # Returns a list containing subelements in the given range.
-    #
-    # @param start The first subelement to return.
-    # @param stop The first subelement that shouldn't be returned.
-    # @return A sequence object containing subelements.
-
-    def __getslice__(self, start, stop):
-        return self._children[start:stop]
-
-    ##
-    # Replaces a number of subelements with elements from a sequence.
-    #
-    # @param start The first subelement to replace.
-    # @param stop The first subelement that shouldn't be replaced.
-    # @param elements A sequence object with zero or more elements.
-    # @exception AssertionError If a sequence member is not a valid object.
-
-    def __setslice__(self, start, stop, elements):
-        for element in elements:
-            assert iselement(element)
-        self._children[start:stop] = list(elements)
-
-    ##
-    # Deletes a number of subelements.
-    #
-    # @param start The first subelement to delete.
-    # @param stop The first subelement to leave in there.
-
-    def __delslice__(self, start, stop):
-        del self._children[start:stop]
-
-    ##
-    # Adds a subelement to the end of this element.
-    #
-    # @param element The element to add.
-    # @exception AssertionError If a sequence member is not a valid object.
-
-    def append(self, element):
-        assert iselement(element)
-        self._children.append(element)
-
-    ##
-    # Inserts a subelement at the given position in this element.
-    #
-    # @param index Where to insert the new subelement.
-    # @exception AssertionError If the element is not a valid object.
-
-    def insert(self, index, element):
-        assert iselement(element)
-        self._children.insert(index, element)
-
-    ##
-    # Removes a matching subelement.  Unlike the <b>find</b> methods,
-    # this method compares elements based on identity, not on tag
-    # value or contents.
-    #
-    # @param element What element to remove.
-    # @exception ValueError If a matching element could not be found.
-    # @exception AssertionError If the element is not a valid object.
-
-    def remove(self, element):
-        assert iselement(element)
-        self._children.remove(element)
-
-    ##
-    # Returns all subelements.  The elements are returned in document
-    # order.
-    #
-    # @return A list of subelements.
-    # @defreturn list of Element instances
-
-    def getchildren(self):
-        return self._children
-
-    ##
-    # Finds the first matching subelement, by tag name or path.
-    #
-    # @param path What element to look for.
-    # @return The first matching element, or None if no element was found.
-    # @defreturn Element or None
-
-    def find(self, path):
-        return ElementPath.find(self, path)
-
-    ##
-    # Finds text for the first matching subelement, by tag name or path.
-    #
-    # @param path What element to look for.
-    # @param default What to return if the element was not found.
-    # @return The text content of the first matching element, or the
-    #     default value no element was found.  Note that if the element
-    #     has is found, but has no text content, this method returns an
-    #     empty string.
-    # @defreturn string
-
-    def findtext(self, path, default=None):
-        return ElementPath.findtext(self, path, default)
-
-    ##
-    # Finds all matching subelements, by tag name or path.
-    #
-    # @param path What element to look for.
-    # @return A list or iterator containing all matching elements,
-    #    in document order.
-    # @defreturn list of Element instances
-
-    def findall(self, path):
-        return ElementPath.findall(self, path)
-
-    ##
-    # Resets an element.  This function removes all subelements, clears
-    # all attributes, and sets the text and tail attributes to None.
-
-    def clear(self):
-        self.attrib.clear()
-        self._children = []
-        self.text = self.tail = None
-
-    ##
-    # Gets an element attribute.
-    #
-    # @param key What attribute to look for.
-    # @param default What to return if the attribute was not found.
-    # @return The attribute value, or the default value, if the
-    #     attribute was not found.
-    # @defreturn string or None
-
-    def get(self, key, default=None):
-        return self.attrib.get(key, default)
-
-    ##
-    # Sets an element attribute.
-    #
-    # @param key What attribute to set.
-    # @param value The attribute value.
-
-    def set(self, key, value):
-        self.attrib[key] = value
-
-    ##
-    # Gets a list of attribute names.  The names are returned in an
-    # arbitrary order (just like for an ordinary Python dictionary).
-    #
-    # @return A list of element attribute names.
-    # @defreturn list of strings
-
-    def keys(self):
-        return self.attrib.keys()
-
-    ##
-    # Gets element attributes, as a sequence.  The attributes are
-    # returned in an arbitrary order.
-    #
-    # @return A list of (name, value) tuples for all attributes.
-    # @defreturn list of (string, string) tuples
-
-    def items(self):
-        return self.attrib.items()
-
-    ##
-    # Creates a tree iterator.  The iterator loops over this element
-    # and all subelements, in document order, and returns all elements
-    # with a matching tag.
-    # <p>
-    # If the tree structure is modified during iteration, the result
-    # is undefined.
-    #
-    # @param tag What tags to look for (default is to return all elements).
-    # @return A list or iterator containing all the matching elements.
-    # @defreturn list or iterator
-
-    def getiterator(self, tag=None):
-        nodes = []
-        if tag == "*":
-            tag = None
-        if tag is None or self.tag == tag:
-            nodes.append(self)
-        for node in self._children:
-            nodes.extend(node.getiterator(tag))
-        return nodes
-
-# compatibility
-_Element = _ElementInterface
-
-##
-# Element factory.  This function returns an object implementing the
-# standard Element interface.  The exact class or type of that object
-# is implementation dependent, but it will always be compatible with
-# the {@link #_ElementInterface} class in this module.
-# <p>
-# The element name, attribute names, and attribute values can be
-# either 8-bit ASCII strings or Unicode strings.
-#
-# @param tag The element name.
-# @param attrib An optional dictionary, containing element attributes.
-# @param **extra Additional attributes, given as keyword arguments.
-# @return An element instance.
-# @defreturn Element
-
-def Element(tag, attrib={}, **extra):
-    attrib = attrib.copy()
-    attrib.update(extra)
-    return _ElementInterface(tag, attrib)
-
-##
-# Subelement factory.  This function creates an element instance, and
-# appends it to an existing element.
-# <p>
-# The element name, attribute names, and attribute values can be
-# either 8-bit ASCII strings or Unicode strings.
-#
-# @param parent The parent element.
-# @param tag The subelement name.
-# @param attrib An optional dictionary, containing element attributes.
-# @param **extra Additional attributes, given as keyword arguments.
-# @return An element instance.
-# @defreturn Element
-
-def SubElement(parent, tag, attrib={}, **extra):
-    attrib = attrib.copy()
-    attrib.update(extra)
-    element = parent.makeelement(tag, attrib)
-    parent.append(element)
-    return element
-
-##
-# Comment element factory.  This factory function creates a special
-# element that will be serialized as an XML comment.
-# <p>
-# The comment string can be either an 8-bit ASCII string or a Unicode
-# string.
-#
-# @param text A string containing the comment string.
-# @return An element instance, representing a comment.
-# @defreturn Element
-
-def Comment(text=None):
-    element = Element(Comment)
-    element.text = text
-    return element
-
-##
-# PI element factory.  This factory function creates a special element
-# that will be serialized as an XML processing instruction.
-#
-# @param target A string containing the PI target.
-# @param text A string containing the PI contents, if any.
-# @return An element instance, representing a PI.
-# @defreturn Element
-
-def ProcessingInstruction(target, text=None):
-    element = Element(ProcessingInstruction)
-    element.text = target
-    if text:
-        element.text = element.text + " " + text
-    return element
-
-PI = ProcessingInstruction
-
-##
-# QName wrapper.  This can be used to wrap a QName attribute value, in
-# order to get proper namespace handling on output.
-#
-# @param text A string containing the QName value, in the form {uri}local,
-#     or, if the tag argument is given, the URI part of a QName.
-# @param tag Optional tag.  If given, the first argument is interpreted as
-#     an URI, and this argument is interpreted as a local name.
-# @return An opaque object, representing the QName.
-
-class QName:
-    def __init__(self, text_or_uri, tag=None):
-        if tag:
-            text_or_uri = "{%s}%s" % (text_or_uri, tag)
-        self.text = text_or_uri
-    def __str__(self):
-        return self.text
-    def __hash__(self):
-        return hash(self.text)
-    def __cmp__(self, other):
-        if isinstance(other, QName):
-            return cmp(self.text, other.text)
-        return cmp(self.text, other)
-
-##
-# ElementTree wrapper class.  This class represents an entire element
-# hierarchy, and adds some extra support for serialization to and from
-# standard XML.
-#
-# @param element Optional root element.
-# @keyparam file Optional file handle or name.  If given, the
-#     tree is initialized with the contents of this XML file.
-
-class ElementTree:
-
-    def __init__(self, element=None, file=None):
-        assert element is None or iselement(element)
-        self._root = element # first node
-        if file:
-            self.parse(file)
-
-    ##
-    # Gets the root element for this tree.
-    #
-    # @return An element instance.
-    # @defreturn Element
-
-    def getroot(self):
-        return self._root
-
-    ##
-    # Replaces the root element for this tree.  This discards the
-    # current contents of the tree, and replaces it with the given
-    # element.  Use with care.
-    #
-    # @param element An element instance.
-
-    def _setroot(self, element):
-        assert iselement(element)
-        self._root = element
-
-    ##
-    # Loads an external XML document into this element tree.
-    #
-    # @param source A file name or file object.
-    # @param parser An optional parser instance.  If not given, the
-    #     standard {@link XMLTreeBuilder} parser is used.
-    # @return The document root element.
-    # @defreturn Element
-
-    def parse(self, source, parser=None):
-        if not hasattr(source, "read"):
-            source = open(source, "rb")
-        if not parser:
-            parser = XMLTreeBuilder()
-        while 1:
-            data = source.read(32768)
-            if not data:
-                break
-            parser.feed(data)
-        self._root = parser.close()
-        return self._root
-
-    ##
-    # Creates a tree iterator for the root element.  The iterator loops
-    # over all elements in this tree, in document order.
-    #
-    # @param tag What tags to look for (default is to return all elements)
-    # @return An iterator.
-    # @defreturn iterator
-
-    def getiterator(self, tag=None):
-        assert self._root is not None
-        return self._root.getiterator(tag)
-
-    ##
-    # Finds the first toplevel element with given tag.
-    # Same as getroot().find(path).
-    #
-    # @param path What element to look for.
-    # @return The first matching element, or None if no element was found.
-    # @defreturn Element or None
-
-    def find(self, path):
-        assert self._root is not None
-        if path[:1] == "/":
-            path = "." + path
-        return self._root.find(path)
-
-    ##
-    # Finds the element text for the first toplevel element with given
-    # tag.  Same as getroot().findtext(path).
-    #
-    # @param path What toplevel element to look for.
-    # @param default What to return if the element was not found.
-    # @return The text content of the first matching element, or the
-    #     default value no element was found.  Note that if the element
-    #     has is found, but has no text content, this method returns an
-    #     empty string.
-    # @defreturn string
-
-    def findtext(self, path, default=None):
-        assert self._root is not None
-        if path[:1] == "/":
-            path = "." + path
-        return self._root.findtext(path, default)
-
-    ##
-    # Finds all toplevel elements with the given tag.
-    # Same as getroot().findall(path).
-    #
-    # @param path What element to look for.
-    # @return A list or iterator containing all matching elements,
-    #    in document order.
-    # @defreturn list of Element instances
-
-    def findall(self, path):
-        assert self._root is not None
-        if path[:1] == "/":
-            path = "." + path
-        return self._root.findall(path)
-
-    ##
-    # Writes the element tree to a file, as XML.
-    #
-    # @param file A file name, or a file object opened for writing.
-    # @param encoding Optional output encoding (default is US-ASCII).
-
-    def write(self, file, encoding="us-ascii"):
-        assert self._root is not None
-        if not hasattr(file, "write"):
-            file = open(file, "wb")
-        if not encoding:
-            encoding = "us-ascii"
-        elif encoding != "utf-8" and encoding != "us-ascii":
-            file.write("<?xml version='1.0' encoding='%s'?>\n" % encoding)
-        self._write(file, self._root, encoding, {})
-
-    def _write(self, file, node, encoding, namespaces):
-        # write XML to file
-        tag = node.tag
-        if tag is Comment:
-            file.write("<!-- %s -->" % _escape_cdata(node.text, encoding))
-        elif tag is ProcessingInstruction:
-            file.write("<?%s?>" % _escape_cdata(node.text, encoding))
-        else:
-            items = node.items()
-            xmlns_items = [] # new namespaces in this scope
-            try:
-                if isinstance(tag, QName) or tag[:1] == "{":
-                    tag, xmlns = fixtag(tag, namespaces)
-                    if xmlns: xmlns_items.append(xmlns)
-            except TypeError:
-                _raise_serialization_error(tag)
-            file.write("<" + _encode(tag, encoding))
-            if items or xmlns_items:
-                items.sort() # lexical order
-                for k, v in items:
-                    try:
-                        if isinstance(k, QName) or k[:1] == "{":
-                            k, xmlns = fixtag(k, namespaces)
-                            if xmlns: xmlns_items.append(xmlns)
-                    except TypeError:
-                        _raise_serialization_error(k)
-                    try:
-                        if isinstance(v, QName):
-                            v, xmlns = fixtag(v, namespaces)
-                            if xmlns: xmlns_items.append(xmlns)
-                    except TypeError:
-                        _raise_serialization_error(v)
-                    file.write(" %s=\"%s\"" % (_encode(k, encoding),
-                                               _escape_attrib(v, encoding)))
-                for k, v in xmlns_items:
-                    file.write(" %s=\"%s\"" % (_encode(k, encoding),
-                                               _escape_attrib(v, encoding)))
-            if node.text or len(node):
-                file.write(">")
-                if node.text:
-                    file.write(_escape_cdata(node.text, encoding))
-                for n in node:
-                    self._write(file, n, encoding, namespaces)
-                file.write("</" + _encode(tag, encoding) + ">")
-            else:
-                file.write(" />")
-            for k, v in xmlns_items:
-                del namespaces[v]
-        if node.tail:
-            file.write(_escape_cdata(node.tail, encoding))
-
-# --------------------------------------------------------------------
-# helpers
-
-##
-# Checks if an object appears to be a valid element object.
-#
-# @param An element instance.
-# @return A true value if this is an element object.
-# @defreturn flag
-
-def iselement(element):
-    # FIXME: not sure about this; might be a better idea to look
-    # for tag/attrib/text attributes
-    return isinstance(element, _ElementInterface) or hasattr(element, "tag")
-
-##
-# Writes an element tree or element structure to sys.stdout.  This
-# function should be used for debugging only.
-# <p>
-# The exact output format is implementation dependent.  In this
-# version, it's written as an ordinary XML file.
-#
-# @param elem An element tree or an individual element.
-
-def dump(elem):
-    # debugging
-    if not isinstance(elem, ElementTree):
-        elem = ElementTree(elem)
-    elem.write(sys.stdout)
-    tail = elem.getroot().tail
-    if not tail or tail[-1] != "\n":
-        sys.stdout.write("\n")
-
-def _encode(s, encoding):
-    try:
-        return s.encode(encoding)
-    except AttributeError:
-        return s # 1.5.2: assume the string uses the right encoding
-
-_escape = re.compile(u"[&<>\"\u0080-\uffff]+")
-
-_escape_map = {
-    "&": "&amp;",
-    "<": "&lt;",
-    ">": "&gt;",
-    '"': "&quot;",
-}
-
-_namespace_map = {
-    # "well-known" namespace prefixes
-    "http://www.w3.org/XML/1998/namespace": "xml",
-    "http://www.w3.org/1999/xhtml": "html",
-    "http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf",
-    "http://schemas.xmlsoap.org/wsdl/": "wsdl",
-}
-
-def _raise_serialization_error(text):
-    raise TypeError(
-        "cannot serialize %r (type %s)" % (text, type(text).__name__)
-        )
-
-def _encode_entity(text, pattern=_escape):
-    # map reserved and non-ascii characters to numerical entities
-    def escape_entities(m, map=_escape_map):
-        out = []
-        append = out.append
-        for char in m.group():
-            text = map.get(char)
-            if text is None:
-                text = "&#%d;" % ord(char)
-            append(text)
-        return string.join(out, "")
-    try:
-        return _encode(pattern.sub(escape_entities, text), "ascii")
-    except TypeError:
-        _raise_serialization_error(text)
-
-#
-# the following functions assume an ascii-compatible encoding
-# (or "utf-16")
-
-def _escape_cdata(text, encoding=None):
-    # escape character data
-    try:
-        if encoding:
-            try:
-                text = _encode(text, encoding)
-            except UnicodeError:
-                return _encode_entity(text)
-        text = text.replace("&", "&amp;")
-        text = text.replace("<", "&lt;")
-        text = text.replace( ">", "&gt;")
-        return text
-    except (TypeError, AttributeError):
-        _raise_serialization_error(text)
-
-def _escape_attrib(text, encoding=None):
-    # escape attribute value
-    try:
-        if encoding:
-            try:
-                text = _encode(text, encoding)
-            except UnicodeError:
-                return _encode_entity(text)
-        text = text.replace("&", "&amp;")
-        text = text.replace("'", "&apos;") # FIXME: overkill
-        text = text.replace("\"", "&quot;")
-        text = text.replace("<", "&lt;")
-        text = text.replace(">", "&gt;")
-        return text
-    except (TypeError, AttributeError):
-        _raise_serialization_error(text)
-
-def fixtag(tag, namespaces):
-    # given a decorated tag (of the form {uri}tag), return prefixed
-    # tag and namespace declaration, if any
-    if isinstance(tag, QName):
-        tag = tag.text
-    namespace_uri, tag = string.split(tag[1:], "}", 1)
-    prefix = namespaces.get(namespace_uri)
-    if prefix is None:
-        prefix = _namespace_map.get(namespace_uri)
-        if prefix is None:
-            prefix = "ns%d" % len(namespaces)
-        namespaces[namespace_uri] = prefix
-        if prefix == "xml":
-            xmlns = None
-        else:
-            xmlns = ("xmlns:%s" % prefix, namespace_uri)
-    else:
-        xmlns = None
-    return "%s:%s" % (prefix, tag), xmlns
-
-##
-# Parses an XML document into an element tree.
-#
-# @param source A filename or file object containing XML data.
-# @param parser An optional parser instance.  If not given, the
-#     standard {@link XMLTreeBuilder} parser is used.
-# @return An ElementTree instance
-
-def parse(source, parser=None):
-    tree = ElementTree()
-    tree.parse(source, parser)
-    return tree
-
-##
-# Parses an XML document into an element tree incrementally, and reports
-# what's going on to the user.
-#
-# @param source A filename or file object containing XML data.
-# @param events A list of events to report back.  If omitted, only "end"
-#     events are reported.
-# @return A (event, elem) iterator.
-
-class iterparse:
-
-    def __init__(self, source, events=None):
-        if not hasattr(source, "read"):
-            source = open(source, "rb")
-        self._file = source
-        self._events = []
-        self._index = 0
-        self.root = self._root = None
-        self._parser = XMLTreeBuilder()
-        # wire up the parser for event reporting
-        parser = self._parser._parser
-        append = self._events.append
-        if events is None:
-            events = ["end"]
-        for event in events:
-            if event == "start":
-                try:
-                    parser.ordered_attributes = 1
-                    parser.specified_attributes = 1
-                    def handler(tag, attrib_in, event=event, append=append,
-                                start=self._parser._start_list):
-                        append((event, start(tag, attrib_in)))
-                    parser.StartElementHandler = handler
-                except AttributeError:
-                    def handler(tag, attrib_in, event=event, append=append,
-                                start=self._parser._start):
-                        append((event, start(tag, attrib_in)))
-                    parser.StartElementHandler = handler
-            elif event == "end":
-                def handler(tag, event=event, append=append,
-                            end=self._parser._end):
-                    append((event, end(tag)))
-                parser.EndElementHandler = handler
-            elif event == "start-ns":
-                def handler(prefix, uri, event=event, append=append):
-                    try:
-                        uri = _encode(uri, "ascii")
-                    except UnicodeError:
-                        pass
-                    append((event, (prefix or "", uri)))
-                parser.StartNamespaceDeclHandler = handler
-            elif event == "end-ns":
-                def handler(prefix, event=event, append=append):
-                    append((event, None))
-                parser.EndNamespaceDeclHandler = handler
-
-    def next(self):
-        while 1:
-            try:
-                item = self._events[self._index]
-            except IndexError:
-                if self._parser is None:
-                    self.root = self._root
-                    try:
-                        raise StopIteration
-                    except NameError:
-                        raise IndexError
-                # load event buffer
-                del self._events[:]
-                self._index = 0
-                data = self._file.read(16384)
-                if data:
-                    self._parser.feed(data)
-                else:
-                    self._root = self._parser.close()
-                    self._parser = None
-            else:
-                self._index = self._index + 1
-                return item
-
-    try:
-        iter
-        def __iter__(self):
-            return self
-    except NameError:
-        def __getitem__(self, index):
-            return self.next()
-
-##
-# Parses an XML document from a string constant.  This function can
-# be used to embed "XML literals" in Python code.
-#
-# @param source A string containing XML data.
-# @return An Element instance.
-# @defreturn Element
-
-def XML(text):
-    parser = XMLTreeBuilder()
-    parser.feed(text)
-    return parser.close()
-
-##
-# Parses an XML document from a string constant, and also returns
-# a dictionary which maps from element id:s to elements.
-#
-# @param source A string containing XML data.
-# @return A tuple containing an Element instance and a dictionary.
-# @defreturn (Element, dictionary)
-
-def XMLID(text):
-    parser = XMLTreeBuilder()
-    parser.feed(text)
-    tree = parser.close()
-    ids = {}
-    for elem in tree.getiterator():
-        id = elem.get("id")
-        if id:
-            ids[id] = elem
-    return tree, ids
-
-##
-# Parses an XML document from a string constant.  Same as {@link #XML}.
-#
-# @def fromstring(text)
-# @param source A string containing XML data.
-# @return An Element instance.
-# @defreturn Element
-
-fromstring = XML
-
-##
-# Generates a string representation of an XML element, including all
-# subelements.
-#
-# @param element An Element instance.
-# @return An encoded string containing the XML data.
-# @defreturn string
-
-def tostring(element, encoding=None):
-    class dummy:
-        pass
-    data = []
-    file = dummy()
-    file.write = data.append
-    ElementTree(element).write(file, encoding)
-    return string.join(data, "")
-
-##
-# Generic element structure builder.  This builder converts a sequence
-# of {@link #TreeBuilder.start}, {@link #TreeBuilder.data}, and {@link
-# #TreeBuilder.end} method calls to a well-formed element structure.
-# <p>
-# You can use this class to build an element structure using a custom XML
-# parser, or a parser for some other XML-like format.
-#
-# @param element_factory Optional element factory.  This factory
-#    is called to create new Element instances, as necessary.
-
-class TreeBuilder:
-
-    def __init__(self, element_factory=None):
-        self._data = [] # data collector
-        self._elem = [] # element stack
-        self._last = None # last element
-        self._tail = None # true if we're after an end tag
-        if element_factory is None:
-            element_factory = _ElementInterface
-        self._factory = element_factory
-
-    ##
-    # Flushes the parser buffers, and returns the toplevel documen
-    # element.
-    #
-    # @return An Element instance.
-    # @defreturn Element
-
-    def close(self):
-        assert len(self._elem) == 0, "missing end tags"
-        assert self._last != None, "missing toplevel element"
-        return self._last
-
-    def _flush(self):
-        if self._data:
-            if self._last is not None:
-                text = string.join(self._data, "")
-                if self._tail:
-                    assert self._last.tail is None, "internal error (tail)"
-                    self._last.tail = text
-                else:
-                    assert self._last.text is None, "internal error (text)"
-                    self._last.text = text
-            self._data = []
-
-    ##
-    # Adds text to the current element.
-    #
-    # @param data A string.  This should be either an 8-bit string
-    #    containing ASCII text, or a Unicode string.
-
-    def data(self, data):
-        self._data.append(data)
-
-    ##
-    # Opens a new element.
-    #
-    # @param tag The element name.
-    # @param attrib A dictionary containing element attributes.
-    # @return The opened element.
-    # @defreturn Element
-
-    def start(self, tag, attrs):
-        self._flush()
-        self._last = elem = self._factory(tag, attrs)
-        if self._elem:
-            self._elem[-1].append(elem)
-        self._elem.append(elem)
-        self._tail = 0
-        return elem
-
-    ##
-    # Closes the current element.
-    #
-    # @param tag The element name.
-    # @return The closed element.
-    # @defreturn Element
-
-    def end(self, tag):
-        self._flush()
-        self._last = self._elem.pop()
-        assert self._last.tag == tag,\
-               "end tag mismatch (expected %s, got %s)" % (
-                   self._last.tag, tag)
-        self._tail = 1
-        return self._last
-
-##
-# Element structure builder for XML source data, based on the
-# <b>expat</b> parser.
-#
-# @keyparam target Target object.  If omitted, the builder uses an
-#     instance of the standard {@link #TreeBuilder} class.
-# @keyparam html Predefine HTML entities.  This flag is not supported
-#     by the current implementation.
-# @see #ElementTree
-# @see #TreeBuilder
-
-class XMLTreeBuilder:
-
-    def __init__(self, html=0, target=None):
-        try:
-            from xml.parsers import expat
-        except ImportError:
-            raise ImportError(
-                "No module named expat; use SimpleXMLTreeBuilder instead"
-                )
-        self._parser = parser = expat.ParserCreate(None, "}")
-        if target is None:
-            target = TreeBuilder()
-        self._target = target
-        self._names = {} # name memo cache
-        # callbacks
-        parser.DefaultHandlerExpand = self._default
-        parser.StartElementHandler = self._start
-        parser.EndElementHandler = self._end
-        parser.CharacterDataHandler = self._data
-        # let expat do the buffering, if supported
-        try:
-            self._parser.buffer_text = 1
-        except AttributeError:
-            pass
-        # use new-style attribute handling, if supported
-        try:
-            self._parser.ordered_attributes = 1
-            self._parser.specified_attributes = 1
-            parser.StartElementHandler = self._start_list
-        except AttributeError:
-            pass
-        encoding = None
-        if not parser.returns_unicode:
-            encoding = "utf-8"
-        # target.xml(encoding, None)
-        self._doctype = None
-        self.entity = {}
-
-    def _fixtext(self, text):
-        # convert text string to ascii, if possible
-        try:
-            return _encode(text, "ascii")
-        except UnicodeError:
-            return text
-
-    def _fixname(self, key):
-        # expand qname, and convert name string to ascii, if possible
-        try:
-            name = self._names[key]
-        except KeyError:
-            name = key
-            if "}" in name:
-                name = "{" + name
-            self._names[key] = name = self._fixtext(name)
-        return name
-
-    def _start(self, tag, attrib_in):
-        fixname = self._fixname
-        tag = fixname(tag)
-        attrib = {}
-        for key, value in attrib_in.items():
-            attrib[fixname(key)] = self._fixtext(value)
-        return self._target.start(tag, attrib)
-
-    def _start_list(self, tag, attrib_in):
-        fixname = self._fixname
-        tag = fixname(tag)
-        attrib = {}
-        if attrib_in:
-            for i in range(0, len(attrib_in), 2):
-                attrib[fixname(attrib_in[i])] = self._fixtext(attrib_in[i+1])
-        return self._target.start(tag, attrib)
-
-    def _data(self, text):
-        return self._target.data(self._fixtext(text))
-
-    def _end(self, tag):
-        return self._target.end(self._fixname(tag))
-
-    def _default(self, text):
-        prefix = text[:1]
-        if prefix == "&":
-            # deal with undefined entities
-            try:
-                self._target.data(self.entity[text[1:-1]])
-            except KeyError:
-                from xml.parsers import expat
-                raise expat.error(
-                    "undefined entity %s: line %d, column %d" %
-                    (text, self._parser.ErrorLineNumber,
-                    self._parser.ErrorColumnNumber)
-                    )
-        elif prefix == "<" and text[:9] == "<!DOCTYPE":
-            self._doctype = [] # inside a doctype declaration
-        elif self._doctype is not None:
-            # parse doctype contents
-            if prefix == ">":
-                self._doctype = None
-                return
-            text = string.strip(text)
-            if not text:
-                return
-            self._doctype.append(text)
-            n = len(self._doctype)
-            if n > 2:
-                type = self._doctype[1]
-                if type == "PUBLIC" and n == 4:
-                    name, type, pubid, system = self._doctype
-                elif type == "SYSTEM" and n == 3:
-                    name, type, system = self._doctype
-                    pubid = None
-                else:
-                    return
-                if pubid:
-                    pubid = pubid[1:-1]
-                self.doctype(name, pubid, system[1:-1])
-                self._doctype = None
-
-    ##
-    # Handles a doctype declaration.
-    #
-    # @param name Doctype name.
-    # @param pubid Public identifier.
-    # @param system System identifier.
-
-    def doctype(self, name, pubid, system):
-        pass
-
-    ##
-    # Feeds data to the parser.
-    #
-    # @param data Encoded data.
-
-    def feed(self, data):
-        self._parser.Parse(data, 0)
-
-    ##
-    # Finishes feeding data to the parser.
-    #
-    # @return An element structure.
-    # @defreturn Element
-
-    def close(self):
-        self._parser.Parse("", 1) # end of data
-        tree = self._target.close()
-        del self._target, self._parser # get rid of circular references
-        return tree

+ 1 - 1
direct/src/showbase/HTMLTree.py

@@ -1,4 +1,4 @@
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 
 class HTMLTree(ET.ElementTree):
     def __init__(self, title):

+ 0 - 1
direct/src/showbase/PythonUtil.py

@@ -51,7 +51,6 @@ import traceback
 import __builtin__
 from StringIO import StringIO
 import marshal
-import ElementTree as ET
 import BpDb
 import unicodedata
 import bisect

+ 0 - 71
direct/src/test/ModelScreenShot.py

@@ -1,71 +0,0 @@
-import direct
-from pandac.PandaModules import loadPrcFileData
-
-from direct.showbase.DirectObject import DirectObject
-from direct.directbase.DirectStart import *
-from pandac.PandaModules import *
-import direct.gui.DirectGuiGlobals as DGG
-from direct.gui.DirectGui import *
-from direct.task import Task
-
-from direct.directnotify import DirectNotifyGlobal
-import math
-from operator import *
-
-import ModelScreenShotGlobals
-
-class ModelScreenShot(DirectObject):
-    notify = DirectNotifyGlobal.directNotify.newCategory("ModelScreenShot")
-
-    def __init__(self):
-
-        # Grab a list of models to capture screenshots of from an array in
-        # the globals file
-        self.modelsToView = ModelScreenShotGlobals.models
-        self.models = []
-
-        # Attach all the models listed to render and save a pointer to them
-        # in an array.  Then hide the model.
-        for model in self.modelsToView:
-            m = loader.loadModel(model)
-            m.reparentTo(render)
-            self.models.append(m)
-            m.hide()
-
-        # Set a nice farplane far, far away
-        self.lens = base.camera.getChild(0).node().getLens()
-        self.lens.setFar(10000)
-
-        # Hide the cursor
-        self.props = WindowProperties()
-        self.props.setCursorHidden(0)
-        base.win.requestProperties(self.props)
-
-        # Method for getting the distance to an object from the camera
-        def getDist(obj, lens):
-            rad = obj.getBounds().getRadius()
-            fov = lens.getFov()
-            dist = rad / math.tan(deg2Rad(min(fov[0], fov[1]/2.0)))
-            return dist
-
-        # Determin the optimal camera position
-        def getOptCamPos(obj, dist):
-            cen = obj.getBounds().getCenter()
-            camPos = VBase3(cen.getX(), -dist, cen.getZ())
-            return camPos
-
-        # Generate screenshots
-        def generatePics():
-            for model in self.models:
-                model.show()
-                base.camera.setPos(getOptCamPos(model, getDist(model, self.lens)))
-                uFilename = model.getName().replace('.egg','.jpg')
-                self.notify.info("screenshot %s   camera pos: %s" % (uFilename, base.camera.getPos()))
-                base.graphicsEngine.renderFrame()
-                base.screenshot(namePrefix = uFilename, defaultFilename = 0)
-                model.hide()
-
-        generatePics()
-        
-mss = ModelScreenShot()
-run()

+ 0 - 6
direct/src/test/ModelScreenShotGlobals.py

@@ -1,6 +0,0 @@
-# Replace these with the models you want to screenshot
-models = [
-    'models/misc/smiley',
-    'models/misc/sphere',
-    'models/misc/xyzAxis'
-]

+ 0 - 0
direct/src/test/Sources.pp


+ 0 - 0
direct/src/test/__init__.py


+ 0 - 12
dtool/LocalSetup.pp

@@ -293,18 +293,9 @@ $[cdefine PYTHON_FRAMEWORK]
 /* Define if we have RAD game tools, Miles Sound System installed.  */
 $[cdefine HAVE_RAD_MSS]
 
-/* Define if we have FMODex installed. */
-$[cdefine HAVE_FMODEX]
-
-/* Define if we have OpenAL installed. */
-$[cdefine HAVE_OPENAL]
-
 /* Define if we have Freetype 2.0 or better available. */
 $[cdefine HAVE_FREETYPE]
 
-/* Define if we are using SpeedTree. */
-$[cdefine HAVE_SPEEDTREE]
-
 /* Define if we want to compile in a default font. */
 $[cdefine COMPILE_IN_DEFAULT_FONT]
 
@@ -413,9 +404,6 @@ $[cdefine HAVE_FFMPEG]
 $[cdefine HAVE_SWSCALE]
 $[cdefine HAVE_SWRESAMPLE]
 
-/* Define if we have ODE installed and want to build for ODE.  */
-$[cdefine HAVE_ODE]
-
 /* Define if we have AWESOMIUM installed and want to build for AWESOMIUM.  */
 $[cdefine HAVE_AWESOMIUM]
 

+ 6 - 1
dtool/src/dtoolbase/dtoolbase_cc.h

@@ -132,7 +132,7 @@ typedef ios::seekdir ios_seekdir;
 #  else
 #    define NOEXCEPT
 #  endif
-#  if __has_extension(cxx_rvalue_references)
+#  if __has_extension(cxx_rvalue_references) && (__cplusplus >= 201103L)
 #    define USE_MOVE_SEMANTICS
 #  endif
 #elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)) && (__cplusplus >= 201103L)
@@ -141,6 +141,11 @@ typedef ios::seekdir ios_seekdir;
 #  define CONSTEXPR constexpr
 #  define NOEXCEPT noexcept
 #  define USE_MOVE_SEMANTICS
+#elif defined(_MSC_VER) && _MSC_VER >= 1600
+// MSVC 2010 has move semantics.
+#  define CONSTEXPR INLINE
+#  define NOEXCEPT throw()
+#  define USE_MOVE_SEMANTICS
 #else
 #  define CONSTEXPR INLINE
 #  define NOEXCEPT

+ 22 - 0
dtool/src/dtoolbase/stl_compares.I

@@ -234,3 +234,25 @@ INLINE size_t indirect_method_hash<Key, Compare>::
 operator () (const Key &key) const {
   return (*key).get_hash();
 }
+
+////////////////////////////////////////////////////////////////////
+//     Function: indirect_equals_hash::operator ()
+//       Access: Public
+//  Description: Calls the Key's get_hash() method.
+////////////////////////////////////////////////////////////////////
+template<class Key>
+INLINE size_t indirect_equals_hash<Key>::
+operator () (const Key &key) const {
+  return (*key).get_hash();
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: indirect_equals_hash::is_equal
+//       Access: Public
+//  Description: Returns true if a is equal to b, false otherwise.
+////////////////////////////////////////////////////////////////////
+template<class Key>
+INLINE bool indirect_equals_hash<Key>::
+is_equal(const Key &a, const Key &b) const {
+  return (a == b || (*a) == (*b));
+}

+ 17 - 3
dtool/src/dtoolbase/stl_compares.h

@@ -33,7 +33,6 @@ public:
   }
 };
 
-
 #else
 
 #include <map>  // for less
@@ -212,6 +211,23 @@ public:
   }
 };
 
+////////////////////////////////////////////////////////////////////
+//       Class : indirect_equals_hash
+// Description : An STL function object class, this is intended to be
+//               used on any ordered collection of pointers to classes
+//               that contain an operator ==() method.  It defines
+//               the equality of the pointers via operator ==().
+//
+//               Since it doesn't define the ordering of the pointers,
+//               it can only be used with hash containers.
+////////////////////////////////////////////////////////////////////
+template<class Key>
+class indirect_equals_hash {
+public:
+  INLINE size_t operator () (const Key &key) const;
+  INLINE bool is_equal(const Key &a, const Key &b) const;
+};
+
 #include "stl_compares.I"
 
 typedef floating_point_hash<float> float_hash;
@@ -234,5 +250,3 @@ class indirect_compare_names_hash : public indirect_method_hash<Key, indirect_co
 };
 
 #endif
-
-

+ 1 - 2
dtool/src/dtoolbase/typeHandle.h

@@ -16,7 +16,6 @@
 #define TYPEHANDLE_H
 
 #include "dtoolbase.h"
-#include "typeRegistry.h"
 
 #include <set>
 
@@ -152,7 +151,7 @@ private:
   int _index;
   static TypeHandle _none;
 
-friend class TypeRegistry;
+  friend class TypeRegistry;
 };
 
 

+ 27 - 0
dtool/src/dtoolbase/typeRegistry.I

@@ -39,3 +39,30 @@ init_lock() {
     _lock = new MutexImpl;
   }
 }
+
+////////////////////////////////////////////////////////////////////
+//     Function: TypeRegistry::look_up
+//       Access: Private
+//  Description: Returns the TypeRegistryNode associated with the
+//               indicated TypeHandle.  If there is no associated
+//               TypeRegistryNode, reports an error condition and
+//               returns NULL.
+//
+//               The associated TypedObject pointer is the pointer to
+//               the object that owns the handle, if available.  It is
+//               only used in an error condition, if for some reason
+//               the handle was uninitialized.
+//
+//               Assumes the lock is already held.
+////////////////////////////////////////////////////////////////////
+INLINE TypeRegistryNode *TypeRegistry::
+look_up(TypeHandle handle, TypedObject *object) const {
+#ifndef NDEBUG
+  if (handle._index >= (int)_handle_registry.size() ||
+      handle._index <= 0) {
+    // Invalid or uninitialized type handle.
+    return look_up_invalid(handle, object);
+  }
+#endif
+  return _handle_registry[handle._index];
+}

+ 7 - 12
dtool/src/dtoolbase/typeRegistry.cxx

@@ -498,7 +498,7 @@ get_parent_towards(TypeHandle child, TypeHandle base,
   TypeHandle handle;
   const TypeRegistryNode *child_node = look_up(child, child_object);
   const TypeRegistryNode *base_node = look_up(base, NULL);
-  assert(child_node != (TypeRegistryNode *)NULL && 
+  assert(child_node != (TypeRegistryNode *)NULL &&
          base_node != (TypeRegistryNode *)NULL);
   freshen_derivations();
   handle = TypeRegistryNode::get_parent_towards(child_node, base_node);
@@ -688,22 +688,17 @@ write_node(ostream &out, int indent_level, const TypeRegistryNode *node) const {
 }
 
 ////////////////////////////////////////////////////////////////////
-//     Function: TypeRegistry::look_up
+//     Function: TypeRegistry::look_up_invalid
 //       Access: Private
-//  Description: Returns the TypeRegistryNode associated with the
-//               indicated TypeHandle.  If there is no associated
-//               TypeRegistryNode, reports an error condition and
-//               returns NULL.
-//
-//               The associated TypedObject pointer is the pointer to
-//               the object that owns the handle, if available.  It is
-//               only used in an error condition, if for some reason
-//               the handle was uninitialized.
+//  Description: Called by look_up when it detects an invalid
+//               TypeHandle pointer.  In non-release builds, this
+//               method will do what it can to recover from this
+//               and initialize the type anyway.
 //
 //               Assumes the lock is already held.
 ////////////////////////////////////////////////////////////////////
 TypeRegistryNode *TypeRegistry::
-look_up(TypeHandle handle, TypedObject *object) const {
+look_up_invalid(TypeHandle handle, TypedObject *object) const {
 #ifndef NDEBUG
   if (handle._index == 0) {
     // The TypeHandle is unregistered.  This is an error condition.

+ 4 - 1
dtool/src/dtoolbase/typeRegistry.h

@@ -88,7 +88,8 @@ private:
   TypeRegistry();
 
   static void init_global_pointer();
-  TypeRegistryNode *look_up(TypeHandle type, TypedObject *object) const;
+  INLINE TypeRegistryNode *look_up(TypeHandle type, TypedObject *object) const;
+  TypeRegistryNode *look_up_invalid(TypeHandle type, TypedObject *object) const;
 
   INLINE void freshen_derivations();
   void rebuild_derivations();
@@ -120,6 +121,8 @@ private:
 // Helper function to allow for "C" interaction into the type system
 extern "C" EXPCL_DTOOL  int get_best_parent_from_Set(int id, const std::set<int> &this_set);
 
+#include "typeHandle.h"
+
 #include "typeRegistry.I"
 
 #endif

+ 0 - 5
dtool/src/dtoolbase/typeRegistryNode.cxx

@@ -48,11 +48,6 @@ is_derived_from(const TypeRegistryNode *child, const TypeRegistryNode *base) {
   // code.  Therefore, we go through some pains to make this function
   // as efficient as possible.
 
-  // (Actually, it appears that the function is not called as often as
-  // I'd first thought, and it wasn't really all that expensive to
-  // begin with.  So much of this complexity is of limited usefulness.
-  // Oh well.)
-
   // First, compare the subtree tops.  If they are the same, then this
   // node and the base node are within the same single-inheritance
   // subtree, and we can use our bitmask trick to determine the

+ 4 - 5
makepanda/makepanda.py

@@ -1953,7 +1953,7 @@ DTOOL_CONFIG=[
     ("DO_PSTATS",                      'UNDEF',                  'UNDEF'),
     ("DO_DCAST",                       'UNDEF',                  'UNDEF'),
     ("DO_COLLISION_RECORDING",         'UNDEF',                  'UNDEF'),
-    ("SUPPORT_IMMEDIATE_MODE",         '1',                      '1'),
+    ("SUPPORT_IMMEDIATE_MODE",         'UNDEF',                  'UNDEF'),
     ("TRACK_IN_INTERPRETER",           'UNDEF',                  'UNDEF'),
     ("DO_MEMORY_USAGE",                'UNDEF',                  'UNDEF'),
     ("DO_PIPELINING",                  '1',                      '1'),
@@ -2037,11 +2037,8 @@ DTOOL_CONFIG=[
     ("HAVE_BMP",                       '1',                      '1'),
     ("HAVE_PNM",                       '1',                      '1'),
     ("HAVE_VORBIS",                    'UNDEF',                  'UNDEF'),
-    ("HAVE_FMODEX",                    'UNDEF',                  'UNDEF'),
-    ("HAVE_OPENAL",                    'UNDEF',                  'UNDEF'),
     ("HAVE_NVIDIACG",                  'UNDEF',                  'UNDEF'),
     ("HAVE_FREETYPE",                  'UNDEF',                  'UNDEF'),
-    ("HAVE_SPEEDTREE",                 'UNDEF',                  'UNDEF'),
     ("HAVE_FFTW",                      'UNDEF',                  'UNDEF'),
     ("HAVE_OPENSSL",                   'UNDEF',                  'UNDEF'),
     ("HAVE_NET",                       'UNDEF',                  'UNDEF'),
@@ -2053,7 +2050,6 @@ DTOOL_CONFIG=[
     ("HAVE_SWSCALE",                   'UNDEF',                  'UNDEF'),
     ("HAVE_SWRESAMPLE",                'UNDEF',                  'UNDEF'),
     ("HAVE_ARTOOLKIT",                 'UNDEF',                  'UNDEF'),
-    ("HAVE_ODE",                       'UNDEF',                  'UNDEF'),
     ("HAVE_OPENCV",                    'UNDEF',                  'UNDEF'),
     ("HAVE_DIRECTCAM",                 'UNDEF',                  'UNDEF'),
     ("HAVE_SQUISH",                    'UNDEF',                  'UNDEF'),
@@ -3126,6 +3122,9 @@ if (not RUNTIME):
   TargetAdd('p3mathutil_composite1.obj', opts=OPTS, input='p3mathutil_composite1.cxx')
   TargetAdd('p3mathutil_composite2.obj', opts=OPTS, input='p3mathutil_composite2.cxx')
   IGATEFILES=GetDirectoryContents('panda/src/mathutil', ["*.h", "*_composite*.cxx"])
+  for ifile in IGATEFILES[:]:
+      if "_src." in ifile:
+          IGATEFILES.remove(ifile)
   TargetAdd('libp3mathutil.in', opts=OPTS, input=IGATEFILES)
   TargetAdd('libp3mathutil.in', opts=['IMOD:panda3d.core', 'ILIB:libp3mathutil', 'SRCDIR:panda/src/mathutil'])
   TargetAdd('libp3mathutil_igate.obj', input='libp3mathutil.in', opts=["DEPENDENCYONLY"])

+ 2 - 2
makepanda/makepandacore.py

@@ -525,9 +525,9 @@ def oscmd(cmd, ignoreError = False):
         if "interrogate" in cmd.split(" ", 1)[0] and GetVerbose():
             print(ColorText("red", "Interrogate failed, retrieving debug output..."))
             if sys.platform == "win32":
-                os.spawnl(os.P_WAIT, exe, cmd.split(" ", 1)[0] + " -v " + cmd.split(" ", 1)[1])
+                os.spawnl(os.P_WAIT, exe, cmd.split(" ", 1)[0] + " -vv " + cmd.split(" ", 1)[1])
             else:
-                os.system(cmd.split(" ", 1)[0] + " -v " + cmd.split(" ", 1)[1])
+                os.system(cmd.split(" ", 1)[0] + " -vv " + cmd.split(" ", 1)[1])
         exit("The following command returned a non-zero value: " + str(cmd))
 
     return res

+ 0 - 3
panda/src/audiotraits/config_fmodAudio.cxx

@@ -13,7 +13,6 @@
 ////////////////////////////////////////////////////////////////////
 
 #include "pandabase.h"
-#ifdef HAVE_FMODEX //[
 
 #include "config_fmodAudio.h"
 #include "audioManager.h"
@@ -73,5 +72,3 @@ get_audio_manager_func_fmod_audio() {
   init_libFmodAudio();
   return &Create_FmodAudioManager;
 }
-
-#endif //]

+ 0 - 3
panda/src/audiotraits/config_fmodAudio.h

@@ -17,7 +17,6 @@
 
 #include "pandabase.h"
 
-#ifdef HAVE_FMODEX //[
 #include "notifyCategoryProxy.h"
 #include "dconfig.h"
 #include "audioManager.h"
@@ -30,6 +29,4 @@ extern ConfigVariableInt fmod_audio_preload_threshold;
 extern EXPCL_FMOD_AUDIO void init_libFmodAudio();
 extern "C" EXPCL_FMOD_AUDIO Create_AudioManager_proc *get_audio_manager_func_fmod_audio();
 
-#endif //]
-
 #endif // CONFIG_FMODAUDIO_H

+ 0 - 4
panda/src/audiotraits/config_openalAudio.cxx

@@ -13,8 +13,6 @@
 ////////////////////////////////////////////////////////////////////
 
 #include "pandabase.h"
-#ifdef HAVE_OPENAL //[
-
 
 #include "config_openalAudio.h"
 #include "openalAudioManager.h"
@@ -65,5 +63,3 @@ get_audio_manager_func_openal_audio() {
   init_libOpenALAudio();
   return &Create_OpenALAudioManager;
 }
-
-#endif //]

+ 0 - 3
panda/src/audiotraits/config_openalAudio.h

@@ -17,7 +17,6 @@
 
 #include "pandabase.h"
 
-#ifdef HAVE_OPENAL //[
 #include "notifyCategoryProxy.h"
 #include "dconfig.h"
 #include "audioManager.h"
@@ -28,6 +27,4 @@ NotifyCategoryDecl(openalAudio, EXPCL_OPENAL_AUDIO, EXPTP_OPENAL_AUDIO);
 extern EXPCL_OPENAL_AUDIO void init_libOpenALAudio();
 extern "C" EXPCL_OPENAL_AUDIO Create_AudioManager_proc *get_audio_manager_func_openal_audio();
 
-#endif //]
-
 #endif // CONFIG_OPENALAUDIO_H

+ 0 - 4
panda/src/audiotraits/fmodAudioManager.cxx

@@ -20,8 +20,6 @@
 #include "config_fmodAudio.h"
 #include "dcast.h"
 
-#ifdef HAVE_FMODEX //[
-
 //Panda headers.
 #include "config_audio.h"
 #include "config_util.h"
@@ -897,5 +895,3 @@ get_cache_limit() const {
   //return _cache_limit;
   return 0;
 }
-
-#endif //]

+ 0 - 4
panda/src/audiotraits/fmodAudioManager.h

@@ -76,8 +76,6 @@
 #include "pandabase.h"
 #include "pset.h"
 
-#ifdef HAVE_FMODEX //[
-
 #include "audioManager.h"
 
 //The Includes needed for FMOD
@@ -235,6 +233,4 @@ private:
 EXPCL_FMOD_AUDIO AudioManager *Create_FmodAudioManager();
 
 
-#endif //]
-
 #endif /* __FMOD_AUDIO_MANAGER_H__ */

+ 0 - 5
panda/src/audiotraits/fmodAudioSound.cxx

@@ -19,8 +19,6 @@
 #include "pandabase.h"
 #include "dcast.h"
 
-#ifdef HAVE_FMODEX //[
-
 //Panda Headers
 #include "config_audio.h"
 #include "config_fmodAudio.h"
@@ -1107,6 +1105,3 @@ seek_callback(void *handle, unsigned int pos, void *user_data) {
     return FMOD_OK;
   }
 }
-
-
-#endif //]

+ 0 - 11
panda/src/audiotraits/fmodAudioSound.h

@@ -66,15 +66,11 @@
 //
 ////////////////////////////////////////////////////////////////////
 
-
-
 #ifndef __FMOD_AUDIO_SOUND_H__
 #define __FMOD_AUDIO_SOUND_H__
 
 #include <pandabase.h>
 
-#ifdef HAVE_FMODEX //[
-
 #include "audioSound.h"
 #include "reMutex.h"
 #include "fmodAudioManager.h"
@@ -257,11 +253,4 @@ class EXPCL_FMOD_AUDIO FmodAudioSound : public AudioSound {
 
 #include "fmodAudioSound.I"
 
-#endif //]
-
 #endif /* __FMOD_AUDIO_SOUND_H__ */
-
-
-
-
-

+ 0 - 1
panda/src/audiotraits/fmod_audio_composite1.cxx

@@ -2,4 +2,3 @@
 #include "config_fmodAudio.cxx"
 #include "fmodAudioManager.cxx"
 #include "fmodAudioSound.cxx"
-      

+ 0 - 4
panda/src/audiotraits/openalAudioManager.cxx

@@ -15,8 +15,6 @@
 
 #include "pandabase.h"
 
-#ifdef HAVE_OPENAL //[
-
 //Panda headers.
 #include "config_audio.h"
 #include "config_util.h"
@@ -1109,5 +1107,3 @@ discard_excess_cache(int sample_limit) {
     delete sd;
   }
 }
-
-#endif //]

+ 20 - 24
panda/src/audiotraits/openalAudioManager.h

@@ -18,7 +18,6 @@
 #define __OPENAL_AUDIO_MANAGER_H__
 
 #include "pandabase.h"
-#ifdef HAVE_OPENAL //[
 
 #include "audioManager.h"
 #include "plist.h"
@@ -43,11 +42,11 @@ extern void alc_audio_errcheck(const char *context,ALCdevice* device);
 
 class EXPCL_OPENAL_AUDIO OpenALAudioManager : public AudioManager {
   class SoundData;
-  
+
   friend class OpenALAudioSound;
   friend class OpenALSoundData;
- public:
 
+ public:
   //Constructor and Destructor
   OpenALAudioManager();
   virtual ~OpenALAudioManager();
@@ -55,18 +54,18 @@ class EXPCL_OPENAL_AUDIO OpenALAudioManager : public AudioManager {
   virtual void shutdown();
 
   virtual bool is_valid();
-          
+
   virtual PT(AudioSound) get_sound(const string&,     bool positional = false, int mode=SM_heuristic);
   virtual PT(AudioSound) get_sound(MovieAudio *sound, bool positional = false, int mode=SM_heuristic);
-  
+
   virtual void uncache_sound(const string&);
   virtual void clear_cache();
   virtual void set_cache_limit(unsigned int count);
   virtual unsigned int get_cache_limit() const;
-    
+
   virtual void set_volume(PN_stdfloat);
   virtual PN_stdfloat get_volume() const;
-          
+
   void set_play_rate(PN_stdfloat play_rate);
   PN_stdfloat get_play_rate() const;
 
@@ -80,7 +79,7 @@ class EXPCL_OPENAL_AUDIO OpenALAudioManager : public AudioManager {
   // ux, uy and uz are the respective components of a unit up-vector
   // These changes will NOT be invoked until audio_3d_update() is called.
   virtual void audio_3d_set_listener_attributes(PN_stdfloat px, PN_stdfloat py, PN_stdfloat pz,
-                                                PN_stdfloat vx, PN_stdfloat xy, PN_stdfloat xz, 
+                                                PN_stdfloat vx, PN_stdfloat xy, PN_stdfloat xz,
                                                 PN_stdfloat fx, PN_stdfloat fy, PN_stdfloat fz,
                                                 PN_stdfloat ux, PN_stdfloat uy, PN_stdfloat uz);
 
@@ -88,7 +87,7 @@ class EXPCL_OPENAL_AUDIO OpenALAudioManager : public AudioManager {
                                                 PN_stdfloat *vx, PN_stdfloat *vy, PN_stdfloat *vz,
                                                 PN_stdfloat *fx, PN_stdfloat *fy, PN_stdfloat *fz,
                                                 PN_stdfloat *ux, PN_stdfloat *uy, PN_stdfloat *uz);
-          
+
   // Control the "relative distance factor" for 3D spacialized audio in units-per-foot. Default is 1.0
   // OpenAL has no distance factor but we use this as a scale
   // on the min/max distances of sounds to preserve FMOD compatibility.
@@ -121,7 +120,7 @@ private:
 
   bool can_use_audio(MovieAudioCursor *source);
   bool should_load_audio(MovieAudioCursor *source, int mode);
-  
+
   SoundData *get_sound_data(MovieAudio *source, int mode);
 
   // Tell the manager that the sound dtor was called.
@@ -129,12 +128,12 @@ private:
   void increment_client_count(SoundData *sd);
   void decrement_client_count(SoundData *sd);
   void discard_excess_cache(int limit);
-  
+
   void starting_sound(OpenALAudioSound* audio);
   void stopping_sound(OpenALAudioSound* audio);
-  
+
   void cleanup();
-  
+
 private:
   // This global lock protects all access to OpenAL library interfaces.
   static ReMutex _lock;
@@ -144,7 +143,7 @@ private:
   // around for a little while, since it is common to
   // stop using a sound for a brief moment and then
   // quickly resume.
-  
+
   typedef plist<void *> ExpirationQueue;
   ExpirationQueue _expiring_samples;
   ExpirationQueue _expiring_streams;
@@ -175,16 +174,16 @@ private:
     ExpirationQueue::iterator _expire;
   };
 
-  
+
   typedef phash_map<string, SoundData *> SampleCache;
   SampleCache _sample_cache;
-  
+
   typedef phash_set<PT(OpenALAudioSound)> SoundsPlaying;
   SoundsPlaying _sounds_playing;
 
   typedef phash_set<OpenALAudioSound *> AllSounds;
   AllSounds _all_sounds;
-  
+
   // State:
   int _cache_limit;
   PN_stdfloat _volume;
@@ -195,13 +194,13 @@ private:
   static int _active_managers;
   static bool _openal_active;
   unsigned int _concurrent_sound_limit;
-  
+
   bool _is_valid;
-  
+
   typedef pset<OpenALAudioManager *> Managers;
   static Managers *_managers;
 
-  static ALCdevice* _device; 
+  static ALCdevice* _device;
   static ALCcontext* _context;
 
   // cache of openal sources, use only for playing sounds
@@ -232,7 +231,7 @@ private:
     return get_class_type();
   }
   virtual TypeHandle force_init_type() {
-    init_type(); 
+    init_type();
     return get_class_type();
   }
 
@@ -247,7 +246,4 @@ private:
 
 EXPCL_OPENAL_AUDIO AudioManager *Create_OpenALAudioManager();
 
-
-#endif //]
-
 #endif /* __OPENAL_AUDIO_MANAGER_H__ */

+ 0 - 4
panda/src/audiotraits/openalAudioSound.cxx

@@ -15,8 +15,6 @@
 
 #include "pandabase.h"
 
-#ifdef HAVE_OPENAL //[
-
 //Panda Headers
 #include "throw_event.h"
 #include "openalAudioSound.h"
@@ -916,5 +914,3 @@ status() const {
     return AudioSound::PLAYING;
   }
 }
-
-#endif //]

+ 0 - 6
panda/src/audiotraits/openalAudioSound.h

@@ -13,15 +13,11 @@
 //
 ////////////////////////////////////////////////////////////////////
 
-
-
 #ifndef __OPENAL_AUDIO_SOUND_H__
 #define __OPENAL_AUDIO_SOUND_H__
 
 #include "pandabase.h"
 
-#ifdef HAVE_OPENAL //[
-
 #include "audioSound.h"
 #include "movieAudioCursor.h"
 #include "trueClock.h"
@@ -232,6 +228,4 @@ private:
 
 #include "openalAudioSound.I"
 
-#endif //]
-
 #endif /* __OPENAL_AUDIO_SOUND_H__ */

+ 1 - 1
panda/src/cull/drawCullHandler.cxx

@@ -19,7 +19,7 @@
 #include "renderState.h"
 #include "graphicsStateGuardianBase.h"
 #include "config_pgraph.h"
-
+#include "cullTraverser.h"
 
 ////////////////////////////////////////////////////////////////////
 //     Function: DrawCullHandler::record_object

+ 1 - 3
panda/src/cull/drawCullHandler.h

@@ -35,7 +35,7 @@ class EXPCL_PANDA_CULL DrawCullHandler : public CullHandler {
 public:
   INLINE DrawCullHandler(GraphicsStateGuardianBase *gsg);
 
-  virtual void record_object(CullableObject *object, 
+  virtual void record_object(CullableObject *object,
                              const CullTraverser *traverser);
 
 private:
@@ -46,5 +46,3 @@ private:
 
 #endif
 
-
-  

+ 35 - 13
panda/src/display/graphicsStateGuardian.cxx

@@ -365,6 +365,9 @@ set_coordinate_system(CoordinateSystem cs) {
   if (cs == CS_default) {
     cs = get_default_coordinate_system();
   }
+  if (_coordinate_system == cs) {
+    return;
+  }
   _coordinate_system = cs;
 
   // Changing the external coordinate system changes the cs_transform.
@@ -1041,7 +1044,8 @@ fetch_specified_part(Shader::ShaderMatInput part, InternalName *name, LMatrix4 &
     return &t;
   }
   case Shader::SMO_attr_material: {
-    const MaterialAttrib *target_material = DCAST(MaterialAttrib, _target_rs->get_attrib_def(MaterialAttrib::get_class_slot()));
+    const MaterialAttrib *target_material = (const MaterialAttrib *)
+      _target_rs->get_attrib_def(MaterialAttrib::get_class_slot());
     // Material matrix contains AMBIENT, DIFFUSE, EMISSION, SPECULAR+SHININESS
     if (target_material->is_off()) {
       t = LMatrix4(1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0);
@@ -1060,7 +1064,8 @@ fetch_specified_part(Shader::ShaderMatInput part, InternalName *name, LMatrix4 &
     return &t;
   }
   case Shader::SMO_attr_color: {
-    const ColorAttrib *target_color = DCAST(ColorAttrib, _target_rs->get_attrib_def(ColorAttrib::get_class_slot()));
+    const ColorAttrib *target_color = (const ColorAttrib *)
+      _target_rs->get_attrib_def(ColorAttrib::get_class_slot());
     if (target_color->get_color_type() != ColorAttrib::T_flat) {
       return &LMatrix4::ones_mat();
     }
@@ -1069,7 +1074,8 @@ fetch_specified_part(Shader::ShaderMatInput part, InternalName *name, LMatrix4 &
     return &t;
   }
   case Shader::SMO_attr_colorscale: {
-    const ColorScaleAttrib *target_color = DCAST(ColorScaleAttrib, _target_rs->get_attrib_def(ColorScaleAttrib::get_class_slot()));
+    const ColorScaleAttrib *target_color = (const ColorScaleAttrib *)
+      _target_rs->get_attrib_def(ColorScaleAttrib::get_class_slot());
     if (target_color->is_identity()) {
       return &LMatrix4::ones_mat();
     }
@@ -1078,7 +1084,8 @@ fetch_specified_part(Shader::ShaderMatInput part, InternalName *name, LMatrix4 &
     return &t;
   }
   case Shader::SMO_attr_fog: {
-    const FogAttrib *target_fog = DCAST(FogAttrib, _target_rs->get_attrib_def(FogAttrib::get_class_slot()));
+    const FogAttrib *target_fog = (const FogAttrib *)
+      _target_rs->get_attrib_def(FogAttrib::get_class_slot());
     Fog *fog = target_fog->get_fog();
     if (fog == (Fog*) NULL) {
       return &LMatrix4::ones_mat();
@@ -1089,7 +1096,8 @@ fetch_specified_part(Shader::ShaderMatInput part, InternalName *name, LMatrix4 &
     return &t;
   }
   case Shader::SMO_attr_fogcolor: {
-    const FogAttrib *target_fog = DCAST(FogAttrib, _target_rs->get_attrib_def(FogAttrib::get_class_slot()));
+    const FogAttrib *target_fog = (const FogAttrib *)
+      _target_rs->get_attrib_def(FogAttrib::get_class_slot());
     Fog *fog = target_fog->get_fog();
     if (fog == (Fog*) NULL) {
       return &LMatrix4::ones_mat();
@@ -2074,8 +2082,12 @@ get_render_buffer(int buffer_type, const FrameBufferProperties &prop) {
 ////////////////////////////////////////////////////////////////////
 CPT(TransformState) GraphicsStateGuardian::
 get_cs_transform_for(CoordinateSystem cs) const {
-  if (_internal_coordinate_system == CS_default ||
-      _internal_coordinate_system == cs) {
+  if (_coordinate_system == cs) {
+    // We've already calculated this.
+    return _cs_transform;
+
+  } else if (_internal_coordinate_system == CS_default ||
+             _internal_coordinate_system == cs) {
     return TransformState::make_identity();
 
   } else {
@@ -2111,7 +2123,9 @@ do_issue_clip_plane() {
   int num_enabled = 0;
   int num_on_planes = 0;
 
-  const ClipPlaneAttrib *target_clip_plane = DCAST(ClipPlaneAttrib, _target_rs->get_attrib_def(ClipPlaneAttrib::get_class_slot()));
+  const ClipPlaneAttrib *target_clip_plane = (const ClipPlaneAttrib *)
+    _target_rs->get_attrib_def(ClipPlaneAttrib::get_class_slot());
+
   if (target_clip_plane != (ClipPlaneAttrib *)NULL) {
     CPT(ClipPlaneAttrib) new_plane = target_clip_plane->filter_to_max(_max_clip_planes);
 
@@ -2171,7 +2185,9 @@ do_issue_clip_plane() {
 ////////////////////////////////////////////////////////////////////
 void GraphicsStateGuardian::
 do_issue_color() {
-  const ColorAttrib *target_color = DCAST(ColorAttrib, _target_rs->get_attrib_def(ColorAttrib::get_class_slot()));
+  const ColorAttrib *target_color = (const ColorAttrib *)
+    _target_rs->get_attrib_def(ColorAttrib::get_class_slot());
+
   switch (target_color->get_color_type()) {
   case ColorAttrib::T_flat:
     // Color attribute flat: it specifies a scene graph color that
@@ -2219,7 +2235,9 @@ do_issue_color_scale() {
     _state_mask.clear_bit(TextureAttrib::get_class_slot());
   }
 
-  const ColorScaleAttrib *target_color_scale = DCAST(ColorScaleAttrib, _target_rs->get_attrib_def(ColorScaleAttrib::get_class_slot()));
+  const ColorScaleAttrib *target_color_scale = (const ColorScaleAttrib *)
+    _target_rs->get_attrib_def(ColorScaleAttrib::get_class_slot());
+
   _color_scale_enabled = target_color_scale->has_scale();
   _current_color_scale = target_color_scale->get_scale();
   _has_texture_alpha_scale = false;
@@ -2278,7 +2296,9 @@ do_issue_light() {
   int num_enabled = 0;
   int num_on_lights = 0;
 
-  const LightAttrib *target_light = DCAST(LightAttrib, _target_rs->get_attrib_def(LightAttrib::get_class_slot()));
+  const LightAttrib *target_light = (const LightAttrib *)
+    _target_rs->get_attrib_def(LightAttrib::get_class_slot());
+
   if (display_cat.is_spam()) {
     display_cat.spam()
       << "do_issue_light: " << target_light << "\n";
@@ -2639,8 +2659,10 @@ end_bind_clip_planes() {
 ////////////////////////////////////////////////////////////////////
 void GraphicsStateGuardian::
 determine_target_texture() {
-  const TextureAttrib *target_texture = DCAST(TextureAttrib, _target_rs->get_attrib_def(TextureAttrib::get_class_slot()));
-  const TexGenAttrib *target_tex_gen = DCAST(TexGenAttrib, _target_rs->get_attrib_def(TexGenAttrib::get_class_slot()));
+  const TextureAttrib *target_texture = (const TextureAttrib *)
+    _target_rs->get_attrib_def(TextureAttrib::get_class_slot());
+  const TexGenAttrib *target_tex_gen = (const TexGenAttrib *)
+    _target_rs->get_attrib_def(TexGenAttrib::get_class_slot());
 
   nassertv(target_texture != (TextureAttrib *)NULL &&
            target_tex_gen != (TexGenAttrib *)NULL);

+ 17 - 0
panda/src/express/memoryUsage.I

@@ -366,3 +366,20 @@ INLINE void MemoryUsage::
 show_trend_ages() {
   get_global_ptr()->ns_show_trend_ages();
 }
+
+////////////////////////////////////////////////////////////////////
+//     Function: MemoryUsage::get_global_ptr
+//       Access: Private, Static
+//  Description: Returns the pointer to the only MemoryUsage object in
+//               the world.
+////////////////////////////////////////////////////////////////////
+INLINE MemoryUsage *MemoryUsage::
+get_global_ptr() {
+  if (_global_ptr == (MemoryUsage *)NULL) {
+    init_memory_hook();
+    _global_ptr = new MemoryUsage(*memory_hook);
+    memory_hook = _global_ptr;
+  }
+
+  return _global_ptr;
+}

+ 0 - 18
panda/src/express/memoryUsage.cxx

@@ -564,24 +564,6 @@ overflow_heap_size() {
   _report_memory_usage = true;
 }
 
-////////////////////////////////////////////////////////////////////
-//     Function: MemoryUsage::get_global_ptr
-//       Access: Private, Static
-//  Description: Returns the pointer to the only MemoryUsage object in
-//               the world.
-////////////////////////////////////////////////////////////////////
-MemoryUsage *MemoryUsage::
-get_global_ptr() {
-  if (_global_ptr == (MemoryUsage *)NULL) {
-    init_memory_hook();
-    _global_ptr = new MemoryUsage(*memory_hook);
-    memory_hook = _global_ptr;
-  }
-
-  return _global_ptr;
-}
-
-
 ////////////////////////////////////////////////////////////////////
 //     Function: MemoryUsage::ns_record_pointer
 //       Access: Private

+ 1 - 1
panda/src/express/memoryUsage.h

@@ -96,7 +96,7 @@ protected:
 
 private:
   MemoryUsage(const MemoryHook &copy);
-  static MemoryUsage *get_global_ptr();
+  INLINE static MemoryUsage *get_global_ptr();
 
   void ns_record_pointer(ReferenceCount *ptr);
   void ns_update_type(ReferenceCount *ptr, TypeHandle type);

+ 88 - 0
panda/src/express/nodePointerTo.I

@@ -51,6 +51,36 @@ INLINE NodePointerTo<T>::
 }
 #endif  // CPPPARSER
 
+#ifdef USE_MOVE_SEMANTICS
+#ifndef CPPPARSER
+////////////////////////////////////////////////////////////////////
+//     Function: NodePointerTo::Move Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE NodePointerTo<T>::
+NodePointerTo(NodePointerTo<T> &&from) NOEXCEPT :
+  NodePointerToBase<T>((NodePointerToBase<T> &&)from)
+{
+}
+#endif  // CPPPARSER
+
+#ifndef CPPPARSER
+////////////////////////////////////////////////////////////////////
+//     Function: NodePointerTo::Move Assignment Operator
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE NodePointerTo<T> &NodePointerTo<T>::
+operator = (NodePointerTo<T> &&from) NOEXCEPT {
+  this->reassign(move(from));
+  return *this;
+}
+#endif  // CPPPARSER
+#endif  // USE_MOVE_SEMANTICS
+
 #ifndef CPPPARSER
 ////////////////////////////////////////////////////////////////////
 //     Function: NodePointerTo::Dereference operator
@@ -192,6 +222,64 @@ INLINE NodeConstPointerTo<T>::
 }
 #endif  // CPPPARSER
 
+#ifdef USE_MOVE_SEMANTICS
+#ifndef CPPPARSER
+////////////////////////////////////////////////////////////////////
+//     Function: NodeConstPointerTo::Move Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE NodeConstPointerTo<T>::
+NodeConstPointerTo(NodePointerTo<T> &&from) NOEXCEPT :
+  NodePointerToBase<T>((NodePointerToBase<T> &&)from)
+{
+}
+#endif  // CPPPARSER
+
+#ifndef CPPPARSER
+////////////////////////////////////////////////////////////////////
+//     Function: NodeConstPointerTo::Move Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE NodeConstPointerTo<T>::
+NodeConstPointerTo(NodeConstPointerTo<T> &&from) NOEXCEPT :
+  NodePointerToBase<T>((NodePointerToBase<T> &&)from)
+{
+}
+#endif  // CPPPARSER
+
+#ifndef CPPPARSER
+////////////////////////////////////////////////////////////////////
+//     Function: NodeConstPointerTo::Move Assignment Operator
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE NodeConstPointerTo<T> &NodeConstPointerTo<T>::
+operator = (NodePointerTo<T> &&from) NOEXCEPT {
+  this->reassign(move(from));
+  return *this;
+}
+#endif  // CPPPARSER
+
+#ifndef CPPPARSER
+////////////////////////////////////////////////////////////////////
+//     Function: NodeConstPointerTo::Move Assignment Operator
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE NodeConstPointerTo<T> &NodeConstPointerTo<T>::
+operator = (NodeConstPointerTo<T> &&from) NOEXCEPT {
+  this->reassign(move(from));
+  return *this;
+}
+#endif  // CPPPARSER
+#endif  // USE_MOVE_SEMANTICS
+
 #ifndef CPPPARSER
 ////////////////////////////////////////////////////////////////////
 //     Function: NodeConstPointerTo::Dereference operator

+ 22 - 0
panda/src/express/nodePointerTo.h

@@ -36,6 +36,11 @@ public:
   INLINE NodePointerTo(const NodePointerTo<T> &copy);
   INLINE ~NodePointerTo();
 
+#ifdef USE_MOVE_SEMANTICS
+  INLINE NodePointerTo(NodePointerTo<T> &&from) NOEXCEPT;
+  INLINE NodePointerTo<T> &operator = (NodePointerTo<T> &&from) NOEXCEPT;
+#endif
+
   INLINE To &operator *() const;
   INLINE To *operator -> () const;
 
@@ -67,6 +72,13 @@ public:
   INLINE NodeConstPointerTo(const NodeConstPointerTo<T> &copy);
   INLINE ~NodeConstPointerTo();
 
+#ifdef USE_MOVE_SEMANTICS
+  INLINE NodeConstPointerTo(NodePointerTo<T> &&from) NOEXCEPT;
+  INLINE NodeConstPointerTo(NodeConstPointerTo<T> &&from) NOEXCEPT;
+  INLINE NodeConstPointerTo<T> &operator = (NodePointerTo<T> &&from) NOEXCEPT;
+  INLINE NodeConstPointerTo<T> &operator = (NodeConstPointerTo<T> &&from) NOEXCEPT;
+#endif
+
   INLINE const To &operator *() const;
   INLINE const To *operator -> () const;
   INLINE operator const T *() const;
@@ -79,6 +91,16 @@ public:
 #endif  // CPPPARSER
 };
 
+template <class T>
+void swap(NodePointerTo<T> &one, NodePointerTo<T> &two) NOEXCEPT {
+  one.swap(two);
+}
+
+template <class T>
+void swap(NodeConstPointerTo<T> &one, NodeConstPointerTo<T> &two) NOEXCEPT {
+  one.swap(two);
+}
+
 #define NPT(type) NodePointerTo< type >
 #define NCPT(type) NodeConstPointerTo< type >
 

+ 37 - 0
panda/src/express/nodePointerToBase.I

@@ -46,6 +46,43 @@ INLINE NodePointerToBase<T>::
   reassign((To *)NULL);
 }
 
+#ifdef USE_MOVE_SEMANTICS
+////////////////////////////////////////////////////////////////////
+//     Function: NodePointerToBase::Move Constructor
+//       Access: Protected
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE NodePointerToBase<T>::
+NodePointerToBase(NodePointerToBase<T> &&from) NOEXCEPT {
+  _void_ptr = from._void_ptr;
+  from._void_ptr = (void *)NULL;
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: NodePointerToBase::reassign
+//       Access: Protected
+//  Description: This version of reassign is called when a
+//               NodePointerTo is assigned to this Node PointerTo
+//               as an rvalue.  In this case, we can steal the
+//               reference count from the other PointerTo, without
+//               needing to call ref() and unref() unnecessarily.
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE void NodePointerToBase<T>::
+reassign(NodePointerToBase<T> &&from) NOEXCEPT {
+  To *old_ptr = (To *)this->_void_ptr;
+
+  this->_void_ptr = from._void_ptr;
+  from._void_ptr = NULL;
+
+  // Now delete the old pointer.
+  if (old_ptr != (To *)NULL) {
+    node_unref_delete(old_ptr);
+  }
+}
+#endif  // USE_MOVE_SEMANTICS
+
 ////////////////////////////////////////////////////////////////////
 //     Function: NodePointerToBase::reassign
 //       Access: Protected

+ 5 - 0
panda/src/express/nodePointerToBase.h

@@ -42,6 +42,11 @@ protected:
   INLINE NodePointerToBase(const NodePointerToBase<T> &copy);
   INLINE ~NodePointerToBase();
 
+#ifdef USE_MOVE_SEMANTICS
+  INLINE NodePointerToBase(NodePointerToBase<T> &&from) NOEXCEPT;
+  INLINE void reassign(NodePointerToBase<To> &&from) NOEXCEPT;
+#endif
+
   void reassign(To *ptr);
   INLINE void reassign(const NodePointerToBase<To> &copy);
 

+ 33 - 25
panda/src/express/pointerTo.I

@@ -43,8 +43,8 @@ PointerTo(const PointerTo<T> &copy) :
 ////////////////////////////////////////////////////////////////////
 template<class T>
 INLINE PointerTo<T>::
-PointerTo(PointerTo<T> &&move) NOEXCEPT :
-  PointerToBase<T>((PointerToBase<T> &&)move)
+PointerTo(PointerTo<T> &&from) NOEXCEPT :
+  PointerToBase<T>(move(from))
 {
 }
 
@@ -55,19 +55,11 @@ PointerTo(PointerTo<T> &&move) NOEXCEPT :
 ////////////////////////////////////////////////////////////////////
 template<class T>
 INLINE PointerTo<T> &PointerTo<T>::
-operator = (PointerTo<T> &&move) NOEXCEPT {
-  To *old_ptr = (To *)this->_void_ptr;
-
-  this->_void_ptr = move._void_ptr;
-  move._void_ptr = NULL;
-
-  if (old_ptr != (To *)NULL) {
-    unref_delete(old_ptr);
-  }
-
+operator = (PointerTo<T> &&from) NOEXCEPT {
+  this->reassign(move(from));
   return *this;
 }
-#endif
+#endif  // USE_MOVE_SEMANTICS
 
 ////////////////////////////////////////////////////////////////////
 //     Function: PointerTo::Destructor
@@ -208,8 +200,20 @@ ConstPointerTo(const ConstPointerTo<T> &copy) :
 ////////////////////////////////////////////////////////////////////
 template<class T>
 INLINE ConstPointerTo<T>::
-ConstPointerTo(ConstPointerTo<T> &&move) NOEXCEPT :
-  PointerToBase<T>((PointerToBase<T> &&)move)
+ConstPointerTo(PointerTo<T> &&from) NOEXCEPT :
+  PointerToBase<T>(move(from))
+{
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: ConstPointerTo::Move Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE ConstPointerTo<T>::
+ConstPointerTo(ConstPointerTo<T> &&from) NOEXCEPT :
+  PointerToBase<T>(move(from))
 {
 }
 
@@ -220,19 +224,23 @@ ConstPointerTo(ConstPointerTo<T> &&move) NOEXCEPT :
 ////////////////////////////////////////////////////////////////////
 template<class T>
 INLINE ConstPointerTo<T> &ConstPointerTo<T>::
-operator = (ConstPointerTo<T> &&move) NOEXCEPT {
-  To *old_ptr = (To *)this->_void_ptr;
-
-  this->_void_ptr = move._void_ptr;
-  move._void_ptr = NULL;
-
-  if (old_ptr != (To *)NULL) {
-    unref_delete(old_ptr);
-  }
+operator = (PointerTo<T> &&from) NOEXCEPT {
+  this->reassign(move(from));
+  return *this;
+}
 
+////////////////////////////////////////////////////////////////////
+//     Function: ConstPointerTo::Move Assignment Operator
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE ConstPointerTo<T> &ConstPointerTo<T>::
+operator = (ConstPointerTo<T> &&from) NOEXCEPT {
+  this->reassign(move(from));
   return *this;
 }
-#endif
+#endif  // USE_MOVE_SEMANTICS
 
 ////////////////////////////////////////////////////////////////////
 //     Function: ConstPointerTo::Dereference operator

+ 8 - 6
panda/src/express/pointerTo.h

@@ -86,8 +86,8 @@ PUBLISHED:
 
 public:
 #ifdef USE_MOVE_SEMANTICS
-  INLINE PointerTo(PointerTo<T> &&move) NOEXCEPT;
-  INLINE PointerTo<T> &operator = (PointerTo<T> &&move) NOEXCEPT;
+  INLINE PointerTo(PointerTo<T> &&from) NOEXCEPT;
+  INLINE PointerTo<T> &operator = (PointerTo<T> &&from) NOEXCEPT;
 #endif
 
   INLINE To &operator *() const;
@@ -150,8 +150,10 @@ PUBLISHED:
 
 public:
 #ifdef USE_MOVE_SEMANTICS
-  INLINE ConstPointerTo(ConstPointerTo<T> &&move) NOEXCEPT;
-  INLINE ConstPointerTo<T> &operator = (ConstPointerTo<T> &&move) NOEXCEPT;
+  INLINE ConstPointerTo(PointerTo<T> &&from) NOEXCEPT;
+  INLINE ConstPointerTo(ConstPointerTo<T> &&from) NOEXCEPT;
+  INLINE ConstPointerTo<T> &operator = (PointerTo<T> &&from) NOEXCEPT;
+  INLINE ConstPointerTo<T> &operator = (ConstPointerTo<T> &&from) NOEXCEPT;
 #endif
 
   INLINE const To &operator *() const;
@@ -177,12 +179,12 @@ PUBLISHED:
 // of PointerTo objects without incurring the cost of unnecessary
 // reference count changes.  The performance difference is dramatic!
 template <class T>
-void swap(PointerTo<T> &one, PointerTo<T> &two) {
+void swap(PointerTo<T> &one, PointerTo<T> &two) NOEXCEPT {
   one.swap(two);
 }
 
 template <class T>
-void swap(ConstPointerTo<T> &one, ConstPointerTo<T> &two) {
+void swap(ConstPointerTo<T> &one, ConstPointerTo<T> &two) NOEXCEPT {
   one.swap(two);
 }
 

+ 32 - 9
panda/src/express/pointerToBase.I

@@ -36,29 +36,52 @@ PointerToBase(const PointerToBase<T> &copy) {
 }
 
 ////////////////////////////////////////////////////////////////////
-//     Function: PointerToBase::Move Constructor
+//     Function: PointerToBase::Destructor
 //       Access: Protected
 //  Description:
 ////////////////////////////////////////////////////////////////////
-#ifdef USE_MOVE_SEMANTICS
 template<class T>
 INLINE PointerToBase<T>::
-PointerToBase(PointerToBase<T> &&move) NOEXCEPT {
-  _void_ptr = move._void_ptr;
-  move._void_ptr = (void *)NULL;
+~PointerToBase() {
+  reassign((To *)NULL);
 }
-#endif
 
+#ifdef USE_MOVE_SEMANTICS
 ////////////////////////////////////////////////////////////////////
-//     Function: PointerToBase::Destructor
+//     Function: PointerToBase::Move Constructor
 //       Access: Protected
 //  Description:
 ////////////////////////////////////////////////////////////////////
 template<class T>
 INLINE PointerToBase<T>::
-~PointerToBase() {
-  reassign((To *)NULL);
+PointerToBase(PointerToBase<T> &&from) NOEXCEPT {
+  _void_ptr = from._void_ptr;
+  from._void_ptr = (void *)NULL;
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: PointerToBase::reassign
+//       Access: Protected
+//  Description: This version of reassign is called when a PointerTo
+//               is assigned to this PointerTo as an rvalue.  In
+//               this case, we can steal the reference count from
+//               the other PointerTo, without needing to call ref()
+//               and unref() unnecessarily.
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE void PointerToBase<T>::
+reassign(PointerToBase<T> &&from) NOEXCEPT {
+  To *old_ptr = (To *)this->_void_ptr;
+
+  this->_void_ptr = from._void_ptr;
+  from._void_ptr = NULL;
+
+  // Now delete the old pointer.
+  if (old_ptr != (To *)NULL) {
+    unref_delete(old_ptr);
+  }
 }
+#endif  // USE_MOVE_SEMANTICS
 
 ////////////////////////////////////////////////////////////////////
 //     Function: PointerToBase::reassign

+ 2 - 1
panda/src/express/pointerToBase.h

@@ -39,7 +39,8 @@ protected:
   INLINE ~PointerToBase();
 
 #ifdef USE_MOVE_SEMANTICS
-  INLINE PointerToBase(PointerToBase<T> &&move) NOEXCEPT;
+  INLINE PointerToBase(PointerToBase<T> &&from) NOEXCEPT;
+  INLINE void reassign(PointerToBase<To> &&from) NOEXCEPT;
 #endif
 
   INLINE void reassign(To *ptr);

+ 7 - 2
panda/src/express/pointerToVoid.I

@@ -110,8 +110,13 @@ operator != (const PointerToVoid &other) const {
 //       Access: Public
 //  Description: Swaps the contents of this PointerTo with the other,
 //               without touching the reference counts.
+//
+//               For internal use only.  Use the global swap()
+//               function instead.
 ////////////////////////////////////////////////////////////////////
 INLINE void PointerToVoid::
-swap(PointerToVoid &other) {
-  std::swap(_void_ptr, other._void_ptr);
+swap(PointerToVoid &other) NOEXCEPT {
+  AtomicAdjust::Pointer temp = _void_ptr;
+  _void_ptr = other._void_ptr;
+  other._void_ptr = temp;
 }

+ 1 - 1
panda/src/express/pointerToVoid.h

@@ -54,7 +54,7 @@ public:
   INLINE bool operator == (const PointerToVoid &other) const;
   INLINE bool operator != (const PointerToVoid &other) const;
 
-  INLINE void swap(PointerToVoid &other);
+  INLINE void swap(PointerToVoid &other) NOEXCEPT;
 
 protected:
   // Within the PointerToVoid class, we only store a void pointer.

+ 107 - 33
panda/src/glstuff/glGraphicsStateGuardian_src.cxx

@@ -316,6 +316,7 @@ CLP(GraphicsStateGuardian)(GraphicsEngine *engine, GraphicsPipe *pipe) :
   _force_flush = gl_force_flush;
 
   _scissor_enabled = false;
+  _scissor_attrib_active = false;
 
 #ifdef DO_PSTATS
   if (gl_finish) {
@@ -2287,6 +2288,7 @@ clear(DrawableRegion *clearable) {
     return;
   }
 
+  //XXX rdb: Is this line really necessary?
   set_state_and_transform(RenderState::make_empty(), _internal_transform);
 
   int mask = 0;
@@ -2468,6 +2470,8 @@ prepare_display_region(DisplayRegionPipelineReader *dr) {
   _draw_buffer_type |= _current_properties->get_aux_mask();
   set_draw_buffer(_draw_buffer_type);
 
+  int count = dr->get_num_regions();
+
   if (dr->get_scissor_enabled()) {
     if (GLCAT.is_spam()) {
       GLCAT.spam()
@@ -2475,6 +2479,7 @@ prepare_display_region(DisplayRegionPipelineReader *dr) {
     }
     glEnable(GL_SCISSOR_TEST);
     _scissor_enabled = true;
+    _scissor_array.resize(count);
   } else {
     if (GLCAT.is_spam()) {
       GLCAT.spam()
@@ -2482,27 +2487,35 @@ prepare_display_region(DisplayRegionPipelineReader *dr) {
     }
     glDisable(GL_SCISSOR_TEST);
     _scissor_enabled = false;
+    _scissor_array.clear();
   }
 
+  _scissor_attrib_active = false;
+
 #ifndef OPENGLES
   if (_supports_viewport_arrays) {
-    int count = dr->get_num_regions();
+
     GLfloat *viewports = (GLfloat *)alloca(sizeof(GLfloat) * 4 * count);
-    GLint *scissors = (GLint *)alloca(sizeof(GLint) * 4 * count);
 
+    // We store the scissor regions in a vector since we may need
+    // to switch back to it in do_issue_scissor.
     for (int i = 0; i < count; ++i) {
-      GLint *sr = scissors + i * 4;
+      LVecBase4i sr;
       dr->get_region_pixels(i, sr[0], sr[1], sr[2], sr[3]);
       GLfloat *vr = viewports + i * 4;
       vr[0] = (GLfloat) sr[0];
       vr[1] = (GLfloat) sr[1];
       vr[2] = (GLfloat) sr[2];
       vr[3] = (GLfloat) sr[3];
+      if (_scissor_enabled) {
+        _scissor_array[i] = sr;
+      }
     }
     _glViewportArrayv(0, count, viewports);
-    if (dr->get_scissor_enabled()) {
-      _glScissorArrayv(0, count, scissors);
+    if (_scissor_enabled) {
+      _glScissorArrayv(0, count, _scissor_array[0].get_data());
     }
+
     if (GLCAT.is_spam()) {
       GLCAT.spam()
         << "glViewportArrayv(0, " << count << ", [\n";
@@ -2511,12 +2524,12 @@ prepare_display_region(DisplayRegionPipelineReader *dr) {
         GLCAT.spam(false) << vr[0] << ", " << vr[1] << ", " << vr[2] << ", " << vr[3] << ",\n";
       }
       GLCAT.spam(false) << "])\n";
-      if (dr->get_scissor_enabled()) {
+      if (_scissor_enabled) {
         GLCAT.spam()
           << "glScissorArrayv(0, " << count << ", [\n";
         for (int i = 0; i < count; ++i) {
-          GLint *sr = scissors + i * 4;
-          GLCAT.spam(false) << sr[0] << ", " << sr[1] << ", " << sr[2] << ", " << sr[3] << ",\n";
+          const LVecBase4i &sr = _scissor_array[i];
+          GLCAT.spam(false) << sr << ",\n";
         }
       }
       GLCAT.spam(false) << "])\n";
@@ -2526,9 +2539,13 @@ prepare_display_region(DisplayRegionPipelineReader *dr) {
 #endif  // OPENGLES
   {
     glViewport(x, y, width, height);
-    if (dr->get_scissor_enabled()) {
+    if (_scissor_enabled) {
       glScissor(x, y, width, height);
+
+      _scissor_array.resize(1);
+      _scissor_array[0].set(x, y, width, height);
     }
+
     if (GLCAT.is_spam()) {
       GLCAT.spam()
         << "glViewport(" << x << ", " << y << ", " << width << ", " << height << ")\n";
@@ -2826,7 +2843,7 @@ end_frame(Thread *current_thread) {
 
   // Now is a good time to delete any pending display lists.
 #ifndef OPENGLES
-  {
+  if (display_lists) {
     LightMutexHolder holder(_lock);
     if (!_deleted_display_lists.empty()) {
       DeletedNames::iterator ddli;
@@ -2841,10 +2858,13 @@ end_frame(Thread *current_thread) {
       }
       _deleted_display_lists.clear();
     }
+  }
 
-    // And deleted queries, too, unless we're using query timers
-    // in which case we'll need to reuse lots of them.
-    if (!get_timer_queries_active() && !_deleted_queries.empty()) {
+  // And deleted queries, too, unless we're using query timers
+  // in which case we'll need to reuse lots of them.
+  if (_supports_occlusion_query && !get_timer_queries_active()) {
+    LightMutexHolder holder(_lock);
+    if (!_deleted_queries.empty()) {
       if (GLCAT.is_spam()) {
         DeletedNames::iterator dqi;
         for (dqi = _deleted_queries.begin();
@@ -5106,9 +5126,9 @@ issue_timer_query(int pstats_index) {
   // Issue the timestamp query.
   _glQueryCounter(query->_index, GL_TIMESTAMP);
 
-  _pending_timer_queries.push_back(DCAST(TimerQueryContext, query));
+  _pending_timer_queries.push_back((TimerQueryContext *)query);
 
-  return DCAST(TimerQueryContext, query);
+  return (TimerQueryContext *)query;
 
 #else
   return NULL;
@@ -5621,7 +5641,9 @@ do_issue_transform() {
 void CLP(GraphicsStateGuardian)::
 do_issue_shade_model() {
 #ifndef OPENGLES_2
-  const ShadeModelAttrib *target_shade_model = DCAST(ShadeModelAttrib, _target_rs->get_attrib_def(ShadeModelAttrib::get_class_slot()));
+  const ShadeModelAttrib *target_shade_model = (const ShadeModelAttrib *)
+    _target_rs->get_attrib_def(ShadeModelAttrib::get_class_slot());
+
   switch (target_shade_model->get_mode()) {
   case ShadeModelAttrib::M_smooth:
     glShadeModel(GL_SMOOTH);
@@ -5704,7 +5726,9 @@ do_issue_shader(bool state_has_changed) {
 ////////////////////////////////////////////////////////////////////
 void CLP(GraphicsStateGuardian)::
 do_issue_render_mode() {
-  const RenderModeAttrib *target_render_mode = DCAST(RenderModeAttrib, _target_rs->get_attrib_def(RenderModeAttrib::get_class_slot()));
+  const RenderModeAttrib *target_render_mode = (const RenderModeAttrib *)
+    _target_rs->get_attrib_def(RenderModeAttrib::get_class_slot());
+
   _render_mode = target_render_mode->get_mode();
   _point_size = target_render_mode->get_thickness();
   _point_perspective = target_render_mode->get_perspective();
@@ -5748,7 +5772,9 @@ do_issue_render_mode() {
 ////////////////////////////////////////////////////////////////////
 void CLP(GraphicsStateGuardian)::
 do_issue_antialias() {
-  const AntialiasAttrib *target_antialias = DCAST(AntialiasAttrib, _target_rs->get_attrib_def(AntialiasAttrib::get_class_slot()));
+  const AntialiasAttrib *target_antialias = (const AntialiasAttrib *)
+    _target_rs->get_attrib_def(AntialiasAttrib::get_class_slot());
+
   if (target_antialias->get_mode_type() == AntialiasAttrib::M_auto) {
     // In this special mode, we must enable antialiasing on a
     // case-by-case basis, because we enable it differently for
@@ -5814,7 +5840,9 @@ do_issue_antialias() {
 void CLP(GraphicsStateGuardian)::
 do_issue_rescale_normal() {
 #ifndef OPENGLES_2 // OpenGL ES 2.0 doesn't support rescaling normals.
-  const RescaleNormalAttrib *target_rescale_normal = DCAST(RescaleNormalAttrib, _target_rs->get_attrib_def(RescaleNormalAttrib::get_class_slot()));
+  const RescaleNormalAttrib *target_rescale_normal = (const RescaleNormalAttrib *)
+    _target_rs->get_attrib_def(RescaleNormalAttrib::get_class_slot());
+
   RescaleNormalAttrib::Mode mode = target_rescale_normal->get_mode();
 
   _auto_rescale_normal = false;
@@ -5866,7 +5894,9 @@ do_issue_rescale_normal() {
 ////////////////////////////////////////////////////////////////////
 void CLP(GraphicsStateGuardian)::
 do_issue_depth_test() {
-  const DepthTestAttrib *target_depth_test = DCAST(DepthTestAttrib, _target_rs->get_attrib_def(DepthTestAttrib::get_class_slot()));
+  const DepthTestAttrib *target_depth_test = (const DepthTestAttrib *)
+    _target_rs->get_attrib_def(DepthTestAttrib::get_class_slot());
+
   DepthTestAttrib::PandaCompareFunc mode = target_depth_test->get_mode();
   if (mode == DepthTestAttrib::M_none) {
     enable_depth_test(false);
@@ -5887,7 +5917,9 @@ do_issue_alpha_test() {
   if (_target_shader->get_flag(ShaderAttrib::F_subsume_alpha_test)) {
     enable_alpha_test(false);
   } else {
-    const AlphaTestAttrib *target_alpha_test = DCAST(AlphaTestAttrib, _target_rs->get_attrib_def(AlphaTestAttrib::get_class_slot()));
+    const AlphaTestAttrib *target_alpha_test = (const AlphaTestAttrib *)
+      _target_rs->get_attrib_def(AlphaTestAttrib::get_class_slot());
+
     AlphaTestAttrib::PandaCompareFunc mode = target_alpha_test->get_mode();
     if (mode == AlphaTestAttrib::M_none) {
       enable_alpha_test(false);
@@ -5908,7 +5940,9 @@ do_issue_alpha_test() {
 ////////////////////////////////////////////////////////////////////
 void CLP(GraphicsStateGuardian)::
 do_issue_depth_write() {
-  const DepthWriteAttrib *target_depth_write = DCAST(DepthWriteAttrib, _target_rs->get_attrib_def(DepthWriteAttrib::get_class_slot()));
+  const DepthWriteAttrib *target_depth_write = (const DepthWriteAttrib *)
+    _target_rs->get_attrib_def(DepthWriteAttrib::get_class_slot());
+
   DepthWriteAttrib::Mode mode = target_depth_write->get_mode();
   if (mode == DepthWriteAttrib::M_off) {
 #ifdef GSG_VERBOSE
@@ -5933,7 +5967,9 @@ do_issue_depth_write() {
 ////////////////////////////////////////////////////////////////////
 void CLP(GraphicsStateGuardian)::
 do_issue_cull_face() {
-  const CullFaceAttrib *target_cull_face = DCAST(CullFaceAttrib, _target_rs->get_attrib_def(CullFaceAttrib::get_class_slot()));
+  const CullFaceAttrib *target_cull_face = (const CullFaceAttrib *)
+    _target_rs->get_attrib_def(CullFaceAttrib::get_class_slot());
+
   CullFaceAttrib::Mode mode = target_cull_face->get_effective_mode();
 
   switch (mode) {
@@ -5963,7 +5999,9 @@ do_issue_cull_face() {
 ////////////////////////////////////////////////////////////////////
 void CLP(GraphicsStateGuardian)::
 do_issue_fog() {
-  const FogAttrib *target_fog = DCAST(FogAttrib, _target_rs->get_attrib_def(FogAttrib::get_class_slot()));
+  const FogAttrib *target_fog = (const FogAttrib *)
+    _target_rs->get_attrib_def(FogAttrib::get_class_slot());
+
   if (!target_fog->is_off()) {
     enable_fog(true);
     Fog *fog = target_fog->get_fog();
@@ -5982,7 +6020,9 @@ do_issue_fog() {
 ////////////////////////////////////////////////////////////////////
 void CLP(GraphicsStateGuardian)::
 do_issue_depth_offset() {
-  const DepthOffsetAttrib *target_depth_offset = DCAST(DepthOffsetAttrib, _target_rs->get_attrib_def(DepthOffsetAttrib::get_class_slot()));
+  const DepthOffsetAttrib *target_depth_offset = (const DepthOffsetAttrib *)
+     _target_rs->get_attrib_def(DepthOffsetAttrib::get_class_slot());
+
   int offset = target_depth_offset->get_offset();
 
   if (offset != 0) {
@@ -6023,7 +6063,8 @@ do_issue_material() {
   static Material empty;
   const Material *material;
 
-  const MaterialAttrib *target_material = DCAST(MaterialAttrib, _target_rs->get_attrib_def(MaterialAttrib::get_class_slot()));
+  const MaterialAttrib *target_material = (const MaterialAttrib *)
+    _target_rs->get_attrib_def(MaterialAttrib::get_class_slot());
 
   if (target_material == (MaterialAttrib *)NULL ||
       target_material->is_off()) {
@@ -6130,7 +6171,8 @@ do_issue_blending() {
   // all the other blending-related stuff doesn't matter.  If the
   // device doesn't support color-write, we use blending tricks
   // to effectively disable color write.
-  const ColorWriteAttrib *target_color_write = DCAST(ColorWriteAttrib, _target_rs->get_attrib_def(ColorWriteAttrib::get_class_slot()));
+  const ColorWriteAttrib *target_color_write = (const ColorWriteAttrib *)
+    _target_rs->get_attrib_def(ColorWriteAttrib::get_class_slot());
 
   unsigned int color_channels =
     target_color_write->get_channels() & _color_write_mask;
@@ -6160,11 +6202,13 @@ do_issue_blending() {
   }
 
 
-  const ColorBlendAttrib *target_color_blend = DCAST(ColorBlendAttrib, _target_rs->get_attrib_def(ColorBlendAttrib::get_class_slot()));
+  const ColorBlendAttrib *target_color_blend = (const ColorBlendAttrib *)
+    _target_rs->get_attrib_def(ColorBlendAttrib::get_class_slot());
   CPT(ColorBlendAttrib) color_blend = target_color_blend;
   ColorBlendAttrib::Mode color_blend_mode = target_color_blend->get_mode();
 
-  const TransparencyAttrib *target_transparency = DCAST(TransparencyAttrib, _target_rs->get_attrib_def(TransparencyAttrib::get_class_slot()));
+  const TransparencyAttrib *target_transparency = (const TransparencyAttrib *)
+    _target_rs->get_attrib_def(TransparencyAttrib::get_class_slot());
   TransparencyAttrib::Mode transparency_mode = target_transparency->get_mode();
 
   _color_blend_involves_color_scale = color_blend->involves_color_scale();
@@ -8715,7 +8759,8 @@ set_state_and_transform(const RenderState *target,
   }
   _target_rs = target;
 
-  _target_shader = DCAST(ShaderAttrib, _target_rs->get_attrib_def(ShaderAttrib::get_class_slot()));
+  _target_shader = (const ShaderAttrib *)
+    _target_rs->get_attrib_def(ShaderAttrib::get_class_slot());
 #ifndef OPENGLES
   _instance_count = _target_shader->get_instance_count();
 #endif
@@ -9542,7 +9587,9 @@ do_issue_tex_matrix() {
 
     glMatrixMode(GL_TEXTURE);
 
-    const TexMatrixAttrib *target_tex_matrix = DCAST(TexMatrixAttrib, _target_rs->get_attrib_def(TexMatrixAttrib::get_class_slot()));
+    const TexMatrixAttrib *target_tex_matrix = (const TexMatrixAttrib *)
+      _target_rs->get_attrib_def(TexMatrixAttrib::get_class_slot());
+
     if (target_tex_matrix->has_stage(stage)) {
       GLPf(LoadMatrix)(target_tex_matrix->get_mat(stage).get_data());
     } else {
@@ -11870,7 +11917,8 @@ do_issue_stencil() {
     return;
   }
 
-  const StencilAttrib *stencil = DCAST(StencilAttrib, _target_rs->get_attrib(StencilAttrib::get_class_slot()));
+  const StencilAttrib *stencil = (const StencilAttrib *)
+    _target_rs->get_attrib(StencilAttrib::get_class_slot());
 
   if (stencil != (const StencilAttrib *)NULL) {
     // DEBUG
@@ -11958,7 +12006,8 @@ do_issue_stencil() {
 ////////////////////////////////////////////////////////////////////
 void CLP(GraphicsStateGuardian)::
 do_issue_scissor() {
-  const ScissorAttrib *target_scissor = DCAST(ScissorAttrib, _target_rs->get_attrib_def(ScissorAttrib::get_class_slot()));
+  const ScissorAttrib *target_scissor = (const ScissorAttrib *)
+    _target_rs->get_attrib_def(ScissorAttrib::get_class_slot());
 
   if (!target_scissor->is_off()) {
     // A non-off ScissorAttrib means to override the scissor setting
@@ -11984,5 +12033,30 @@ do_issue_scissor() {
         << "glScissor(" << x << ", " << y << ", " << width << ", " << height << ")\n";
     }
     glScissor(x, y, width, height);
+
+    _scissor_attrib_active = true;
+
+  } else if (_scissor_attrib_active) {
+    _scissor_attrib_active = false;
+
+    if (_scissor_array.size() > 0) {
+      // Scissoring is enabled on the display region.
+      // Revert to the scissor state specified in the DisplayRegion.
+      if (_supports_viewport_arrays) {
+        _glScissorArrayv(0, _scissor_array.size(), _scissor_array[0].get_data());
+      } else {
+        const LVecBase4i sr = _scissor_array[0];
+        glScissor(sr[0], sr[1], sr[2], sr[3]);
+      }
+
+    } else if (_scissor_enabled) {
+      // The display region had no scissor enabled.  Disable scissoring.
+      if (GLCAT.is_spam()) {
+        GLCAT.spam()
+          << "glDisable(GL_SCISSOR_TEST)\n";
+      }
+      glDisable(GL_SCISSOR_TEST);
+      _scissor_enabled = false;
+    }
   }
 }

+ 2 - 0
panda/src/glstuff/glGraphicsStateGuardian_src.h

@@ -550,6 +550,8 @@ protected:
   bool _point_perspective;
   bool _vertex_blending_enabled;
   bool _scissor_enabled;
+  bool _scissor_attrib_active;
+  epvector<LVecBase4i> _scissor_array;
 
 #ifndef OPENGLES_1
   PT(Shader) _current_shader;

+ 72 - 21
panda/src/gobj/geom.I

@@ -380,14 +380,14 @@ clear_bounds() {
 ////////////////////////////////////////////////////////////////////
 INLINE void Geom::
 calc_tight_bounds(LPoint3 &min_point, LPoint3 &max_point,
-                  bool &found_any, 
+                  bool &found_any,
                   const GeomVertexData *vertex_data,
                   bool got_mat, const LMatrix4 &mat,
                   Thread *current_thread) const {
   CDReader cdata(_cycler, current_thread);
-  
-  do_calc_tight_bounds(min_point, max_point, found_any, 
-                       vertex_data, got_mat, mat, 
+
+  do_calc_tight_bounds(min_point, max_point, found_any,
+                       vertex_data, got_mat, mat,
                        InternalName::get_vertex(),
                        cdata, current_thread);
 }
@@ -409,7 +409,7 @@ INLINE void Geom::
 calc_tight_bounds(LPoint3 &min_point, LPoint3 &max_point,
                   bool &found_any, Thread *current_thread) const {
   calc_tight_bounds(min_point, max_point, found_any,
-                    get_vertex_data(current_thread), false, 
+                    get_vertex_data(current_thread), false,
                     LMatrix4::ident_mat(),
                     current_thread);
 }
@@ -422,15 +422,15 @@ calc_tight_bounds(LPoint3 &min_point, LPoint3 &max_point,
 ////////////////////////////////////////////////////////////////////
 INLINE void Geom::
 calc_tight_bounds(LPoint3 &min_point, LPoint3 &max_point,
-                  bool &found_any, 
+                  bool &found_any,
                   const GeomVertexData *vertex_data,
                   bool got_mat, const LMatrix4 &mat,
                   const InternalName *column_name,
                   Thread *current_thread) const {
   CDReader cdata(_cycler, current_thread);
-  
-  do_calc_tight_bounds(min_point, max_point, found_any, 
-                       vertex_data, got_mat, mat, 
+
+  do_calc_tight_bounds(min_point, max_point, found_any,
+                       vertex_data, got_mat, mat,
                        column_name, cdata, current_thread);
 }
 
@@ -450,7 +450,7 @@ mark_internal_bounds_stale(CData *cdata) {
 ////////////////////////////////////////////////////////////////////
 //     Function: Geom::CDataCache::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE Geom::CDataCache::
 CDataCache() :
@@ -463,7 +463,7 @@ CDataCache() :
 ////////////////////////////////////////////////////////////////////
 //     Function: Geom::CDataCache::Copy Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE Geom::CDataCache::
 CDataCache(const Geom::CDataCache &copy) :
@@ -500,7 +500,7 @@ set_result(const Geom *geom_result, const GeomVertexData *data_result) {
 ////////////////////////////////////////////////////////////////////
 //     Function: Geom::CacheKey::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE Geom::CacheKey::
 CacheKey(const GeomVertexData *source_data, const GeomMunger *modifier) :
@@ -509,6 +509,32 @@ CacheKey(const GeomVertexData *source_data, const GeomMunger *modifier) :
 {
 }
 
+////////////////////////////////////////////////////////////////////
+//     Function: Geom::CacheKey::Copy Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+INLINE Geom::CacheKey::
+CacheKey(const CacheKey &copy) :
+  _source_data(copy._source_data),
+  _modifier(copy._modifier)
+{
+}
+
+#ifdef USE_MOVE_SEMANTICS
+////////////////////////////////////////////////////////////////////
 +//     Function: Geom::CacheKey::Move Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+INLINE Geom::CacheKey::
+CacheKey(CacheKey &&from) NOEXCEPT :
+  _source_data(move(from._source_data)),
+  _modifier(move(from._modifier))
+{
+}
+#endif  // USE_MOVE_SEMANTICS
+
 ////////////////////////////////////////////////////////////////////
 //     Function: Geom::CacheKey::operator <
 //       Access: Public
@@ -531,7 +557,7 @@ operator < (const CacheKey &other) const {
 ////////////////////////////////////////////////////////////////////
 //     Function: Geom::CacheEntry::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE Geom::CacheEntry::
 CacheEntry(Geom *source, const GeomVertexData *source_data,
@@ -541,6 +567,31 @@ CacheEntry(Geom *source, const GeomVertexData *source_data,
 {
 }
 
+////////////////////////////////////////////////////////////////////
 +//     Function: Geom::CacheEntry::Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+INLINE Geom::CacheEntry::
+CacheEntry(Geom *source, const Geom::CacheKey &key) :
+  _source(source),
+  _key(key)
+{
+}
+
+#ifdef USE_MOVE_SEMANTICS
+////////////////////////////////////////////////////////////////////
+//     Function: Geom::CacheEntry::Move Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+INLINE Geom::CacheEntry::
+CacheEntry(Geom *source, Geom::CacheKey &&key) NOEXCEPT :
+  _source(source),
+  _key(move(key))
+{
+}
+#endif  // USE_MOVE_SEMANTICS
 
 ////////////////////////////////////////////////////////////////////
 //     Function: Geom::CData::Constructor
@@ -667,7 +718,7 @@ get_current_thread() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomPipelineReader::get_primitive_type
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomPipelineReader::PrimitiveType GeomPipelineReader::
 get_primitive_type() const {
@@ -677,7 +728,7 @@ get_primitive_type() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomPipelineReader::get_shade_model
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomPipelineReader::ShadeModel GeomPipelineReader::
 get_shade_model() const {
@@ -687,7 +738,7 @@ get_shade_model() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomPipelineReader::get_geom_rendering
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE int GeomPipelineReader::
 get_geom_rendering() const {
@@ -697,7 +748,7 @@ get_geom_rendering() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomPipelineReader::get_usage_hint
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomPipelineReader::UsageHint GeomPipelineReader::
 get_usage_hint() const {
@@ -708,7 +759,7 @@ get_usage_hint() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomPipelineReader::get_vertex_data
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE CPT(GeomVertexData) GeomPipelineReader::
 get_vertex_data() const {
@@ -718,7 +769,7 @@ get_vertex_data() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomPipelineReader::get_num_primitives
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE int GeomPipelineReader::
 get_num_primitives() const {
@@ -728,7 +779,7 @@ get_num_primitives() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomPipelineReader::get_primitive
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE CPT(GeomPrimitive) GeomPipelineReader::
 get_primitive(int i) const {
@@ -739,7 +790,7 @@ get_primitive(int i) const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomPipelineReader::get_modified
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE UpdateSeq GeomPipelineReader::
 get_modified() const {

+ 23 - 15
panda/src/gobj/geom.h

@@ -142,24 +142,24 @@ PUBLISHED:
   bool release(PreparedGraphicsObjects *prepared_objects);
   int release_all();
 
-  GeomContext *prepare_now(PreparedGraphicsObjects *prepared_objects, 
+  GeomContext *prepare_now(PreparedGraphicsObjects *prepared_objects,
                            GraphicsStateGuardianBase *gsg);
 
 public:
-  bool draw(GraphicsStateGuardianBase *gsg, 
+  bool draw(GraphicsStateGuardianBase *gsg,
             const GeomMunger *munger,
             const GeomVertexData *vertex_data,
             bool force, Thread *current_thread) const;
-  
+
   INLINE void calc_tight_bounds(LPoint3 &min_point, LPoint3 &max_point,
-                                bool &found_any, 
+                                bool &found_any,
                                 const GeomVertexData *vertex_data,
                                 bool got_mat, const LMatrix4 &mat,
                                 Thread *current_thread) const;
   INLINE void calc_tight_bounds(LPoint3 &min_point, LPoint3 &max_point,
                                 bool &found_any, Thread *current_thread) const;
   INLINE void calc_tight_bounds(LPoint3 &min_point, LPoint3 &max_point,
-                                bool &found_any, 
+                                bool &found_any,
                                 const GeomVertexData *vertex_data,
                                 bool got_mat, const LMatrix4 &mat,
                                 const InternalName *column_name,
@@ -174,7 +174,7 @@ private:
   void compute_internal_bounds(CData *cdata, Thread *current_thread) const;
 
   void do_calc_tight_bounds(LPoint3 &min_point, LPoint3 &max_point,
-                            bool &found_any, 
+                            bool &found_any,
                             const GeomVertexData *vertex_data,
                             bool got_mat, const LMatrix4 &mat,
                             const InternalName *column_name,
@@ -220,7 +220,7 @@ private:
     Geom *_source;  // A back pointer to the containing Geom
     const Geom *_geom_result;  // ref-counted if not NULL and not same as _source
     CPT(GeomVertexData) _data_result;
-    
+
   public:
     static TypeHandle get_class_type() {
       return _type_handle;
@@ -228,7 +228,7 @@ private:
     static void init_type() {
       register_type(_type_handle, "Geom::CDataCache");
     }
-    
+
   private:
     static TypeHandle _type_handle;
   };
@@ -245,6 +245,10 @@ public:
   public:
     INLINE CacheKey(const GeomVertexData *source_data,
                     const GeomMunger *modifier);
+    INLINE CacheKey(const CacheKey &copy);
+#ifdef USE_MOVE_SEMANTICS
+    INLINE CacheKey(CacheKey &&from) NOEXCEPT;
+#endif
     INLINE bool operator < (const CacheKey &other) const;
 
     CPT(GeomVertexData) _source_data;
@@ -253,9 +257,13 @@ public:
   // It is not clear why MSVC7 needs this class to be public.
   class EXPCL_PANDA_GOBJ CacheEntry : public GeomCacheEntry {
   public:
-    INLINE CacheEntry(Geom *source, 
+    INLINE CacheEntry(Geom *source,
                       const GeomVertexData *source_data,
                       const GeomMunger *modifier);
+    INLINE CacheEntry(Geom *source, const CacheKey &key);
+#ifdef USE_MOVE_SEMANTICS
+    INLINE CacheEntry(Geom *source, CacheKey &&key) NOEXCEPT;
+#endif
     ALLOC_DELETED_CHAIN(CacheEntry);
 
     virtual void evict_callback();
@@ -265,7 +273,7 @@ public:
     CacheKey _key;
 
     PipelineCycler<CDataCache> _cycler;
-    
+
   public:
     static TypeHandle get_class_type() {
       return _type_handle;
@@ -275,7 +283,7 @@ public:
       register_type(_type_handle, "Geom::CacheEntry",
                     GeomCacheEntry::get_class_type());
     }
-    
+
   private:
     static TypeHandle _type_handle;
   };
@@ -304,13 +312,13 @@ private:
     UsageHint _usage_hint;
     bool _got_usage_hint;
     UpdateSeq _modified;
-  
+
     CPT(BoundingVolume) _internal_bounds;
     int _nested_vertices;
     bool _internal_bounds_stale;
     BoundingVolume::BoundsType _bounds_type;
     CPT(BoundingVolume) _user_bounds;
-    
+
   public:
     static TypeHandle get_class_type() {
       return _type_handle;
@@ -318,11 +326,11 @@ private:
     static void init_type() {
       register_type(_type_handle, "Geom::CData");
     }
-    
+
   private:
     static TypeHandle _type_handle;
   };
- 
+
   PipelineCycler<CData> _cycler;
   typedef CycleDataLockedReader<CData> CDLockedReader;
   typedef CycleDataReader<CData> CDReader;

+ 23 - 19
panda/src/gobj/geomMunger.cxx

@@ -27,7 +27,7 @@ PStatCollector GeomMunger::_munge_pcollector("*:Munge");
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomMunger::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 GeomMunger::
 GeomMunger(GraphicsStateGuardianBase *gsg) :
@@ -44,7 +44,7 @@ GeomMunger(GraphicsStateGuardianBase *gsg) :
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomMunger::Copy Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 GeomMunger::
 GeomMunger(const GeomMunger &copy) :
@@ -60,7 +60,7 @@ GeomMunger(const GeomMunger &copy) :
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomMunger::Copy Assignment Operator
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void GeomMunger::
 operator = (const GeomMunger &copy) {
@@ -70,7 +70,7 @@ operator = (const GeomMunger &copy) {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomMunger::Destructor
 //       Access: Public, Virtual
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 GeomMunger::
 ~GeomMunger() {
@@ -96,7 +96,7 @@ remove_data(const GeomVertexData *data) {
 //       Access: Public
 //  Description: Applies the indicated munger to the geom and its
 //               data, and returns a (possibly different) geom and
-//               data, according to the munger's whim.  
+//               data, according to the munger's whim.
 //
 //               The assumption is that for a particular geom and a
 //               particular munger, the result will always be the
@@ -110,13 +110,12 @@ remove_data(const GeomVertexData *data) {
 bool GeomMunger::
 munge_geom(CPT(Geom) &geom, CPT(GeomVertexData) &data,
            bool force, Thread *current_thread) {
-  CPT(GeomVertexData) source_data = data;
 
   // Look up the munger in the geom's cache--maybe we've recently
   // applied it.
   PT(Geom::CacheEntry) entry;
 
-  Geom::CacheKey key(source_data, this);
+  Geom::CacheKey key(data, this);
 
   geom->_cache_lock.acquire();
   Geom::Cache::const_iterator ci = geom->_cache.find(&key);
@@ -126,12 +125,12 @@ munge_geom(CPT(Geom) &geom, CPT(GeomVertexData) &data,
     entry = (*ci).second;
     geom->_cache_lock.release();
     nassertr(entry->_source == geom, false);
-    
+
     // Here's an element in the cache for this computation.  Record a
     // cache hit, so this element will stay in the cache a while
     // longer.
     entry->refresh(current_thread);
-    
+
     // Now check that it's fresh.
     Geom::CDCacheReader cdata(entry->_cycler, current_thread);
     if (cdata->_source == geom &&
@@ -139,12 +138,12 @@ munge_geom(CPT(Geom) &geom, CPT(GeomVertexData) &data,
         geom->get_modified(current_thread) <= cdata->_geom_result->get_modified(current_thread) &&
         data->get_modified(current_thread) <= cdata->_data_result->get_modified(current_thread)) {
       // The cache entry is still good; use it.
-      
+
       geom = cdata->_geom_result;
       data = cdata->_data_result;
       return true;
     }
-    
+
     // The cache entry is stale, but we'll recompute it below.  Note
     // that there's a small race condition here; another thread might
     // recompute the cache at the same time.  No big deal, since it'll
@@ -166,7 +165,12 @@ munge_geom(CPT(Geom) &geom, CPT(GeomVertexData) &data,
   // Record the new result in the cache.
   if (entry == (Geom::CacheEntry *)NULL) {
     // Create a new entry for the result.
-    entry = new Geom::CacheEntry(orig_geom, source_data, this);
+#ifdef USE_MOVE_SEMANTICS
+    // We don't need the key anymore, move the pointers into the CacheEntry.
+    entry = new Geom::CacheEntry(orig_geom, move(key));
+#else
+    entry = new Geom::CacheEntry(orig_geom, key);
+#endif
     {
       LightMutexHolder holder(orig_geom->_cache_lock);
       bool inserted = orig_geom->_cache.insert(Geom::Cache::value_type(&entry->_key, entry)).second;
@@ -176,7 +180,7 @@ munge_geom(CPT(Geom) &geom, CPT(GeomVertexData) &data,
         return true;
       }
     }
-  
+
     // And tell the cache manager about the new entry.  (It might
     // immediately request a delete from the cache of the thing we
     // just added.)
@@ -198,7 +202,7 @@ munge_geom(CPT(Geom) &geom, CPT(GeomVertexData) &data,
 //               exists just to cast away the const pointer.
 ////////////////////////////////////////////////////////////////////
 CPT(GeomVertexFormat) GeomMunger::
-do_munge_format(const GeomVertexFormat *format, 
+do_munge_format(const GeomVertexFormat *format,
                 const GeomVertexAnimationSpec &animation) {
   nassertr(_is_registered, NULL);
   nassertr(format->is_registered(), NULL);
@@ -248,7 +252,7 @@ munge_data_impl(const GeomVertexData *data) {
   nassertr(_is_registered, NULL);
 
   CPT(GeomVertexFormat) orig_format = data->get_format();
-  CPT(GeomVertexFormat) new_format = 
+  CPT(GeomVertexFormat) new_format =
     munge_format(orig_format, orig_format->get_animation());
 
   if (new_format == orig_format) {
@@ -384,7 +388,7 @@ make_registry() {
     _registry = new Registry;
   }
 }
- 
+
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomMunger::do_register
 //       Access: Private
@@ -408,7 +412,7 @@ do_register(Thread *current_thread) {
 
   _is_registered = true;
 }
- 
+
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomMunger::do_unregister
 //       Access: Private
@@ -430,7 +434,7 @@ do_unregister() {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomMunger::CacheEntry::output
 //       Access: Public, Virtual
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void GeomMunger::CacheEntry::
 output(ostream &out) const {
@@ -440,7 +444,7 @@ output(ostream &out) const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomMunger::Registry::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 GeomMunger::Registry::
 Registry() {

+ 77 - 27
panda/src/gobj/geomVertexData.I

@@ -414,7 +414,7 @@ INLINE int GeomVertexData::
 add_transform(TransformTable *table, const VertexTransform *transform,
               TransformMap &already_added) {
   pair<TransformMap::iterator, bool> result = already_added.insert(TransformMap::value_type(transform, table->get_num_transforms()));
-  
+
   if (result.second) {
     table->add_transform(transform);
   }
@@ -425,7 +425,7 @@ add_transform(TransformTable *table, const VertexTransform *transform,
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::CDataCache::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexData::CDataCache::
 CDataCache() {
@@ -434,7 +434,7 @@ CDataCache() {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::CDataCache::Copy Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexData::CDataCache::
 CDataCache(const GeomVertexData::CDataCache &copy) :
@@ -445,7 +445,7 @@ CDataCache(const GeomVertexData::CDataCache &copy) :
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::CacheKey::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexData::CacheKey::
 CacheKey(const GeomVertexFormat *modifier) :
@@ -453,6 +453,30 @@ CacheKey(const GeomVertexFormat *modifier) :
 {
 }
 
+////////////////////////////////////////////////////////////////////
+//     Function: GeomVertexData::CacheKey::Copy Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+INLINE GeomVertexData::CacheKey::
+CacheKey(const CacheKey &copy) :
+  _modifier(copy._modifier)
+{
+}
+
+#ifdef USE_MOVE_SEMANTICS
+////////////////////////////////////////////////////////////////////
+//     Function: GeomVertexData::CacheKey::Move Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+INLINE GeomVertexData::CacheKey::
+CacheKey(CacheKey &&from) NOEXCEPT :
+  _modifier(move(from._modifier))
+{
+}
+#endif  // USE_MOVE_SEMANTICS
+
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::CacheKey::operator <
 //       Access: Public
@@ -466,7 +490,7 @@ operator < (const CacheKey &other) const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::CacheEntry::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexData::CacheEntry::
 CacheEntry(GeomVertexData *source, const GeomVertexFormat *modifier) :
@@ -475,6 +499,32 @@ CacheEntry(GeomVertexData *source, const GeomVertexFormat *modifier) :
 {
 }
 
+////////////////////////////////////////////////////////////////////
+//     Function: GeomVertexData::CacheEntry::Copy Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+INLINE GeomVertexData::CacheEntry::
+CacheEntry(GeomVertexData *source, const CacheKey &key) :
+  _source(source),
+  _key(key)
+{
+}
+
+#ifdef USE_MOVE_SEMANTICS
+////////////////////////////////////////////////////////////////////
+//     Function: GeomVertexData::CacheEntry::Move Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+INLINE GeomVertexData::CacheEntry::
+CacheEntry(GeomVertexData *source, CacheKey &&key) NOEXCEPT :
+  _source(source),
+  _key(move(key))
+{
+}
+#endif  // USE_MOVE_SEMANTICS
+
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::CData::Constructor
 //       Access: Public
@@ -511,7 +561,7 @@ CData(const GeomVertexData::CData &copy) :
 //  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexDataPipelineBase::
-GeomVertexDataPipelineBase(GeomVertexData *object, 
+GeomVertexDataPipelineBase(GeomVertexData *object,
                            Thread *current_thread,
                            GeomVertexData::CData *cdata) :
   _object(object),
@@ -560,7 +610,7 @@ get_current_thread() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineBase::get_usage_hint
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexDataPipelineBase::UsageHint GeomVertexDataPipelineBase::
 get_usage_hint() const {
@@ -570,7 +620,7 @@ get_usage_hint() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineBase::get_format
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE const GeomVertexFormat *GeomVertexDataPipelineBase::
 get_format() const {
@@ -580,7 +630,7 @@ get_format() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineBase::has_column
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE bool GeomVertexDataPipelineBase::
 has_column(const InternalName *name) const {
@@ -590,7 +640,7 @@ has_column(const InternalName *name) const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineBase::get_num_arrays
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE int GeomVertexDataPipelineBase::
 get_num_arrays() const {
@@ -600,7 +650,7 @@ get_num_arrays() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineBase::get_array
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE CPT(GeomVertexArrayData) GeomVertexDataPipelineBase::
 get_array(int i) const {
@@ -611,7 +661,7 @@ get_array(int i) const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineBase::get_transform_table
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE const TransformTable *GeomVertexDataPipelineBase::
 get_transform_table() const {
@@ -621,7 +671,7 @@ get_transform_table() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineBase::get_transform_blend_table
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE CPT(TransformBlendTable) GeomVertexDataPipelineBase::
 get_transform_blend_table() const {
@@ -631,7 +681,7 @@ get_transform_blend_table() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineBase::get_slider_table
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE const SliderTable *GeomVertexDataPipelineBase::
 get_slider_table() const {
@@ -641,7 +691,7 @@ get_slider_table() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineBase::get_modified
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE UpdateSeq GeomVertexDataPipelineBase::
 get_modified() const {
@@ -654,7 +704,7 @@ get_modified() const {
 //  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexDataPipelineReader::
-GeomVertexDataPipelineReader(const GeomVertexData *object, 
+GeomVertexDataPipelineReader(const GeomVertexData *object,
                              Thread *current_thread) :
   GeomVertexDataPipelineBase((GeomVertexData *)object, current_thread,
                              (GeomVertexData::CData *)object->_cycler.read_unlocked(current_thread)),
@@ -668,7 +718,7 @@ GeomVertexDataPipelineReader(const GeomVertexData *object,
 //  Description: Don't attempt to copy these objects.
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexDataPipelineReader::
-GeomVertexDataPipelineReader(const GeomVertexDataPipelineReader &copy) : 
+GeomVertexDataPipelineReader(const GeomVertexDataPipelineReader &copy) :
   GeomVertexDataPipelineBase(copy)
 {
   nassertv(false);
@@ -710,7 +760,7 @@ get_object() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::check_array_readers
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE void GeomVertexDataPipelineReader::
 check_array_readers() const {
@@ -722,7 +772,7 @@ check_array_readers() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::get_array_reader
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE const GeomVertexArrayDataHandle *GeomVertexDataPipelineReader::
 get_array_reader(int i) const {
@@ -734,7 +784,7 @@ get_array_reader(int i) const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::has_vertex
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE bool GeomVertexDataPipelineReader::
 has_vertex() const {
@@ -744,7 +794,7 @@ has_vertex() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::is_vertex_transformed
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE bool GeomVertexDataPipelineReader::
 is_vertex_transformed() const {
@@ -759,7 +809,7 @@ is_vertex_transformed() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::has_normal
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE bool GeomVertexDataPipelineReader::
 has_normal() const {
@@ -769,7 +819,7 @@ has_normal() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::has_color
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE bool GeomVertexDataPipelineReader::
 has_color() const {
@@ -782,7 +832,7 @@ has_color() const {
 //  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexDataPipelineWriter::
-GeomVertexDataPipelineWriter(GeomVertexData *object, bool force_to_0, 
+GeomVertexDataPipelineWriter(GeomVertexData *object, bool force_to_0,
                              Thread *current_thread) :
   GeomVertexDataPipelineBase(object, current_thread,
                              object->_cycler.write_upstream(force_to_0, current_thread)),
@@ -803,7 +853,7 @@ GeomVertexDataPipelineWriter(GeomVertexData *object, bool force_to_0,
 //  Description: Don't attempt to copy these objects.
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexDataPipelineWriter::
-GeomVertexDataPipelineWriter(const GeomVertexDataPipelineWriter &copy) : 
+GeomVertexDataPipelineWriter(const GeomVertexDataPipelineWriter &copy) :
   GeomVertexDataPipelineBase(copy)
 {
   nassertv(false);
@@ -845,7 +895,7 @@ get_object() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineWriter::check_array_writers
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE void GeomVertexDataPipelineWriter::
 check_array_writers() const {
@@ -857,7 +907,7 @@ check_array_writers() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineWriter::get_array_writer
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE GeomVertexArrayDataHandle *GeomVertexDataPipelineWriter::
 get_array_writer(int i) const {

+ 108 - 104
panda/src/gobj/geomVertexData.cxx

@@ -64,7 +64,7 @@ make_cow_copy() {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::Constructor
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 GeomVertexData::
 GeomVertexData(const string &name,
@@ -97,7 +97,7 @@ GeomVertexData(const string &name,
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::Copy Constructor
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 GeomVertexData::
 GeomVertexData(const GeomVertexData &copy) :
@@ -127,7 +127,7 @@ GeomVertexData(const GeomVertexData &copy) :
 //               and it allows you to specify a different format.
 ////////////////////////////////////////////////////////////////////
 GeomVertexData::
-GeomVertexData(const GeomVertexData &copy, 
+GeomVertexData(const GeomVertexData &copy,
                const GeomVertexFormat *format) :
   CopyOnWriteObject(copy),
   _name(copy._name),
@@ -159,7 +159,7 @@ GeomVertexData(const GeomVertexData &copy,
   }
   CLOSE_ITERATE_ALL_STAGES(_cycler);
 }
-  
+
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::Copy Assignment Operator
 //       Access: Published
@@ -186,14 +186,14 @@ operator = (const GeomVertexData &copy) {
     cdata->_modified = Geom::get_next_modified();
     cdata->_animated_vertices = NULL;
     cdata->_animated_vertices_modified = UpdateSeq();
-  } 
+  }
   CLOSE_ITERATE_ALL_STAGES(_cycler);
 }
 
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::Destructor
 //       Access: Published, Virtual
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 GeomVertexData::
 ~GeomVertexData() {
@@ -564,22 +564,22 @@ copy_from(const GeomVertexData *source, bool keep_data_objects,
   pset<int> done_arrays;
 
   for (source_i = 0; source_i < num_arrays; ++source_i) {
-    const GeomVertexArrayFormat *source_array_format = 
+    const GeomVertexArrayFormat *source_array_format =
       source_format->get_array(source_i);
 
     bool array_done = false;
 
     int dest_num_arrays = dest_format->get_num_arrays();
-    for (int dest_i = 0; 
-         dest_i < dest_num_arrays && !array_done; 
+    for (int dest_i = 0;
+         dest_i < dest_num_arrays && !array_done;
          ++dest_i) {
-      const GeomVertexArrayFormat *dest_array_format = 
+      const GeomVertexArrayFormat *dest_array_format =
         dest_format->get_array(dest_i);
       if (dest_array_format->is_data_subset_of(*source_array_format)) {
         // Great!  Just use the same data for this one.
         if (keep_data_objects) {
-          // Copy the data, but keep the same GeomVertexArrayData object.  
-          
+          // Copy the data, but keep the same GeomVertexArrayData object.
+
           PT(GeomVertexArrayData) dest_data = modify_array(dest_i);
           CPT(GeomVertexArrayData) source_data = source->get_array(source_i);
           dest_data->modify_handle()->copy_data_from(source_data->get_handle());
@@ -613,9 +613,9 @@ copy_from(const GeomVertexData *source, bool keep_data_objects,
       int dest_i = dest_format->get_array_with(source_column->get_name());
       if (dest_i >= 0 && done_arrays.count(dest_i) == 0) {
         // The data type exists in the new format; we have to copy it.
-        const GeomVertexArrayFormat *dest_array_format = 
+        const GeomVertexArrayFormat *dest_array_format =
           dest_format->get_array(dest_i);
-        const GeomVertexColumn *dest_column = 
+        const GeomVertexColumn *dest_column =
           dest_array_format->get_column(source_column->get_name());
         nassertv(dest_column != (const GeomVertexColumn *)NULL);
 
@@ -625,12 +625,12 @@ copy_from(const GeomVertexData *source, bool keep_data_objects,
           PT(GeomVertexArrayDataHandle) dest_handle = dest_array_obj->modify_handle();
           unsigned char *dest_array_data = dest_handle->get_write_pointer();
 
-          bytewise_copy(dest_array_data + dest_column->get_start(), 
+          bytewise_copy(dest_array_data + dest_column->get_start(),
                         dest_array_format->get_stride(),
                         array_data + source_column->get_start(), source_array_format->get_stride(),
                         source_column, num_rows);
 
-        } else if (dest_column->is_packed_argb() && 
+        } else if (dest_column->is_packed_argb() &&
                    source_column->is_uint8_rgba()) {
           // A common special case: OpenGL color to DirectX color.
           PT(GeomVertexArrayData) dest_array_obj = modify_array(dest_i);
@@ -638,12 +638,12 @@ copy_from(const GeomVertexData *source, bool keep_data_objects,
           unsigned char *dest_array_data = dest_handle->get_write_pointer();
 
           uint8_rgba_to_packed_argb
-            (dest_array_data + dest_column->get_start(), 
+            (dest_array_data + dest_column->get_start(),
              dest_array_format->get_stride(),
              array_data + source_column->get_start(), source_array_format->get_stride(),
              num_rows);
 
-        } else if (dest_column->is_uint8_rgba() && 
+        } else if (dest_column->is_uint8_rgba() &&
                    source_column->is_packed_argb()) {
           // Another common special case: DirectX color to OpenGL
           // color.
@@ -652,7 +652,7 @@ copy_from(const GeomVertexData *source, bool keep_data_objects,
           unsigned char *dest_array_data = dest_handle->get_write_pointer();
 
           packed_argb_to_uint8_rgba
-            (dest_array_data + dest_column->get_start(), 
+            (dest_array_data + dest_column->get_start(),
              dest_array_format->get_stride(),
              array_data + source_column->get_start(), source_array_format->get_stride(),
              num_rows);
@@ -661,7 +661,7 @@ copy_from(const GeomVertexData *source, bool keep_data_objects,
           // A generic copy.
           if (gobj_cat.is_debug()) {
             gobj_cat.debug()
-              << "generic copy " << *dest_column << " from " 
+              << "generic copy " << *dest_column << " from "
               << *source_column << "\n";
           }
           GeomVertexWriter to(this);
@@ -695,13 +695,13 @@ copy_from(const GeomVertexData *source, bool keep_data_objects,
           GeomVertexWriter weight(this, InternalName::get_transform_weight());
           GeomVertexWriter index(this, InternalName::get_transform_index());
           GeomVertexReader from(source, InternalName::get_transform_blend());
-        
+
           while (!from.is_at_end()) {
             const TransformBlend &blend = blend_table->get_blend(from.get_data1i());
             LVecBase4 weights = LVecBase4::zero();
             LVecBase4i indices(0, 0, 0, 0);
             nassertv(blend.get_num_transforms() <= 4);
-            
+
             for (int i = 0; i < blend.get_num_transforms(); i++) {
               weights[i] = blend.get_weight(i);
               indices[i] = add_transform(transform_table, blend.get_transform(i),
@@ -717,11 +717,11 @@ copy_from(const GeomVertexData *source, bool keep_data_objects,
           // use the same n transforms, in the same order, for each vertex.
           GeomVertexWriter weight(this, InternalName::get_transform_weight());
           GeomVertexReader from(source, InternalName::get_transform_blend());
-        
+
           while (!from.is_at_end()) {
             const TransformBlend &blend = blend_table->get_blend(from.get_data1i());
             LVecBase4 weights = LVecBase4::zero();
-            
+
             for (int i = 0; i < blend.get_num_transforms(); i++) {
               int index = add_transform(transform_table, blend.get_transform(i),
                                         already_added);
@@ -733,7 +733,7 @@ copy_from(const GeomVertexData *source, bool keep_data_objects,
             }
           }
         }
-        
+
         clear_transform_blend_table();
         set_transform_table(TransformTable::register_table(transform_table));
       }
@@ -754,7 +754,7 @@ copy_from(const GeomVertexData *source, bool keep_data_objects,
 //               have recently made in an upstream thread.
 ////////////////////////////////////////////////////////////////////
 void GeomVertexData::
-copy_row_from(int dest_row, const GeomVertexData *source, 
+copy_row_from(int dest_row, const GeomVertexData *source,
               int source_row, Thread *current_thread) {
   const GeomVertexFormat *source_format = source->get_format();
   const GeomVertexFormat *dest_format = get_format();
@@ -842,7 +842,7 @@ convert_to(const GeomVertexFormat *new_format) const {
   }
   PStatTimer timer(_convert_pcollector);
 
-  PT(GeomVertexData) new_data = 
+  PT(GeomVertexData) new_data =
     new GeomVertexData(get_name(), new_format, get_usage_hint());
   new_data->set_transform_blend_table(get_transform_blend_table());
   new_data->set_slider_table(get_slider_table());
@@ -852,7 +852,12 @@ convert_to(const GeomVertexFormat *new_format) const {
   // Record the new result in the cache.
   if (entry == (CacheEntry *)NULL) {
     // Create a new entry for the result.
-    entry = new CacheEntry((GeomVertexData *)this, new_format);
+#ifdef USE_MOVE_SEMANTICS
+    // We don't need the key anymore, move the pointers into the CacheEntry.
+    entry = new CacheEntry((GeomVertexData *)this, move(key));
+#else
+    entry = new CacheEntry((GeomVertexData *)this, key);
+#endif
     {
       LightMutexHolder holder(_cache_lock);
       bool inserted = ((GeomVertexData *)this)->_cache.insert(Cache::value_type(&entry->_key, entry)).second;
@@ -862,7 +867,7 @@ convert_to(const GeomVertexFormat *new_format) const {
         return new_data;
       }
     }
-    
+
     // And tell the cache manager about the new entry.  (It might
     // immediately request a delete from the cache of the thing we
     // just added.)
@@ -876,7 +881,6 @@ convert_to(const GeomVertexFormat *new_format) const {
   return new_data;
 }
 
-
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::scale_color
 //       Access: Published
@@ -888,7 +892,7 @@ convert_to(const GeomVertexFormat *new_format) const {
 ////////////////////////////////////////////////////////////////////
 CPT(GeomVertexData) GeomVertexData::
 scale_color(const LVecBase4 &color_scale) const {
-  const GeomVertexColumn *old_column = 
+  const GeomVertexColumn *old_column =
     get_format()->get_column(InternalName::get_color());
   if (old_column == (GeomVertexColumn *)NULL) {
     return this;
@@ -965,7 +969,7 @@ scale_color(const LVecBase4 &color_scale, int num_components,
 ////////////////////////////////////////////////////////////////////
 CPT(GeomVertexData) GeomVertexData::
 set_color(const LColor &color) const {
-  const GeomVertexColumn *old_column = 
+  const GeomVertexColumn *old_column =
     get_format()->get_column(InternalName::get_color());
   if (old_column == (GeomVertexColumn *)NULL) {
     return this;
@@ -1026,7 +1030,7 @@ set_color(const LColor &color, int num_components,
 ////////////////////////////////////////////////////////////////////
 CPT(GeomVertexData) GeomVertexData::
 reverse_normals() const {
-  const GeomVertexColumn *old_column = 
+  const GeomVertexColumn *old_column =
     get_format()->get_column(InternalName::get_normal());
   if (old_column == (GeomVertexColumn *)NULL) {
     return this;
@@ -1094,16 +1098,16 @@ animate_vertices(bool force, Thread *current_thread) const {
     PStatTimer timer2(((GeomVertexData *)this)->_blends_pcollector, current_thread);
     if (!cdata->_transform_blend_table.is_null()) {
       if (cdata->_slider_table != (SliderTable *)NULL) {
-        modified = 
+        modified =
           max(cdata->_transform_blend_table.get_read_pointer()->get_modified(current_thread),
               cdata->_slider_table->get_modified(current_thread));
       } else {
         modified = cdata->_transform_blend_table.get_read_pointer()->get_modified(current_thread);
       }
-      
+
     } else if (cdata->_slider_table != (SliderTable *)NULL) {
       modified = cdata->_slider_table->get_modified(current_thread);
-      
+
     } else {
       // No transform blend table or slider table--ergo, no vertex
       // animation.
@@ -1185,7 +1189,7 @@ transform_vertices(const LMatrix4 &mat, int begin_row, int end_row) {
     GeomVertexRewriter data(this, format->get_point(ci));
     do_transform_point_column(format, data, mat, begin_row, end_row);
   }
-  
+
   for (ci = 0; ci < format->get_num_vectors(); ci++) {
     GeomVertexRewriter data(this, format->get_vector(ci));
     do_transform_vector_column(format, data, mat, begin_row, end_row);
@@ -1208,7 +1212,7 @@ bytewise_copy(unsigned char *to, int to_stride,
       << ", " << (const void *)from << ", " << from_stride
       << ", " << *from_type << ", " << num_records << ")\n";
   }
-  if (to_stride == from_type->get_total_bytes() && 
+  if (to_stride == from_type->get_total_bytes() &&
       from_stride == from_type->get_total_bytes()) {
     // Fantastic!  It's just a linear array of this one data type.
     // Copy the whole thing all at once.
@@ -1257,32 +1261,32 @@ replace_column(InternalName *name, int num_components,
       // just drop the whole array.
       new_format->remove_array(old_type_array);
       removed_type_array = true;
-      
+
     } else {
       // Remove the description for the type, but don't bother to
       // repack the array.
       array_format->remove_column(name);
     }
   }
-    
+
   // Now define a new array to contain just the type.
   int new_type_array = -1;
   if (num_components != 0) {
-    PT(GeomVertexArrayFormat) type_array_format = 
+    PT(GeomVertexArrayFormat) type_array_format =
       new GeomVertexArrayFormat(name, num_components, numeric_type, contents);
     new_type_array = new_format->add_array(type_array_format);
   }
 
-  CPT(GeomVertexFormat) format = 
+  CPT(GeomVertexFormat) format =
     GeomVertexFormat::register_format(new_format);
 
   if (gobj_cat.is_debug()) {
     gobj_cat.debug()
       << "Replacing data type " << *name << "; converting "
-      << get_num_rows() << " rows from " 
+      << get_num_rows() << " rows from "
       << *cdata->_format << " to " << *format << "\n";
   }
-  
+
   PT(GeomVertexData) new_data = new GeomVertexData(*this, format);
 
   int j = 0;
@@ -1320,7 +1324,7 @@ replace_column(InternalName *name, int num_components,
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::output
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void GeomVertexData::
 output(ostream &out) const {
@@ -1333,7 +1337,7 @@ output(ostream &out) const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::write
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void GeomVertexData::
 write(ostream &out, int indent_level) const {
@@ -1369,16 +1373,16 @@ describe_vertex(ostream &out, int row) const {
   if (format->get_animation().get_animation_type() == AT_panda) {
     tb_table = get_transform_blend_table();
   }
-  
+
   int num_columns = format->get_num_columns();
   for (int ci = 0; ci < num_columns; ++ci) {
     int ai = format->get_array_with(ci);
     const GeomVertexColumn *column = format->get_column(ci);
     reader.set_column(ai, column);
-    
+
     int num_values = min(column->get_num_values(), 4);
     const LVecBase4 &d = reader.get_data4();
-    
+
     out << "  " << *column->get_name();
     for (int v = 0; v < num_values; v++) {
       out << " " << d[v];
@@ -1545,7 +1549,7 @@ update_animated_vertices(GeomVertexData::CData *cdata, Thread *current_thread) {
 
   if (cdata->_animated_vertices == (GeomVertexData *)NULL) {
     new_format = orig_format->get_post_animated_format();
-    cdata->_animated_vertices = 
+    cdata->_animated_vertices =
       new GeomVertexData(get_name(), new_format,
                          min(get_usage_hint(), UH_dynamic));
   }
@@ -1670,7 +1674,7 @@ update_animated_vertices(GeomVertexData::CData *cdata, Thread *current_thread) {
 
     CPT(GeomVertexArrayFormat) blend_array_format = orig_format->get_array(blend_array_index);
 
-    if (blend_array_format->get_stride() == 2 && 
+    if (blend_array_format->get_stride() == 2 &&
         blend_array_format->get_column(0)->get_component_bytes() == 2) {
       // The blend indices are a table of ushorts.  Optimize this
       // common case.
@@ -1688,11 +1692,11 @@ update_animated_vertices(GeomVertexData::CData *cdata, Thread *current_thread) {
 
           int first_vertex = begin;
           int first_bi = blendt[first_vertex];
-          
+
           while (first_vertex < end) {
             // At this point, first_vertex is the first of a series of
             // vertices that shares the blend index first_bi.
-            
+
             // Scan for the end of this series of vertices--we're
             // looking for the next vertex with a different blend index.
             int next_vertex = first_vertex;
@@ -1705,13 +1709,13 @@ update_animated_vertices(GeomVertexData::CData *cdata, Thread *current_thread) {
               }
               ++next_vertex;
             }
-            
+
             // We've just reached the end of the vertices with a matching
             // blend index.  Transform all those vertices as a block.
             LMatrix4 mat;
             tb_table->get_blend(first_bi).get_blend(mat, current_thread);
             new_data->do_transform_point_column(new_format, data, mat, first_vertex, next_vertex);
-            
+
             first_vertex = next_vertex;
             first_bi = next_bi;
           }
@@ -1728,11 +1732,11 @@ update_animated_vertices(GeomVertexData::CData *cdata, Thread *current_thread) {
 
           int first_vertex = begin;
           int first_bi = blendt[first_vertex];
-          
+
           while (first_vertex < end) {
             // At this point, first_vertex is the first of a series of
             // vertices that shares the blend index first_bi.
-            
+
             // Scan for the end of this series of vertices--we're
             // looking for the next vertex with a different blend index.
             int next_vertex = first_vertex;
@@ -1745,13 +1749,13 @@ update_animated_vertices(GeomVertexData::CData *cdata, Thread *current_thread) {
               }
               ++next_vertex;
             }
-            
+
             // We've just reached the end of the vertices with a matching
             // blend index.  Transform all those vertices as a block.
             LMatrix4 mat;
             tb_table->get_blend(first_bi).get_blend(mat, current_thread);
             new_data->do_transform_vector_column(new_format, data, mat, first_vertex, next_vertex);
-            
+
             first_vertex = next_vertex;
             first_bi = next_bi;
           }
@@ -1773,14 +1777,14 @@ update_animated_vertices(GeomVertexData::CData *cdata, Thread *current_thread) {
           int end = rows.get_subrange_end(i);
           nassertv(begin < end);
           blendi.set_row_unsafe(begin);
-          
+
           int first_vertex = begin;
           int first_bi = blendi.get_data1i();
-          
+
           while (first_vertex < end) {
             // At this point, first_vertex is the first of a series of
             // vertices that shares the blend index first_bi.
-            
+
             // Scan for the end of this series of vertices--we're
             // looking for the next vertex with a different blend index.
             int next_vertex = first_vertex;
@@ -1793,13 +1797,13 @@ update_animated_vertices(GeomVertexData::CData *cdata, Thread *current_thread) {
               }
               ++next_vertex;
             }
-            
+
             // We've just reached the end of the vertices with a matching
             // blend index.  Transform all those vertices as a block.
             LMatrix4 mat;
             tb_table->get_blend(first_bi).get_blend(mat, current_thread);
             new_data->do_transform_point_column(new_format, data, mat, first_vertex, next_vertex);
-            
+
             first_vertex = next_vertex;
             first_bi = next_bi;
           }
@@ -1814,14 +1818,14 @@ update_animated_vertices(GeomVertexData::CData *cdata, Thread *current_thread) {
           int end = rows.get_subrange_end(i);
           nassertv(begin != end);
           blendi.set_row_unsafe(begin);
-          
+
           int first_vertex = begin;
           int first_bi = blendi.get_data1i();
-          
+
           while (first_vertex < end) {
             // At this point, first_vertex is the first of a series of
             // vertices that shares the blend index first_bi.
-            
+
             // Scan for the end of this series of vertices--we're
             // looking for the next vertex with a different blend index.
             int next_vertex = first_vertex;
@@ -1834,13 +1838,13 @@ update_animated_vertices(GeomVertexData::CData *cdata, Thread *current_thread) {
               }
               ++next_vertex;
             }
-            
+
             // We've just reached the end of the vertices with a matching
             // blend index.  Transform all those vertices as a block.
             LMatrix4 mat;
             tb_table->get_blend(first_bi).get_blend(mat, current_thread);
             new_data->do_transform_vector_column(new_format, data, mat, first_vertex, next_vertex);
-            
+
             first_vertex = next_vertex;
             first_bi = next_bi;
           }
@@ -1880,11 +1884,11 @@ do_transform_point_column(const GeomVertexFormat *format, GeomVertexRewriter &da
     } else {
       table_xform_vecbase4f(datat, num_rows, stride, matf);
     }
-    
+
   } else if (num_values == 4) {
     // Use the GeomVertexRewriter to adjust the 4-component
     // points.
-  
+
     data.set_row_unsafe(begin_row);
     for (int j = begin_row; j < end_row; ++j) {
       LPoint4 vertex = data.get_data4();
@@ -2110,7 +2114,7 @@ finalize(BamReader *manager) {
   CDWriter cdata(_cycler, true);
 
   for (size_t i = 0; i < cdata->_arrays.size(); ++i) {
-    CPT(GeomVertexFormat) new_format = 
+    CPT(GeomVertexFormat) new_format =
       GeomVertexFormat::register_format(cdata->_format);
     manager->change_pointer(cdata->_format, new_format);
     cdata->_format = new_format;
@@ -2124,14 +2128,14 @@ finalize(BamReader *manager) {
   }
 
   if (cdata->_transform_table != (TransformTable *)NULL) {
-    CPT(TransformTable) new_transform_table = 
+    CPT(TransformTable) new_transform_table =
       TransformTable::register_table(cdata->_transform_table);
     manager->change_pointer(cdata->_transform_table, new_transform_table);
     cdata->_transform_table = new_transform_table;
   }
 
   if (cdata->_slider_table != (SliderTable *)NULL) {
-    CPT(SliderTable) new_slider_table = 
+    CPT(SliderTable) new_slider_table =
       SliderTable::register_table(cdata->_slider_table);
     manager->change_pointer(cdata->_slider_table, new_slider_table);
     cdata->_slider_table = new_slider_table;
@@ -2181,11 +2185,11 @@ evict_callback() {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexData::CacheEntry::output
 //       Access: Public, Virtual
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void GeomVertexData::CacheEntry::
 output(ostream &out) const {
-  out << "vertex data " << (void *)_source << " to " 
+  out << "vertex data " << (void *)_source << " to "
       << *_key._modifier;
 }
 
@@ -2236,7 +2240,7 @@ complete_pointers(TypedWritable **p_list, BamReader *manager) {
 
   Arrays::iterator ai;
   for (ai = _arrays.begin(); ai != _arrays.end(); ++ai) {
-    (*ai) = DCAST(GeomVertexArrayData, p_list[pi++]);    
+    (*ai) = DCAST(GeomVertexArrayData, p_list[pi++]);
   }
 
   _transform_table = DCAST(TransformTable, p_list[pi++]);
@@ -2295,7 +2299,7 @@ fillin(DatagramIterator &scan, BamReader *manager) {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineBase::get_num_bytes
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 int GeomVertexDataPipelineBase::
 get_num_bytes() const {
@@ -2312,7 +2316,7 @@ get_num_bytes() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::get_num_rows
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 int GeomVertexDataPipelineReader::
 get_num_rows() const {
@@ -2332,13 +2336,13 @@ get_num_rows() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::get_array_info
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 bool GeomVertexDataPipelineReader::
-get_array_info(const InternalName *name, 
+get_array_info(const InternalName *name,
                const GeomVertexArrayDataHandle *&array_reader,
-               int &num_values, 
-               GeomVertexDataPipelineReader::NumericType &numeric_type, 
+               int &num_values,
+               GeomVertexDataPipelineReader::NumericType &numeric_type,
                int &start, int &stride) const {
   nassertr(_got_array_readers, false);
   int array_index;
@@ -2357,12 +2361,12 @@ get_array_info(const InternalName *name,
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::get_vertex_info
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 bool GeomVertexDataPipelineReader::
 get_vertex_info(const GeomVertexArrayDataHandle *&array_reader,
-                int &num_values, 
-                GeomVertexDataPipelineReader::NumericType &numeric_type, 
+                int &num_values,
+                GeomVertexDataPipelineReader::NumericType &numeric_type,
                 int &start, int &stride) const {
   nassertr(_got_array_readers, false);
   int array_index = _cdata->_format->get_vertex_array_index();
@@ -2382,11 +2386,11 @@ get_vertex_info(const GeomVertexArrayDataHandle *&array_reader,
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::get_normal_info
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 bool GeomVertexDataPipelineReader::
 get_normal_info(const GeomVertexArrayDataHandle *&array_reader,
-                GeomVertexDataPipelineReader::NumericType &numeric_type, 
+                GeomVertexDataPipelineReader::NumericType &numeric_type,
                 int &start, int &stride) const {
   nassertr(_got_array_readers, false);
   int array_index = _cdata->_format->get_normal_array_index();
@@ -2405,12 +2409,12 @@ get_normal_info(const GeomVertexArrayDataHandle *&array_reader,
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::get_color_info
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 bool GeomVertexDataPipelineReader::
 get_color_info(const GeomVertexArrayDataHandle *&array_reader,
-               int &num_values, 
-               GeomVertexDataPipelineReader::NumericType &numeric_type, 
+               int &num_values,
+               GeomVertexDataPipelineReader::NumericType &numeric_type,
                int &start, int &stride) const {
   nassertr(_got_array_readers, false);
   int array_index = _cdata->_format->get_color_array_index();
@@ -2430,7 +2434,7 @@ get_color_info(const GeomVertexArrayDataHandle *&array_reader,
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::make_array_readers
 //       Access: Private
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void GeomVertexDataPipelineReader::
 make_array_readers() {
@@ -2449,7 +2453,7 @@ make_array_readers() {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineReader::delete_array_readers
 //       Access: Private
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void GeomVertexDataPipelineReader::
 delete_array_readers() {
@@ -2462,7 +2466,7 @@ delete_array_readers() {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineWriter::get_num_rows
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 int GeomVertexDataPipelineWriter::
 get_num_rows() const {
@@ -2482,7 +2486,7 @@ get_num_rows() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineWriter::set_num_rows
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 bool GeomVertexDataPipelineWriter::
 set_num_rows(int n) {
@@ -2510,10 +2514,10 @@ set_num_rows(int n) {
     // (1, 1, 1, 1), for the programmer's convenience.
     GeomVertexArrayDataHandle *array_writer = _array_writers[color_array];
     const GeomVertexArrayFormat *array_format = array_writer->get_array_format();
-    const GeomVertexColumn *column = 
+    const GeomVertexColumn *column =
       array_format->get_column(InternalName::get_color());
     int stride = array_format->get_stride();
-    unsigned char *start = 
+    unsigned char *start =
       array_writer->get_write_pointer() + column->get_start();
     unsigned char *stop = start + array_writer->get_data_size_bytes();
     unsigned char *pointer = start + stride * orig_color_rows;
@@ -2554,7 +2558,7 @@ set_num_rows(int n) {
     case NT_stdfloat:
       // Shouldn't have this type in the format.
       nassertr(false, false);
-    }          
+    }
   }
 
   if (any_changed) {
@@ -2569,7 +2573,7 @@ set_num_rows(int n) {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineWriter::unclean_set_num_rows
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 bool GeomVertexDataPipelineWriter::
 unclean_set_num_rows(int n) {
@@ -2598,7 +2602,7 @@ unclean_set_num_rows(int n) {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineWriter::reserve_num_rows
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 bool GeomVertexDataPipelineWriter::
 reserve_num_rows(int n) {
@@ -2619,7 +2623,7 @@ reserve_num_rows(int n) {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineWriter::modify_array
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 PT(GeomVertexArrayData) GeomVertexDataPipelineWriter::
 modify_array(int i) {
@@ -2642,7 +2646,7 @@ modify_array(int i) {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineWriter::set_array
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void GeomVertexDataPipelineWriter::
 set_array(int i, const GeomVertexArrayData *array) {
@@ -2660,7 +2664,7 @@ set_array(int i, const GeomVertexArrayData *array) {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineWriter::make_array_writers
 //       Access: Private
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void GeomVertexDataPipelineWriter::
 make_array_writers() {
@@ -2683,7 +2687,7 @@ make_array_writers() {
 ////////////////////////////////////////////////////////////////////
 //     Function: GeomVertexDataPipelineWriter::delete_array_writers
 //       Access: Private
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void GeomVertexDataPipelineWriter::
 delete_array_writers() {

+ 33 - 24
panda/src/gobj/geomVertexData.h

@@ -78,10 +78,10 @@ private:
   GeomVertexData();
 protected:
   virtual PT(CopyOnWriteObject) make_cow_copy();
-  
+
 PUBLISHED:
   GeomVertexData(const string &name,
-                 const GeomVertexFormat *format, 
+                 const GeomVertexFormat *format,
                  UsageHint usage_hint);
   GeomVertexData(const GeomVertexData &copy);
   GeomVertexData(const GeomVertexData &copy,
@@ -136,17 +136,17 @@ PUBLISHED:
 
   void copy_from(const GeomVertexData *source, bool keep_data_objects,
                  Thread *current_thread = Thread::get_current_thread());
-  void copy_row_from(int dest_row, const GeomVertexData *source, 
+  void copy_row_from(int dest_row, const GeomVertexData *source,
                      int source_row, Thread *current_thread);
   CPT(GeomVertexData) convert_to(const GeomVertexFormat *new_format) const;
-  CPT(GeomVertexData) 
+  CPT(GeomVertexData)
     scale_color(const LVecBase4 &color_scale) const;
-  CPT(GeomVertexData) 
+  CPT(GeomVertexData)
     scale_color(const LVecBase4 &color_scale, int num_components,
                 NumericType numeric_type, Contents contents) const;
-  CPT(GeomVertexData) 
+  CPT(GeomVertexData)
     set_color(const LColor &color) const;
-  CPT(GeomVertexData) 
+  CPT(GeomVertexData)
     set_color(const LColor &color, int num_components,
               NumericType numeric_type, Contents contents) const;
 
@@ -157,7 +157,7 @@ PUBLISHED:
   void transform_vertices(const LMatrix4 &mat);
   void transform_vertices(const LMatrix4 &mat, int begin_row, int end_row);
 
-  PT(GeomVertexData) 
+  PT(GeomVertexData)
     replace_column(InternalName *name, int num_components,
                    NumericType numeric_type, Contents contents) const;
 
@@ -191,7 +191,7 @@ private:
                             int num_records);
 
   typedef pmap<const VertexTransform *, int> TransformMap;
-  INLINE static int 
+  INLINE static int
   add_transform(TransformTable *table, const VertexTransform *transform,
                 TransformMap &already_added);
 
@@ -212,7 +212,7 @@ private:
     }
 
     CPT(GeomVertexData) _result;
-    
+
   public:
     static TypeHandle get_class_type() {
       return _type_handle;
@@ -220,7 +220,7 @@ private:
     static void init_type() {
       register_type(_type_handle, "GeomVertexData::CDataCache");
     }
-    
+
   private:
     static TypeHandle _type_handle;
   };
@@ -236,15 +236,24 @@ public:
   class EXPCL_PANDA_GOBJ CacheKey {
   public:
     INLINE CacheKey(const GeomVertexFormat *modifier);
+    INLINE CacheKey(const CacheKey &copy);
+#ifdef USE_MOVE_SEMANTICS
+    INLINE CacheKey(CacheKey &&from) NOEXCEPT;
+#endif
+
     INLINE bool operator < (const CacheKey &other) const;
 
     CPT(GeomVertexFormat) _modifier;
   };
-  // It is not clear why MSVC7 needs this class to be public.  
+  // It is not clear why MSVC7 needs this class to be public.
   class EXPCL_PANDA_GOBJ CacheEntry : public GeomCacheEntry {
   public:
     INLINE CacheEntry(GeomVertexData *source,
                       const GeomVertexFormat *modifier);
+    INLINE CacheEntry(GeomVertexData *source, const CacheKey &key);
+#ifdef USE_MOVE_SEMANTICS
+    INLINE CacheEntry(GeomVertexData *source, CacheKey &&key) NOEXCEPT;
+#endif
     ALLOC_DELETED_CHAIN(CacheEntry);
 
     virtual void evict_callback();
@@ -254,7 +263,7 @@ public:
     CacheKey _key;
 
     PipelineCycler<CDataCache> _cycler;
-    
+
   public:
     static TypeHandle get_class_type() {
       return _type_handle;
@@ -264,7 +273,7 @@ public:
       register_type(_type_handle, "GeomVertexData::CacheEntry",
                     GeomCacheEntry::get_class_type());
     }
-    
+
   private:
     static TypeHandle _type_handle;
   };
@@ -294,7 +303,7 @@ private:
     PT(GeomVertexData) _animated_vertices;
     UpdateSeq _animated_vertices_modified;
     UpdateSeq _modified;
-    
+
   public:
     static TypeHandle get_class_type() {
       return _type_handle;
@@ -302,7 +311,7 @@ private:
     static void init_type() {
       register_type(_type_handle, "GeomVertexData::CData");
     }
-    
+
   private:
     static TypeHandle _type_handle;
   };
@@ -323,11 +332,11 @@ private:
                                  const LMatrix4 &mat, int begin_row, int end_row);
   void do_transform_vector_column(const GeomVertexFormat *format, GeomVertexRewriter &data,
                                   const LMatrix4 &mat, int begin_row, int end_row);
-  static void table_xform_point3f(unsigned char *datat, size_t num_rows, 
+  static void table_xform_point3f(unsigned char *datat, size_t num_rows,
                                   size_t stride, const LMatrix4f &matf);
-  static void table_xform_vector3f(unsigned char *datat, size_t num_rows, 
+  static void table_xform_vector3f(unsigned char *datat, size_t num_rows,
                                    size_t stride, const LMatrix4f &matf);
-  static void table_xform_vecbase4f(unsigned char *datat, size_t num_rows, 
+  static void table_xform_vecbase4f(unsigned char *datat, size_t num_rows,
                                     size_t stride, const LMatrix4f &matf);
 
   static PStatCollector _convert_pcollector;
@@ -386,7 +395,7 @@ private:
 ////////////////////////////////////////////////////////////////////
 class EXPCL_PANDA_GOBJ GeomVertexDataPipelineBase : public GeomEnums {
 protected:
-  INLINE GeomVertexDataPipelineBase(GeomVertexData *object, 
+  INLINE GeomVertexDataPipelineBase(GeomVertexData *object,
                                     Thread *current_thread,
                                     GeomVertexData::CData *cdata);
 
@@ -436,15 +445,15 @@ public:
   INLINE const GeomVertexArrayDataHandle *get_array_reader(int i) const;
   int get_num_rows() const;
 
-  bool get_array_info(const InternalName *name, 
+  bool get_array_info(const InternalName *name,
                       const GeomVertexArrayDataHandle *&array_reader,
-                      int &num_values, NumericType &numeric_type, 
+                      int &num_values, NumericType &numeric_type,
                       int &start, int &stride) const;
 
   INLINE bool has_vertex() const;
   INLINE bool is_vertex_transformed() const;
   bool get_vertex_info(const GeomVertexArrayDataHandle *&array_reader,
-                       int &num_values, NumericType &numeric_type, 
+                       int &num_values, NumericType &numeric_type,
                        int &start, int &stride) const;
 
   INLINE bool has_normal() const;
@@ -454,7 +463,7 @@ public:
 
   INLINE bool has_color() const;
   bool get_color_info(const GeomVertexArrayDataHandle *&array_reader,
-                      int &num_values, NumericType &numeric_type, 
+                      int &num_values, NumericType &numeric_type,
                       int &start, int &stride) const;
 
 private:

+ 12 - 1
panda/src/linmath/lmatrix3_src.I

@@ -385,6 +385,17 @@ is_nan() const {
     cnan(_m(2, 0)) || cnan(_m(2, 1)) || cnan(_m(2, 2));
 }
 
+////////////////////////////////////////////////////////////////////
+//     Function: LMatrix3::is_identity
+//       Access: Public
+//  Description: Returns true if this is (close enough to) the
+//               identity matrix, false otherwise.
+////////////////////////////////////////////////////////////////////
+INLINE_LINMATH bool FLOATNAME(LMatrix3)::
+is_identity() const {
+  return almost_equal(ident_mat(), NEARLY_ZERO(FLOATTYPE));
+}
+
 ////////////////////////////////////////////////////////////////////
 //     Function: LMatrix3::get_cell
 //       Access: Published
@@ -504,7 +515,7 @@ operator == (const FLOATNAME(LMatrix3) &other) const {
 ////////////////////////////////////////////////////////////////////
 INLINE_LINMATH bool FLOATNAME(LMatrix3)::
 operator != (const FLOATNAME(LMatrix3) &other) const {
-  return !operator == (other);
+  return compare_to(other) != 0;
 }
 
 ////////////////////////////////////////////////////////////////////

+ 9 - 6
panda/src/linmath/lmatrix3_src.cxx

@@ -77,7 +77,7 @@ set_scale_shear_mat(const FLOATNAME(LVecBase3) &scale,
           shear._v(1) * scale._v(2), shear._v(2) * scale._v(2), scale._v(2));
     }
     break;
-    
+
   case CS_zup_left:
     if (temp_hpr_fix) {
       set(scale._v(0), shear._v(0) * scale._v(0), 0.0f,
@@ -89,7 +89,7 @@ set_scale_shear_mat(const FLOATNAME(LVecBase3) &scale,
           -shear._v(1) * scale._v(2), -shear._v(2) * scale._v(2), scale._v(2));
     }
     break;
-    
+
   case CS_yup_right:
     if (temp_hpr_fix) {
       set(scale._v(0), 0.0f, shear._v(1) * scale._v(0),
@@ -101,7 +101,7 @@ set_scale_shear_mat(const FLOATNAME(LVecBase3) &scale,
           shear._v(1) * scale._v(2), 0.0f, scale._v(2));
     }
     break;
-    
+
   case CS_yup_left:
     if (temp_hpr_fix) {
       set(scale._v(0), 0.0f, -shear._v(1) * scale._v(0),
@@ -113,7 +113,7 @@ set_scale_shear_mat(const FLOATNAME(LVecBase3) &scale,
           -shear._v(1) * scale._v(2), 0.0f, scale._v(2));
     }
     break;
-    
+
   case CS_default:
   case CS_invalid:
   default:
@@ -352,6 +352,9 @@ set_rotate_mat_normaxis(FLOATTYPE angle, const FLOATNAME(LVecBase3) &axis,
 bool FLOATNAME(LMatrix3)::
 almost_equal(const FLOATNAME(LMatrix3) &other, FLOATTYPE threshold) const {
   TAU_PROFILE("bool LMatrix3::almost_equal(const LMatrix3 &, FLOATTYPE)", " ", TAU_USER);
+#ifdef HAVE_EIGEN
+  return ((_m - other._m).cwiseAbs().maxCoeff() < NEARLY_ZERO(FLOATTYPE));
+#else
   return (IS_THRESHOLD_EQUAL((*this)(0, 0), other(0, 0), threshold) &&
           IS_THRESHOLD_EQUAL((*this)(0, 1), other(0, 1), threshold) &&
           IS_THRESHOLD_EQUAL((*this)(0, 2), other(0, 2), threshold) &&
@@ -361,9 +364,9 @@ almost_equal(const FLOATNAME(LMatrix3) &other, FLOATTYPE threshold) const {
           IS_THRESHOLD_EQUAL((*this)(2, 0), other(2, 0), threshold) &&
           IS_THRESHOLD_EQUAL((*this)(2, 1), other(2, 1), threshold) &&
           IS_THRESHOLD_EQUAL((*this)(2, 2), other(2, 2), threshold));
+#endif
 }
 
-
 ////////////////////////////////////////////////////////////////////
 //     Function: LMatrix3::output
 //       Access: Published
@@ -440,7 +443,7 @@ void FLOATNAME(LMatrix3)::
 write_datagram_fixed(Datagram &destination) const {
   for (int i = 0; i < 3; ++i) {
     for (int j = 0; j < 3; ++j) {
-#if FLOATTOKEN == 'f' 
+#if FLOATTOKEN == 'f'
       destination.add_float32(get_cell(i,j));
 #else
       destination.add_float64(get_cell(i,j));

+ 7 - 6
panda/src/linmath/lmatrix3_src.h

@@ -99,6 +99,7 @@ PUBLISHED:
   INLINE_LINMATH static int size();
 
   INLINE_LINMATH bool is_nan() const;
+  INLINE_LINMATH bool is_identity() const;
 
   INLINE_LINMATH FLOATTYPE get_cell(int row, int col) const;
   INLINE_LINMATH void set_cell(int row, int col, FLOATTYPE value);
@@ -247,28 +248,28 @@ PUBLISHED:
     scale_mat(FLOATTYPE sx, FLOATTYPE sy, FLOATTYPE sz);
 
   INLINE_LINMATH void
-    set_shear_mat(const FLOATNAME(LVecBase3) &shear, 
+    set_shear_mat(const FLOATNAME(LVecBase3) &shear,
                   CoordinateSystem cs = CS_default);
 
   static INLINE_LINMATH FLOATNAME(LMatrix3)
-    shear_mat(const FLOATNAME(LVecBase3) &shear, 
+    shear_mat(const FLOATNAME(LVecBase3) &shear,
               CoordinateSystem cs = CS_default);
   static INLINE_LINMATH FLOATNAME(LMatrix3)
-    shear_mat(FLOATTYPE shxy, FLOATTYPE shxz, FLOATTYPE shyz, 
+    shear_mat(FLOATTYPE shxy, FLOATTYPE shxz, FLOATTYPE shyz,
               CoordinateSystem cs = CS_default);
 
   void
     set_scale_shear_mat(const FLOATNAME(LVecBase3) &scale,
-                        const FLOATNAME(LVecBase3) &shear, 
+                        const FLOATNAME(LVecBase3) &shear,
                         CoordinateSystem cs = CS_default);
 
   static INLINE_LINMATH FLOATNAME(LMatrix3)
     scale_shear_mat(const FLOATNAME(LVecBase3) &scale,
-                    const FLOATNAME(LVecBase3) &shear, 
+                    const FLOATNAME(LVecBase3) &shear,
                     CoordinateSystem cs = CS_default);
   static INLINE_LINMATH FLOATNAME(LMatrix3)
     scale_shear_mat(FLOATTYPE sx, FLOATTYPE sy, FLOATTYPE sz,
-                    FLOATTYPE shxy, FLOATTYPE shxz, FLOATTYPE shyz, 
+                    FLOATTYPE shxy, FLOATTYPE shxz, FLOATTYPE shyz,
                     CoordinateSystem cs = CS_default);
 
   static const FLOATNAME(LMatrix3) &convert_mat(CoordinateSystem from,

+ 13 - 1
panda/src/linmath/lmatrix4_src.I

@@ -616,6 +616,18 @@ is_nan() const {
     cnan(_m(3, 0)) || cnan(_m(3, 1)) || cnan(_m(3, 2)) || cnan(_m(3, 3));
 }
 
+////////////////////////////////////////////////////////////////////
+//     Function: LMatrix4::is_identity
+//       Access: Public
+//  Description: Returns true if this is (close enough to) the
+//               identity matrix, false otherwise.
+////////////////////////////////////////////////////////////////////
+INLINE_LINMATH bool FLOATNAME(LMatrix4)::
+is_identity() const {
+  // Eigen has isIdentity, but it seems to be twice as slow as this.
+  return almost_equal(ident_mat(), NEARLY_ZERO(FLOATTYPE));
+}
+
 ////////////////////////////////////////////////////////////////////
 //     Function: LMatrix4::get_cell
 //       Access: Public
@@ -735,7 +747,7 @@ operator == (const FLOATNAME(LMatrix4) &other) const {
 ////////////////////////////////////////////////////////////////////
 INLINE_LINMATH bool FLOATNAME(LMatrix4)::
 operator != (const FLOATNAME(LMatrix4) &other) const {
-  return !operator == (other);
+  return compare_to(other) != 0;
 }
 
 ////////////////////////////////////////////////////////////////////

+ 9 - 4
panda/src/linmath/lmatrix4_src.cxx

@@ -182,7 +182,7 @@ set_rotate_mat(FLOATTYPE angle, const FLOATNAME(LVecBase3) &axis,
   FLOATTYPE length_sq = axis_0 * axis_0 + axis_1 * axis_1 + axis_2 * axis_2;
   nassertv(length_sq != 0.0f);
   FLOATTYPE recip_length = 1.0f/csqrt(length_sq);
-  
+
   axis_0 *= recip_length;
   axis_1 *= recip_length;
   axis_2 *= recip_length;
@@ -288,11 +288,16 @@ set_rotate_mat_normaxis(FLOATTYPE angle, const FLOATNAME(LVecBase3) &axis,
 //     Function: LMatrix4::almost_equal
 //       Access: Public
 //  Description: Returns true if two matrices are memberwise equal
-//               within a specified tolerance.
+//               within a specified tolerance.  This is faster than
+//               the equivalence operator as this doesn't have to
+//               guarantee that it is transitive.
 ////////////////////////////////////////////////////////////////////
 bool FLOATNAME(LMatrix4)::
 almost_equal(const FLOATNAME(LMatrix4) &other, FLOATTYPE threshold) const {
   TAU_PROFILE("bool LMatrix4::almost_equal(const LMatrix4 &, FLOATTYPE)", " ", TAU_USER);
+#ifdef HAVE_EIGEN
+  return ((_m - other._m).cwiseAbs().maxCoeff() < NEARLY_ZERO(FLOATTYPE));
+#else
   return (IS_THRESHOLD_EQUAL((*this)(0, 0), other(0, 0), threshold) &&
           IS_THRESHOLD_EQUAL((*this)(0, 1), other(0, 1), threshold) &&
           IS_THRESHOLD_EQUAL((*this)(0, 2), other(0, 2), threshold) &&
@@ -309,9 +314,9 @@ almost_equal(const FLOATNAME(LMatrix4) &other, FLOATTYPE threshold) const {
           IS_THRESHOLD_EQUAL((*this)(3, 1), other(3, 1), threshold) &&
           IS_THRESHOLD_EQUAL((*this)(3, 2), other(3, 2), threshold) &&
           IS_THRESHOLD_EQUAL((*this)(3, 3), other(3, 3), threshold));
+#endif
 }
 
-
 ////////////////////////////////////////////////////////////////////
 //     Function: LMatrix4::output
 //       Access: Public
@@ -520,7 +525,7 @@ void FLOATNAME(LMatrix4)::
 write_datagram_fixed(Datagram &destination) const {
   for (int i = 0; i < 4; ++i) {
     for (int j = 0; j < 4; ++j) {
-#if FLOATTOKEN == 'f' 
+#if FLOATTOKEN == 'f'
       destination.add_float32(get_cell(i,j));
 #else
       destination.add_float64(get_cell(i,j));

+ 8 - 7
panda/src/linmath/lmatrix4_src.h

@@ -107,6 +107,7 @@ PUBLISHED:
   INLINE_LINMATH static int size();
 
   INLINE_LINMATH bool is_nan() const;
+  INLINE_LINMATH bool is_identity() const;
 
   INLINE_LINMATH FLOATTYPE get_cell(int row, int col) const;
   INLINE_LINMATH void set_cell(int row, int col, FLOATTYPE value);
@@ -205,13 +206,13 @@ PUBLISHED:
   INLINE_LINMATH void
     set_scale_mat(const FLOATNAME(LVecBase3) &scale);
   INLINE_LINMATH void
-    set_shear_mat(const FLOATNAME(LVecBase3) &shear, 
+    set_shear_mat(const FLOATNAME(LVecBase3) &shear,
                   CoordinateSystem cs = CS_default);
   INLINE_LINMATH void
     set_scale_shear_mat(const FLOATNAME(LVecBase3) &scale,
-                        const FLOATNAME(LVecBase3) &shear, 
+                        const FLOATNAME(LVecBase3) &shear,
                         CoordinateSystem cs = CS_default);
-  
+
   INLINE_LINMATH static FLOATNAME(LMatrix4)
     translate_mat(const FLOATNAME(LVecBase3) &trans);
   INLINE_LINMATH static FLOATNAME(LMatrix4)
@@ -232,19 +233,19 @@ PUBLISHED:
     scale_mat(FLOATTYPE scale);
 
   static INLINE_LINMATH FLOATNAME(LMatrix4)
-    shear_mat(const FLOATNAME(LVecBase3) &shear, 
+    shear_mat(const FLOATNAME(LVecBase3) &shear,
               CoordinateSystem cs = CS_default);
   static INLINE_LINMATH FLOATNAME(LMatrix4)
-    shear_mat(FLOATTYPE shxy, FLOATTYPE shxz, FLOATTYPE shyz, 
+    shear_mat(FLOATTYPE shxy, FLOATTYPE shxz, FLOATTYPE shyz,
               CoordinateSystem cs = CS_default);
 
   static INLINE_LINMATH FLOATNAME(LMatrix4)
     scale_shear_mat(const FLOATNAME(LVecBase3) &scale,
-                    const FLOATNAME(LVecBase3) &shear, 
+                    const FLOATNAME(LVecBase3) &shear,
                     CoordinateSystem cs = CS_default);
   static INLINE_LINMATH FLOATNAME(LMatrix4)
     scale_shear_mat(FLOATTYPE sx, FLOATTYPE sy, FLOATTYPE sz,
-                    FLOATTYPE shxy, FLOATTYPE shxz, FLOATTYPE shyz, 
+                    FLOATTYPE shxy, FLOATTYPE shxz, FLOATTYPE shyz,
                     CoordinateSystem cs = CS_default);
 
   INLINE_LINMATH static const FLOATNAME(LMatrix4) &y_to_z_up_mat();

+ 2 - 10
panda/src/pgraph/config_pgraph.cxx

@@ -106,7 +106,8 @@ ConfigVariableBool fake_view_frustum_cull
 ("fake-view-frustum-cull", false,
  PRC_DESC("Set this true to cause culling to be performed by rendering the "
           "object in red wireframe, rather than actually culling it.  This "
-          "helps make culling errors obvious."));
+          "helps make culling errors obvious.  This variable only has an "
+          "effect when Panda is not compiled for a release build."));
 
 ConfigVariableBool clip_plane_cull
 ("clip-plane-cull", true,
@@ -168,15 +169,6 @@ ConfigVariableBool compose_componentwise
           "operations when possible.  If this is false, the compositions "
           "are always computed by matrix."));
 
-ConfigVariableBool uniquify_matrix
-("uniquify-matrix", true,
- PRC_DESC("Set this true to look up arbitarary 4x4 transform matrices in the "
-          "cache, to ensure that two differently-computed transforms that "
-          "happen to encode the same matrix will be "
-          "collapsed into a single pointer.  Nowadays, "
-          "with the transforms stored in a hashtable, we're generally better "
-          "off with this set true."));
-
 ConfigVariableBool paranoid_const
 ("paranoid-const", false,
  PRC_DESC("Set this true to double-check that nothing is inappropriately "

+ 0 - 1
panda/src/pgraph/config_pgraph.h

@@ -41,7 +41,6 @@ extern ConfigVariableBool no_unsupported_copy;
 extern ConfigVariableBool allow_unrelated_wrt;
 extern ConfigVariableBool paranoid_compose;
 extern ConfigVariableBool compose_componentwise;
-extern ConfigVariableBool uniquify_matrix;
 extern ConfigVariableBool paranoid_const;
 extern ConfigVariableBool auto_break_cycles;
 extern EXPCL_PANDA_PGRAPH ConfigVariableBool garbage_collect_states;

+ 0 - 25
panda/src/pgraph/cullBin.I

@@ -26,7 +26,6 @@ CullBin(const CullBin &copy) :
   _cull_this_pcollector(copy._cull_this_pcollector),
   _draw_this_pcollector(copy._draw_this_pcollector)
 {
-  check_flash_color();
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -44,7 +43,6 @@ CullBin(const string &name, CullBin::BinType bin_type,
   _cull_this_pcollector(_cull_bin_pcollector, name),
   _draw_this_pcollector(draw_region_pcollector, name)
 {
-  check_flash_color();
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -66,26 +64,3 @@ INLINE CullBin::BinType CullBin::
 get_bin_type() const {
   return _bin_type;
 }
-
-////////////////////////////////////////////////////////////////////
-//     Function: CullBin::has_flash_color
-//       Access: Public
-//  Description: Returns true if this bin has a flash color configured
-//               via the flash-bin-binname config directive, or false
-//               otherwise.
-////////////////////////////////////////////////////////////////////
-INLINE bool CullBin::
-has_flash_color() const {
-  return _has_flash_color;
-}
-
-////////////////////////////////////////////////////////////////////
-//     Function: CullBin::get_flash_color
-//       Access: Public
-//  Description: If has_flash_color returns true, this returns the
-//               color specified.
-////////////////////////////////////////////////////////////////////
-INLINE const LColor &CullBin::
-get_flash_color() const {
-  return _flash_color;
-}

+ 0 - 34
panda/src/pgraph/cullBin.cxx

@@ -87,40 +87,6 @@ make_result_graph() {
   return root_node;
 }
 
-////////////////////////////////////////////////////////////////////
-//     Function: CullBin::check_flash_color
-//       Access: Private
-//  Description: Checks the config variables for a user variable of
-//               the name flash-bin-binname.  If found, it defines the
-//               r g b color to flash geometry in this bin.
-////////////////////////////////////////////////////////////////////
-void CullBin::
-check_flash_color() {
-#ifdef NDEBUG
-  _has_flash_color = false;
-#else
-  ConfigVariableDouble flash_bin
-    ("flash-bin-" + _name, "", "", ConfigVariable::F_dynamic);
-  if (flash_bin.get_num_words() == 0) {
-    _has_flash_color = false;
-
-  } else if (flash_bin.get_num_words() == 3) {
-    _has_flash_color = true;
-    _flash_color.set(flash_bin[0], flash_bin[1], flash_bin[2], 1.0f);
-
-  } else if (flash_bin.get_num_words() == 4) {
-    _has_flash_color = true;
-    _flash_color.set(flash_bin[0], flash_bin[1], flash_bin[2], flash_bin[3]);
-
-  } else {
-    _has_flash_color = false;
-    pgraph_cat.warning()
-      << "Invalid value for flash-bin-" << _name << ": "
-      << flash_bin.get_string_value() << "\n";
-  }
-#endif  // NDEBUG
-}
-
 ////////////////////////////////////////////////////////////////////
 //     Function: CullBin::ResultGraphBuilder::Constructor
 //       Access: Public

+ 0 - 3
panda/src/pgraph/cullBin.h

@@ -77,9 +77,6 @@ protected:
   BinType _bin_type;
   GraphicsStateGuardianBase *_gsg;
 
-  bool _has_flash_color;
-  LColor _flash_color;
-
   // Used in make_result_graph() and fill_result_graph().
   class ResultGraphBuilder {
   public:

+ 73 - 0
panda/src/pgraph/cullBinManager.I

@@ -284,3 +284,76 @@ set_bin_active(const string &name, bool active) {
   nassertv(bin_index != -1);
   set_bin_active(bin_index, active);
 }
+
+#ifndef NDEBUG
+////////////////////////////////////////////////////////////////////
+//     Function: CullBinManager::get_bin_flash_active
+//       Access: Published
+//  Description: Returns true if the bin with the given bin_index is
+//               configured to flash at a predetermined color (where
+//               bin_index was retrieved by get_bin() or find_bin()).
+//
+//               This method is not available in release builds.
+////////////////////////////////////////////////////////////////////
+INLINE bool CullBinManager::
+get_bin_flash_active(int bin_index) const {
+  nassertr(bin_index >= 0 && bin_index < (int)_bin_definitions.size(), false);
+  return _bin_definitions[bin_index]._flash_active;
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: CullBinManager::get_bin_flash_color
+//       Access: Published
+//  Description: Returns the color that this bin has been configured
+//               to flash to, if configured.
+//
+//               This method is not available in release builds.
+////////////////////////////////////////////////////////////////////
+INLINE const LColor &CullBinManager::
+get_bin_flash_color(int bin_index) const {
+  nassertr(bin_index >= 0 && bin_index < (int)_bin_definitions.size(), LColor::zero());
+  return _bin_definitions[bin_index]._flash_color;
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: CullBinManager::set_bin_flash_active
+//       Access: Published
+//  Description: When set to true, the given bin_index is configured
+//               to flash at a predetermined color (where
+//               bin_index was retrieved by get_bin() or find_bin()).
+//
+//               This method is not available in release builds.
+////////////////////////////////////////////////////////////////////
+INLINE void CullBinManager::
+set_bin_flash_active(int bin_index, bool active) {
+  nassertv(bin_index >= 0 && bin_index < (int)_bin_definitions.size());
+  _bin_definitions[bin_index]._flash_active = active;
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: CullBinManager::set_bin_flash_color
+//       Access: Published
+//  Description: Changes the flash color for the given bin index.
+//
+//               This method is not available in release builds.
+////////////////////////////////////////////////////////////////////
+INLINE void CullBinManager::
+set_bin_flash_color(int bin_index, const LColor &color) {
+  nassertv(bin_index >= 0 && bin_index < (int)_bin_definitions.size());
+  _bin_definitions[bin_index]._flash_color = color;
+}
+#endif  // NDEBUG
+
+////////////////////////////////////////////////////////////////////
+//     Function: CullBinManager::get_global_ptr
+//       Access: Published, Static
+//  Description: Returns the pointer to the global CullBinManager
+//               object.
+////////////////////////////////////////////////////////////////////
+INLINE CullBinManager *CullBinManager::
+get_global_ptr() {
+  if (_global_ptr == (CullBinManager *)NULL) {
+    _global_ptr = new CullBinManager;
+  }
+  return _global_ptr;
+}

+ 34 - 25
panda/src/pgraph/cullBinManager.cxx

@@ -109,6 +109,29 @@ add_bin(const string &name, BinType type, int sort) {
   def._sort = sort;
   def._active = true;
 
+#ifndef NDEBUG
+  // Check if there was a flash color configured for this bin name.
+  ConfigVariableDouble flash_bin
+    ("flash-bin-" + name, "", "", ConfigVariable::F_dynamic);
+  if (flash_bin.get_num_words() == 0) {
+    def._flash_active = false;
+
+  } else if (flash_bin.get_num_words() == 3) {
+    def._flash_active = true;
+    def._flash_color.set(flash_bin[0], flash_bin[1], flash_bin[2], 1.0f);
+
+  } else if (flash_bin.get_num_words() == 4) {
+    def._flash_active = true;
+    def._flash_color.set(flash_bin[0], flash_bin[1], flash_bin[2], flash_bin[3]);
+
+  } else {
+    def._flash_active = false;
+    pgraph_cat.warning()
+      << "Invalid value for flash-bin-" << name << ": "
+      << flash_bin.get_string_value() << "\n";
+  }
+#endif
+
   _bins_by_name.insert(BinsByName::value_type(name, new_bin_index));
   _sorted_bins.push_back(new_bin_index);
   _bins_are_sorted = false;
@@ -133,7 +156,7 @@ remove_bin(int bin_index) {
   nassertv(_bin_definitions[bin_index]._in_use);
 
   _bin_definitions[bin_index]._in_use = false;
-  SortedBins::iterator si = 
+  SortedBins::iterator si =
     find(_sorted_bins.begin(), _sorted_bins.end(), bin_index);
   nassertv(si != _sorted_bins.end());
   _sorted_bins.erase(si);
@@ -142,7 +165,7 @@ remove_bin(int bin_index) {
   // Now we have to make sure all of the data objects in the world
   // that had cached this bin index or have a bin object are correctly
   // updated.
-  
+
   // First, tell all the RenderStates in the world to reset their bin
   // index cache.
   RenderState::bin_removed(bin_index);
@@ -170,7 +193,7 @@ find_bin(const string &name) const {
 ////////////////////////////////////////////////////////////////////
 //     Function: CullBinManager::write
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void CullBinManager::
 write(ostream &out) const {
@@ -185,20 +208,6 @@ write(ostream &out) const {
   }
 }
 
-////////////////////////////////////////////////////////////////////
-//     Function: CullBinManager::get_global_ptr
-//       Access: Published, Static
-//  Description: Returns the pointer to the global CullBinManager
-//               object.
-////////////////////////////////////////////////////////////////////
-CullBinManager *CullBinManager::
-get_global_ptr() {
-  if (_global_ptr == (CullBinManager *)NULL) {
-    _global_ptr = new CullBinManager;
-  }
-  return _global_ptr;
-}
-
 ////////////////////////////////////////////////////////////////////
 //     Function: CullBinManager::make_new_bin
 //       Access: Public
@@ -260,11 +269,11 @@ do_sort_bins() {
 void CullBinManager::
 setup_initial_bins() {
   ConfigVariableList cull_bin
-    ("cull-bin", 
+    ("cull-bin",
      PRC_DESC("Creates a new cull bin by name, with the specified properties.  "
               "This is a string in three tokens, separated by whitespace: "
               "'bin_name sort type'."));
-  
+
   // First, add all of the bins specified in the Configrc file.
   int num_bins = cull_bin.get_num_unique_values();
 
@@ -351,26 +360,26 @@ parse_bin_type(const string &bin_type) {
 
 ////////////////////////////////////////////////////////////////////
 //     Function: CullBinManager::BinType output operator
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 ostream &
 operator << (ostream &out, CullBinManager::BinType bin_type) {
   switch (bin_type) {
   case CullBinManager::BT_invalid:
     return out << "invalid";
-    
+
   case CullBinManager::BT_unsorted:
     return out << "unsorted";
-    
+
   case CullBinManager::BT_state_sorted:
     return out << "state_sorted";
-    
+
   case CullBinManager::BT_back_to_front:
     return out << "back_to_front";
-    
+
   case CullBinManager::BT_front_to_back:
     return out << "front_to_back";
-    
+
   case CullBinManager::BT_fixed:
     return out << "fixed";
   }

+ 13 - 2
panda/src/pgraph/cullBinManager.h

@@ -65,9 +65,16 @@ PUBLISHED:
   INLINE void set_bin_active(int bin_index, bool active);
   INLINE void set_bin_active(const string &name, bool active);
 
+#ifndef NDEBUG
+  INLINE bool get_bin_flash_active(int bin_index) const;
+  INLINE const LColor &get_bin_flash_color(int bin_index) const;
+  INLINE void set_bin_flash_active(int bin_index, bool active);
+  INLINE void set_bin_flash_color(int bin_index, const LColor &color);
+#endif
+
   void write(ostream &out) const;
 
-  static CullBinManager *get_global_ptr();
+  INLINE static CullBinManager *get_global_ptr();
 
 public:
   // This interface is only intended to be used by CullResult.
@@ -77,7 +84,7 @@ public:
   // This defines the factory interface for defining constructors to
   // bin types (the implementations are in the cull directory, not
   // here in pgraph, so we can't call the constructors directly).
-  typedef CullBin *BinConstructor(const string &name, 
+  typedef CullBin *BinConstructor(const string &name,
                                   GraphicsStateGuardianBase *gsg,
                                   const PStatCollector &draw_region_pcollector);
 
@@ -95,6 +102,10 @@ private:
     BinType _type;
     int _sort;
     bool _active;
+#ifndef NDEBUG
+    bool _flash_active;
+    LColorf _flash_color;
+#endif
   };
   typedef pvector<BinDefinition> BinDefinitions;
   BinDefinitions _bin_definitions;

+ 1 - 1
panda/src/pgraph/cullPlanes.cxx

@@ -13,12 +13,12 @@
 ////////////////////////////////////////////////////////////////////
 
 #include "cullPlanes.h"
+#include "cullTraverser.h"
 #include "cullTraverserData.h"
 #include "clipPlaneAttrib.h"
 #include "occluderEffect.h"
 #include "boundingBox.h"
 
-
 ////////////////////////////////////////////////////////////////////
 //     Function: CullPlanes::make_empty
 //       Access: Public, Static

+ 41 - 21
panda/src/pgraph/cullResult.cxx

@@ -26,6 +26,7 @@
 #include "renderState.h"
 #include "clockObject.h"
 #include "config_pgraph.h"
+#include "depthOffsetAttrib.h"
 
 TypeHandle CullResult::_type_handle;
 
@@ -114,12 +115,15 @@ add_object(CullableObject *object, const CullTraverser *traverser) {
 
   bool force = !traverser->get_effective_incomplete_render();
   Thread *current_thread = traverser->get_current_thread();
+  CullBinManager *bin_manager = CullBinManager::get_global_ptr();
 
   // Check to see if there's a special transparency setting.
   const RenderState *state = object->_state;
   nassertv(state != (const RenderState *)NULL);
 
-  const TransparencyAttrib *trans = DCAST(TransparencyAttrib, state->get_attrib(TransparencyAttrib::get_class_slot()));
+  const TransparencyAttrib *trans = (const TransparencyAttrib *)
+    state->get_attrib(TransparencyAttrib::get_class_slot());
+
   if (trans != (const TransparencyAttrib *)NULL) {
     switch (trans->get_mode()) {
     case TransparencyAttrib::M_alpha:
@@ -168,7 +172,9 @@ add_object(CullableObject *object, const CullTraverser *traverser) {
       // explicit bin already applied; otherwise, M_dual falls back
       // to M_alpha.
       {
-        const CullBinAttrib *bin_attrib = DCAST(CullBinAttrib, state->get_attrib(CullBinAttrib::get_class_slot()));
+        const CullBinAttrib *bin_attrib = (const CullBinAttrib *)
+          state->get_attrib(CullBinAttrib::get_class_slot());
+
         if (bin_attrib == (CullBinAttrib *)NULL ||
             bin_attrib->get_bin_name().empty()) {
           // We make a copy of the object to draw the transparent part;
@@ -183,10 +189,14 @@ add_object(CullableObject *object, const CullTraverser *traverser) {
               if (transparent_part->munge_geom
                   (_gsg, _gsg->get_geom_munger(transparent_part->_state, current_thread),
                    traverser, force)) {
-                CullBin *bin = get_bin(transparent_part->_state->get_bin_index());
+                int transparent_bin_index = transparent_part->_state->get_bin_index();
+                CullBin *bin = get_bin(transparent_bin_index);
                 nassertv(bin != (CullBin *)NULL);
 #ifndef NDEBUG
-                check_flash_bin(transparent_part->_state, bin);
+                if (bin_manager->get_bin_flash_active(transparent_bin_index)) {
+                  do_flash_bin(transparent_part->_state,
+                    bin_manager->get_bin_flash_color(transparent_bin_index));
+                }
 #endif
                 bin->add_object(transparent_part, current_thread);
               } else {
@@ -215,11 +225,15 @@ add_object(CullableObject *object, const CullTraverser *traverser) {
     }
   }
 
-  CullBin *bin = get_bin(object->_state->get_bin_index());
+  int bin_index = object->_state->get_bin_index();
+  CullBin *bin = get_bin(bin_index);
   nassertv(bin != (CullBin *)NULL);
 
 #ifndef NDEBUG
-  check_flash_bin(object->_state, bin);
+  if (bin_manager->get_bin_flash_active(bin_index)) {
+    do_flash_bin(object->_state,
+      bin_manager->get_bin_flash_color(bin_index));
+  }
 #endif
 
   // Munge vertices as needed for the GSG's requirements, and the
@@ -346,16 +360,20 @@ make_new_bin(int bin_index) {
   CullBinManager *bin_manager = CullBinManager::get_global_ptr();
   PT(CullBin) bin = bin_manager->make_new_bin(bin_index, _gsg,
                                               _draw_region_pcollector);
-  if (bin != (CullBin *)NULL) {
+  CullBin *bin_ptr = bin.p();
+
+  if (bin_ptr != (CullBin *)NULL) {
     // Now store it in the vector.
     while (bin_index >= (int)_bins.size()) {
       _bins.push_back((CullBin *)NULL);
     }
     nassertr(bin_index >= 0 && bin_index < (int)_bins.size(), NULL);
-    _bins[bin_index] = bin;
+
+    // Prevent unnecessary ref/unref by swapping the PointerTos.
+    swap(_bins[bin_index], bin);
   }
 
-  return bin;
+  return bin_ptr;
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -393,25 +411,25 @@ get_binary_state() {
 }
 
 ////////////////////////////////////////////////////////////////////
-//     Function: CullResult::check_flash_bin
+//     Function: CullResult::do_flash_bin
 //       Access: Private
 //  Description: If the user configured flash-bin-binname, then update
 //               the object's state to flash all the geometry in the
 //               bin.
 ////////////////////////////////////////////////////////////////////
 void CullResult::
-check_flash_bin(CPT(RenderState) &state, CullBin *bin) {
-  if (bin->has_flash_color()) {
-    int cycle = (int)(ClockObject::get_global_clock()->get_frame_time() * bin_color_flash_rate);
-    if ((cycle & 1) == 0) {
-      state = state->remove_attrib(TextureAttrib::get_class_slot());
-      state = state->remove_attrib(LightAttrib::get_class_slot());
-      state = state->remove_attrib(ColorScaleAttrib::get_class_slot());
-      state = state->remove_attrib(FogAttrib::get_class_slot());
-      state = state->add_attrib(ColorAttrib::make_flat(bin->get_flash_color()),
-                                RenderState::get_max_priority());
-    }
+do_flash_bin(CPT(RenderState) &state, const LColor &flash_color) {
+#ifndef NDEBUG
+  int cycle = (int)(ClockObject::get_global_clock()->get_frame_time() * bin_color_flash_rate);
+  if ((cycle & 1) == 0) {
+    state = state->remove_attrib(TextureAttrib::get_class_slot());
+    state = state->remove_attrib(LightAttrib::get_class_slot());
+    state = state->remove_attrib(ColorScaleAttrib::get_class_slot());
+    state = state->remove_attrib(FogAttrib::get_class_slot());
+    state = state->add_attrib(ColorAttrib::make_flat(flash_color),
+                              RenderState::get_max_priority());
   }
+#endif  // NDEBUG
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -423,6 +441,7 @@ check_flash_bin(CPT(RenderState) &state, CullBin *bin) {
 ////////////////////////////////////////////////////////////////////
 void CullResult::
 check_flash_transparency(CPT(RenderState) &state, const LColor &transparency) {
+#ifndef NDEBUG
   if (show_transparency) {
     int cycle = (int)(ClockObject::get_global_clock()->get_frame_time() * bin_color_flash_rate);
     if ((cycle & 1) == 0) {
@@ -434,6 +453,7 @@ check_flash_transparency(CPT(RenderState) &state, const LColor &transparency) {
                                 RenderState::get_max_priority());
     }
   }
+#endif
 }
 
 ////////////////////////////////////////////////////////////////////

+ 1 - 1
panda/src/pgraph/cullResult.h

@@ -66,7 +66,7 @@ public:
 
 private:
   CullBin *make_new_bin(int bin_index);
-  void check_flash_bin(CPT(RenderState) &state, CullBin *bin);
+  void do_flash_bin(CPT(RenderState) &state, const LColor &flash_color);
   void check_flash_transparency(CPT(RenderState) &state, const LColor &color);
 
   static CPT(RenderState) get_alpha_state();

+ 60 - 0
panda/src/pgraph/cullTraverser.I

@@ -245,3 +245,63 @@ flush_level() {
   _geoms_pcollector.flush_level();
   _geoms_occluded_pcollector.flush_level();
 }
+
+////////////////////////////////////////////////////////////////////
+//     Function: CullTraverser::do_traverse
+//       Access: Protected
+//  Description: This is implemented inline to reduce recursion.
+////////////////////////////////////////////////////////////////////
+INLINE void CullTraverser::
+do_traverse(CullTraverserData &data) {
+  if (is_in_view(data)) {
+    if (pgraph_cat.is_spam()) {
+      pgraph_cat.spam()
+        << "\n" << data._node_path
+        << " " << data._draw_mask << "\n";
+    }
+
+    PandaNodePipelineReader *node_reader = data.node_reader();
+    int fancy_bits = node_reader->get_fancy_bits();
+
+    if ((fancy_bits & (PandaNode::FB_transform |
+                       PandaNode::FB_state |
+                       PandaNode::FB_effects |
+                       PandaNode::FB_tag |
+                       PandaNode::FB_draw_mask |
+                       PandaNode::FB_cull_callback)) == 0 &&
+        data._cull_planes->is_empty()) {
+      // Nothing interesting in this node; just move on.
+
+    } else {
+      // Something in this node is worth taking a closer look.
+      const RenderEffects *node_effects = node_reader->get_effects();
+      if (node_effects->has_show_bounds()) {
+        // If we should show the bounding volume for this node, make it
+        // up now.
+        show_bounds(data, node_effects->has_show_tight_bounds());
+      }
+
+      data.apply_transform_and_state(this);
+
+      const FogAttrib *fog = (const FogAttrib *)
+        node_reader->get_state()->get_attrib(FogAttrib::get_class_slot());
+
+      if (fog != (const FogAttrib *)NULL && fog->get_fog() != (Fog *)NULL) {
+        // If we just introduced a FogAttrib here, call adjust_to_camera()
+        // now.  This maybe isn't the perfect time to call it, but it's
+        // good enough; and at this time we have all the information we
+        // need for it.
+        fog->get_fog()->adjust_to_camera(get_camera_transform());
+      }
+
+      if (fancy_bits & PandaNode::FB_cull_callback) {
+        PandaNode *node = data.node();
+        if (!node->cull_callback(this, data)) {
+          return;
+        }
+      }
+    }
+
+    traverse_below(data);
+  }
+}

+ 18 - 70
panda/src/pgraph/cullTraverser.cxx

@@ -17,7 +17,6 @@
 #include "cullTraverserData.h"
 #include "transformState.h"
 #include "renderState.h"
-#include "fogAttrib.h"
 #include "colorAttrib.h"
 #include "renderModeAttrib.h"
 #include "cullFaceAttrib.h"
@@ -164,7 +163,7 @@ traverse(const NodePath &root) {
                            _initial_state, _view_frustum,
                            _current_thread);
 
-    traverse(data);
+    do_traverse(data);
   }
 }
 
@@ -177,55 +176,7 @@ traverse(const NodePath &root) {
 ////////////////////////////////////////////////////////////////////
 void CullTraverser::
 traverse(CullTraverserData &data) {
-  if (is_in_view(data)) {
-    if (pgraph_cat.is_spam()) {
-      pgraph_cat.spam()
-        << "\n" << data._node_path
-        << " " << data._draw_mask << "\n";
-    }
-
-    PandaNodePipelineReader *node_reader = data.node_reader();
-    int fancy_bits = node_reader->get_fancy_bits();
-
-    if ((fancy_bits & (PandaNode::FB_transform |
-                       PandaNode::FB_state |
-                       PandaNode::FB_effects |
-                       PandaNode::FB_tag |
-                       PandaNode::FB_draw_mask |
-                       PandaNode::FB_cull_callback)) == 0 &&
-        data._cull_planes->is_empty()) {
-      // Nothing interesting in this node; just move on.
-      traverse_below(data);
-
-    } else {
-      // Something in this node is worth taking a closer look.
-      const RenderEffects *node_effects = node_reader->get_effects();
-      if (node_effects->has_show_bounds()) {
-        // If we should show the bounding volume for this node, make it
-        // up now.
-        show_bounds(data, node_effects->has_show_tight_bounds());
-      }
-
-      data.apply_transform_and_state(this);
-
-      const FogAttrib *fog = DCAST(FogAttrib, node_reader->get_state()->get_attrib(FogAttrib::get_class_slot()));
-      if (fog != (const FogAttrib *)NULL && fog->get_fog() != (Fog *)NULL) {
-        // If we just introduced a FogAttrib here, call adjust_to_camera()
-        // now.  This maybe isn't the perfect time to call it, but it's
-        // good enough; and at this time we have all the information we
-        // need for it.
-        fog->get_fog()->adjust_to_camera(get_camera_transform());
-      }
-
-      if (fancy_bits & PandaNode::FB_cull_callback) {
-        PandaNode *node = data.node();
-        if (!node->cull_callback(this, data)) {
-          return;
-        }
-      }
-      traverse_below(data);
-    }
-  }
+  do_traverse(data);
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -241,27 +192,24 @@ traverse_below(CullTraverserData &data) {
   PandaNodePipelineReader *node_reader = data.node_reader();
   PandaNode *node = data.node();
 
-  bool this_node_hidden = data.is_this_node_hidden(this);
-
-  const RenderEffects *node_effects = node_reader->get_effects();
-  bool has_decal = !this_node_hidden && node_effects->has_decal();
-
-  if (!this_node_hidden) {
+  if (!data.is_this_node_hidden(_camera_mask)) {
     node->add_for_draw(this, data);
-  }
 
-  if (has_decal) {
-    // If we *are* implementing decals with DepthOffsetAttribs,
-    // apply it now, so that each child of this node gets offset by
-    // a tiny amount.
-    data._state = data._state->compose(get_depth_offset_state());
+    // Check for a decal effect.
+    const RenderEffects *node_effects = node_reader->get_effects();
+    if (node_effects->has_decal()) {
+      // If we *are* implementing decals with DepthOffsetAttribs,
+      // apply it now, so that each child of this node gets offset by
+      // a tiny amount.
+      data._state = data._state->compose(get_depth_offset_state());
 #ifndef NDEBUG
-    // This is just a sanity check message.
-    if (!node->is_geom_node()) {
-      pgraph_cat.error()
-        << "DecalEffect applied to " << *node << ", not a GeomNode.\n";
-    }
+      // This is just a sanity check message.
+      if (!node->is_geom_node()) {
+        pgraph_cat.error()
+          << "DecalEffect applied to " << *node << ", not a GeomNode.\n";
+      }
 #endif
+    }
   }
 
   // Now visit all the node's children.
@@ -272,14 +220,14 @@ traverse_below(CullTraverserData &data) {
     int i = node->get_first_visible_child();
     while (i < num_children) {
       CullTraverserData next_data(data, children.get_child(i));
-      traverse(next_data);
+      do_traverse(next_data);
       i = node->get_next_visible_child(i);
     }
 
   } else {
     for (int i = 0; i < num_children; i++) {
       CullTraverserData next_data(data, children.get_child(i));
-      traverse(next_data);
+      do_traverse(next_data);
     }
   }
 }

Some files were not shown because too many files changed in this diff