
Merge branch 'master' into interrogate-overhaul

Conflicts:
	dtool/src/cppparser/cppBison.yxx
rdb, 11 years ago
commit 34761a0b9b
59 changed files with 408 additions and 6716 deletions
  1. direct/src/autorestart/Sources.pp (+0 -8)
  2. direct/src/autorestart/autorestart.c (+0 -901)
  3. direct/src/directscripts/Doxyfile.python (+1 -2)
  4. direct/src/heapq/Sources.pp (+0 -29)
  5. direct/src/heapq/heapq.cxx (+0 -240)
  6. direct/src/http/LandingPage.py (+1 -1)
  7. direct/src/http/LandingPageHTML.py (+1 -1)
  8. direct/src/http/WebRequest.py (+1 -1)
  9. direct/src/pyinst/Builder.py (+0 -504)
  10. direct/src/pyinst/Sources.pp (+0 -0)
  11. direct/src/pyinst/__init__.py (+0 -0)
  12. direct/src/pyinst/archive.py (+0 -246)
  13. direct/src/pyinst/archive_rt.py (+0 -226)
  14. direct/src/pyinst/archivebuilder.py (+0 -81)
  15. direct/src/pyinst/bindepend.py (+0 -169)
  16. direct/src/pyinst/carchive.py (+0 -204)
  17. direct/src/pyinst/carchive_rt.py (+0 -157)
  18. direct/src/pyinst/finder.py (+0 -178)
  19. direct/src/pyinst/icon.py (+0 -138)
  20. direct/src/pyinst/imputil.py (+0 -487)
  21. direct/src/pyinst/installutils.py (+0 -91)
  22. direct/src/pyinst/ltoc.py (+0 -85)
  23. direct/src/pyinst/mkarchive.py (+0 -42)
  24. direct/src/pyinst/modulefinder.py (+0 -436)
  25. direct/src/pyinst/resource.py (+0 -317)
  26. direct/src/pyinst/tocfilter.py (+0 -131)
  27. direct/src/showbase/ElementTree.py (+0 -1251)
  28. direct/src/showbase/HTMLTree.py (+1 -1)
  29. direct/src/showbase/PythonUtil.py (+0 -1)
  30. direct/src/test/ModelScreenShot.py (+0 -71)
  31. direct/src/test/ModelScreenShotGlobals.py (+0 -6)
  32. direct/src/test/Sources.pp (+0 -0)
  33. direct/src/test/__init__.py (+0 -0)
  34. dtool/src/cppparser/cppBison.yxx (+25 -0)
  35. dtool/src/cppparser/cppFunctionType.cxx (+3 -0)
  36. dtool/src/cppparser/cppFunctionType.h (+1 -0)
  37. dtool/src/cppparser/cppInstanceIdentifier.cxx (+8 -2)
  38. dtool/src/cppparser/cppInstanceIdentifier.h (+1 -0)
  39. dtool/src/cppparser/cppPreprocessor.cxx (+20 -2)
  40. dtool/src/cppparser/cppReferenceType.cxx (+17 -5)
  41. dtool/src/cppparser/cppReferenceType.h (+11 -2)
  42. dtool/src/dtoolbase/dtoolbase_cc.h (+22 -19)
  43. makepanda/makepanda.py (+15 -3)
  44. panda/src/express/pointerTo.I (+68 -0)
  45. panda/src/express/pointerTo.h (+10 -0)
  46. panda/src/express/pointerToBase.I (+14 -0)
  47. panda/src/express/pointerToBase.h (+4 -0)
  48. panda/src/glstuff/glGraphicsStateGuardian_src.cxx (+50 -9)
  49. panda/src/glstuff/glGraphicsStateGuardian_src.h (+2 -0)
  50. panda/src/pgraph/config_pgraph.cxx (+4 -4)
  51. panda/src/pgraph/cullBin.cxx (+6 -51)
  52. panda/src/pgraph/cullResult.cxx (+13 -61)
  53. panda/src/pgraph/cullResult.h (+2 -5)
  54. panda/src/pgraph/cullTraverser.I (+4 -4)
  55. panda/src/pgraph/cullTraverser.cxx (+67 -303)
  56. panda/src/pgraph/cullTraverser.h (+4 -5)
  57. panda/src/pgraph/cullableObject.I (+22 -96)
  58. panda/src/pgraph/cullableObject.cxx (+4 -116)
  59. panda/src/pgraph/cullableObject.h (+6 -24)

+ 0 - 8
direct/src/autorestart/Sources.pp

@@ -1,8 +0,0 @@
-#begin bin_target
-  // This program only compiles on Unix.
-  #define BUILD_TARGET $[UNIX_PLATFORM]
-  #define C++FLAGS -DWITHIN_PANDA
-
-  #define TARGET autorestart
-  #define SOURCES autorestart.c
-#end bin_target

+ 0 - 901
direct/src/autorestart/autorestart.c

@@ -1,901 +0,0 @@
-/* Filename: autorestart.c
- * Created by:  drose (05Sep02)
- *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- *
- * PANDA 3D SOFTWARE
- * Copyright (c) Carnegie Mellon University.  All rights reserved.
- *
- * All use of this software is subject to the terms of the revised BSD
- * license.  You should have received a copy of this license along
- * with this source code in a file named "LICENSE."
- *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-#ifdef WITHIN_PANDA
-#include "dtoolbase.h"
-#endif
-
-#include <getopt.h>
-#include <stdio.h>
-#include <errno.h>
-#include <string.h>  /* for strerror */
-#include <unistd.h>
-#include <sys/types.h>
-#include <sys/wait.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <time.h>
-#include <signal.h>
-#include <stdlib.h>
-#include <assert.h>
-#include <pwd.h>
-#include <grp.h>
-
-#ifdef HAVE_LIBCURL
-#include <curl/curl.h>
-#endif
-
-/* The maximum number of seconds to wait for a process to go away
-   after issuing SIGTERM.  This is only used in watchdog mode, when -W
-   is provided on the command line. */
-#define MAX_WAITTERM_SEC 10
-
-char **params = NULL;
-char *logfile_name = NULL;
-char *pidfile_name = NULL;
-int dont_fork = 0;
-char *watchdog_url = NULL;
-int watchdog_start_sec = 0;
-int watchdog_cycle_sec = 0;
-int watchdog_timeout_sec = 0;
-char *startup_username = NULL;
-char *startup_groupname = NULL;
-char *startup_chdir = NULL;
-int logfile_fd = -1;
-int stop_on_terminate = 0;
-int stop_always = 0;
-char *respawn_script = NULL;
-int respawn_count_time = 0;
-
-/* If requested, delay these many seconds between restart attempts */
-int respawn_delay_time = 5;
-
-
-/* We shouldn't respawn more than (spam_respawn_count - 1) times over
-   spam_respawn_time seconds. */
-int spam_respawn_count = 5;
-int spam_respawn_time = 60;
-int spam_restart_delay_time = 600;  /* Optionally, do not exit if we spam too much; simply sleep for this many seconds*/
-
-
-
-pid_t child_pid = 0;
-pid_t watchdog_pid = 0;
-
-#define TIME_BUFFER_SIZE 128
-
-/* Keep track of the frequency with which we respawn, so we can report
-   this to our respawn script. */
-typedef struct respawn_record_struct {
-  time_t _time;
-  struct respawn_record_struct *_next;
-} respawn_record;
-
-respawn_record *respawns = NULL;
-
-int
-record_respawn(time_t now) {
-  /* Records the respawning event in the respawn_record, and returns
-     the number of respawns in the last respawn_count_time
-     interval. */
-  respawn_record *rec;
-  respawn_record *next;
-  int count;
-
-  if (respawn_count_time <= 0) {
-    /* We're not tracking respawns if respawn_count_time is 0. */
-    return 0;
-  }
-
-  rec = (respawn_record *)malloc(sizeof(respawn_record));
-  rec->_time = now;
-  rec->_next = respawns;
-  respawns = rec;
-
-  /* Now walk through the rest of the list and count up the number of
-     respawn events until we reach a record more than
-     respawn_count_time seconds old. */
-  count = 0;
-  while (rec->_next != NULL &&
-         (now - rec->_time) <= respawn_count_time) {
-    rec = rec->_next;
-    count++;
-  }
-
-  /* The remaining respawn records get removed. */
-  next = rec->_next;
-  rec->_next = NULL;
-  while (next != NULL) {
-    rec = next;
-    next = rec->_next;
-    free(rec);
-  }
-
-  return count;
-}
-
-void
-invoke_respawn_script(time_t now) {
-  char buffer[32];
-  char *new_command;
-  int new_command_length;
-
-  /* The process is about to be respawned; run the script that we were
-     given on the command line. */
-  if (respawn_count_time <= 0) {
-    /* We're not counting respawn times, so just run the script
-       directly. */
-    system(respawn_script);
-
-  } else {
-    /* We are counting respawn times, so append that information as a
-       parameter to the command. */
-    sprintf(buffer, " %d", record_respawn(now));
-    new_command_length = strlen(respawn_script) + strlen(buffer);
-    new_command = (char *)malloc(new_command_length + 1);
-    strcpy(new_command, respawn_script);
-    strcat(new_command, buffer);
-    assert(strlen(new_command) == new_command_length);
-
-    system(new_command);
-
-    free(new_command);
-  }
-}
-
-/* A callback function passed to libcurl that simply discards the data
-   retrieved from the server.  We only care about the HTTP status. */
-size_t 
-watchdog_bitbucket(void *ptr, size_t size, size_t nmemb, void *userdata) {
-  return size * nmemb;
-}
-
-/* Waits up to timeout_ms for a particular child to terminate.
-   Returns 0 if the timeout expires. */
-pid_t 
-waitpid_timeout(pid_t child_pid, int *status_ptr, int timeout_ms) {
-  pid_t result;
-  struct timeval now, tv;
-  int now_ms, start_ms, elapsed_ms;
-  
-  gettimeofday(&now, NULL);
-  start_ms = now.tv_sec * 1000 + now.tv_usec / 1000;
-    
-  result = waitpid(child_pid, status_ptr, WNOHANG);
-  while (result == 0) {
-    gettimeofday(&now, NULL);
-    now_ms = now.tv_sec * 1000 + now.tv_usec / 1000;
-    elapsed_ms = now_ms - start_ms;
-    
-    if (elapsed_ms > timeout_ms) {
-      /* Tired of waiting. */
-      return 0;
-    }
-    
-    /* Yield the timeslice and wait some more. */
-    tv.tv_sec = 0;
-    tv.tv_usec = 1;
-    select(0, NULL, NULL, NULL, &tv);
-    result = waitpid(child_pid, status_ptr, WNOHANG);
-  }
-  if (result == -1) {
-    perror("waitpid");
-  }
-
-  return result;
-}
-
-
-/* Poll the requested URL until a failure or timeout occurs, or until
-   the child terminates on its own.  Returns 1 on HTTP failure or
-   timeout, 0 on self-termination.  In either case, *status_ptr is
-   filled in with the status value returned by waitpid().*/
-int 
-do_watchdog(int *status_ptr) {
-#ifndef HAVE_LIBCURL
-  fprintf(stderr, "Cannot watchdog; no libcurl available.\n");
-  return 0;
-#else  /* HAVE_LIBCURL */
-
-  CURL *curl;
-  CURLcode res;
-  char error_buffer[CURL_ERROR_SIZE];
-  pid_t wresult;
-
-  // Before we start polling the URL, wait at least start milliseconds.
-  wresult = waitpid_timeout(child_pid, status_ptr, watchdog_start_sec * 1000);
-  if (wresult == child_pid) {
-    // The child terminated on its own before we got started.
-    return 0;
-  }
-
-  curl = curl_easy_init();
-  if (!curl) {
-    fprintf(stderr, "Cannot watchdog; curl failed to init.\n");
-    return 0;
-  }
-
-  curl_easy_setopt(curl, CURLOPT_URL, watchdog_url);
-  /*curl_easy_setopt(curl, CURLOPT_VERBOSE, 1);*/
-  curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, watchdog_timeout_sec * 1000);
-  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, watchdog_bitbucket);
-  curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, error_buffer);
-  curl_easy_setopt(curl, CURLOPT_USERAGENT, "autorestart");
-  curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
-  curl_easy_setopt(curl, CURLOPT_FRESH_CONNECT, 1);
-  curl_easy_setopt(curl, CURLOPT_FORBID_REUSE, 1);
-
-  res = curl_easy_perform(curl);
-  while (res == 0) {
-    /* 0: The HTTP request finished successfully (but might or might
-       not have returned an error code like a 404). */
-    long http_response = 0;
-    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &http_response);
-    if ((http_response / 100) != 2) {
-      /* Anything in the 200 range is deemed success.  Anything else
-         is deemed failure. */
-      fprintf(stderr, "%s returned %ld\n", watchdog_url, http_response);
-      break;
-    }
-
-    wresult = waitpid_timeout(child_pid, status_ptr, watchdog_cycle_sec * 1000);
-    if (wresult == child_pid) {
-      /* The process terminated on its own.  Return 0 to indicate this. */
-      return 0;
-    }
-
-    res = curl_easy_perform(curl);
-  }
-
-  curl_easy_cleanup(curl);
-
-  /* Failed to retrieve the watchdog URL. */
-  if (res != 0) {
-    fprintf(stderr, "Failed to contact %s: %s\n", watchdog_url, error_buffer);
-  }
-  
-  /* Kill the child process and wait for it to go away. */
-  kill(child_pid, SIGTERM);
-
-  pid_t result = waitpid_timeout(child_pid, status_ptr, MAX_WAITTERM_SEC * 1000);
-  if (result != child_pid) {
-    if (result == -1) {
-      perror("waitpid");
-    } else {
-      /* SIGTERM didn't make the process die.  Try SIGKILL. */
-      fprintf(stderr, "Force-killing child process\n");
-      kill(child_pid, SIGKILL);
-      result = waitpid_timeout(child_pid, status_ptr, MAX_WAITTERM_SEC * 1000);
-      if (result == -1) {
-        perror("waitpid");
-      }
-    }
-  }
-
-  /* Return 1 to indicate we killed the child due to an HTTP error. */
-  return 1;
-#endif  /* HAVE_LIBCURL */
-}
-
-void
-exec_process() {
-  /* First, output the command line to the log file. */
-  char **p;
-  for (p = params; *p != NULL; ++p) {
-    fprintf(stderr, "%s ", *p);
-  }
-  fprintf(stderr, "\n");
-  execvp(params[0], params);
-  fprintf(stderr, "Cannot exec %s: %s\n", params[0], strerror(errno));
-
-  /* Exit with a status of 0, to indicate to the parent process that
-     we should stop. */
-  exit(0); 
-}
-
-int
-spawn_process() {
-  /* Spawns the child process.  Returns true if the process terminated
-     by itself and should be respawned, false if it was explicitly
-     killed (or some other error condition exists), and it should not
-     respawn any more. */
-  pid_t wresult;
-  int status;
-  int error_exit;
-
-  child_pid = fork();
-  if (child_pid < 0) {
-    /* Fork error. */
-    perror("fork");
-    return 0;
-  }
-
-  if (child_pid == 0) {
-    /* Child.  Exec the process. */
-    fprintf(stderr, "Child pid is %d.\n", getpid());
-    exec_process();
-    /* Shouldn't get here. */
-    exit(1);
-  }
-
-  /* Parent. */
-
-  error_exit = 0;
-
-  if (watchdog_url != NULL) {
-    /* If we're watchdogging, then go check the URL.  This function
-       won't return until the URL fails or the child exits. */
-    error_exit = do_watchdog(&status);
-
-  } else {
-    /* If we're not watchdogging, then just wait for the child to
-       terminate, and diagnose the reason. */
-    wresult = waitpid(child_pid, &status, 0);
-    if (wresult < 0) {
-      perror("waitpid");
-      return 0;
-    }
-  }
-
-  /* Now that we've returned from waitpid, clear the child pid number
-     so our signal handler doesn't get too confused. */
-  child_pid = 0;
-
-  if (error_exit) {
-    /* An HTTP error exit is a reason to respawn. */
-    return 1;
-
-  } else if (WIFSIGNALED(status)) {
-    int signal = WTERMSIG(status);
-    fprintf(stderr, "\nprocess caught signal %d.\n\n", signal);
-    /* A signal exit is a reason to respawn unless the signal is TERM
-       or KILL. */
-    return !stop_on_terminate || (signal != SIGTERM && signal != SIGKILL);
-
-  } else {
-    int exit_status = WEXITSTATUS(status);
-    fprintf(stderr, "\nprocess exited with status %d.\n\n", WEXITSTATUS(status));
-    /* Normal exit is a reason to respawn if the status indicates failure. */
-    return !stop_on_terminate || (exit_status != 0);
-  }
-}
-
-void
-sigterm_handler() {
-  pid_t wresult;
-  int status;
-  time_t now;
-  char time_buffer[TIME_BUFFER_SIZE];
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-
-  fprintf(stderr, "\nsigterm caught at %s; shutting down.\n", time_buffer);
-  if (child_pid == 0) {
-    fprintf(stderr, "no child process.\n\n");
-
-  } else {
-    kill(child_pid, SIGTERM);
-
-    wresult = waitpid(child_pid, &status, 0);
-    if (wresult < 0) {
-      perror("waitpid");
-    } else {
-      fprintf(stderr, "child process terminated.\n\n");
-    }
-  }
-  exit(1);
-}
-
-void
-sighup_handler() {
-  time_t now;
-  char time_buffer[TIME_BUFFER_SIZE];
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-
-  fprintf(stderr, "\nsighup caught at %s.\n", time_buffer);
-  if (child_pid == 0) {
-    fprintf(stderr, "no child process.\n\n");
-
-  } else {
-    kill(child_pid, SIGHUP);
-  }
-}
-
-void 
-sigalarm_handler() {
-  fprintf(stderr, "sleep epoch was complete.\n");
-}
-
-void
-do_autorestart() {
-  char time_buffer[TIME_BUFFER_SIZE];
-  time_t now;
-  time_t *spam_respawn = NULL;
-  int sri, num_sri;
-  struct sigaction sa;
-
-  if (spam_respawn_count > 1) {
-    spam_respawn = (time_t *)malloc(sizeof(time_t) * spam_respawn_count);
-  }
-
-  /* Make our process its own process group. */
-  setpgid(0, 0);
-
-  /* Set up a signal handler to trap SIGTERM. */
-  sa.sa_handler = sigterm_handler;
-  sigemptyset(&sa.sa_mask);
-  sa.sa_flags = 0;
-  if (sigaction(SIGTERM, &sa, NULL) < 0) {
-    perror("sigaction");
-  }
-
-  /* Set up a signal handler to trap SIGHUP.  We pass this into the
-     child. */
-  sa.sa_handler = sighup_handler;
-  sigemptyset(&sa.sa_mask);
-  sa.sa_flags = 0;
-  if (sigaction(SIGHUP, &sa, NULL) < 0) {
-    perror("sigaction");
-  }
-
-  if (logfile_fd >= 0) {
-    /* If we have a logfile, dup it onto stdout and stderr. */
-    dup2(logfile_fd, STDOUT_FILENO);
-    dup2(logfile_fd, STDERR_FILENO);
-    close(logfile_fd);
-  }
-
-  /* Make sure stdin is closed. */
-  close(STDIN_FILENO);
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-  fprintf(stderr, "autorestart begun at %s.\n", time_buffer);
-
-  if (pidfile_name != NULL) {
-    unlink(pidfile_name);
-    FILE *pidfile = fopen(pidfile_name, "w");
-    if (pidfile == NULL) {
-      fprintf(stderr, "Could not write pidfile %s\n", pidfile_name);
-    } else {
-      fprintf(pidfile, "%d\n", getpid());
-      fclose(pidfile);
-    }
-  }
-
-  sri = 1;
-  num_sri = 1;
-  if (spam_respawn_count > 1) {
-    spam_respawn[1] = now;
-  }
-  
-  while (spawn_process()) {
-    now = time(NULL);
-
-    if (respawn_script != NULL) {
-      invoke_respawn_script(now);
-    }
-    
-    if (respawn_delay_time) {
-      sleep(respawn_delay_time);
-    }
-
-    /* Make sure we're not respawning too fast. */
-    if (spam_respawn_count > 1) {
-      sri = (sri + 1) % spam_respawn_count;
-      spam_respawn[sri] = now;
-      if (num_sri < spam_respawn_count) {
-        num_sri++;
-      } else {
-        time_t last = spam_respawn[(sri + 1) % spam_respawn_count];
-        if (now - last < spam_respawn_time) 
-        {
-          if(!spam_restart_delay_time) 
-          {
-            fprintf(stderr, "respawning too fast, giving up.\n");
-            break;
-          } 
-          else 
-          {
-            num_sri = 1; /* reset num_sri */
-            fprintf(stderr, "respawning too fast, will sleep for %d seconds.\n", spam_restart_delay_time);
-            signal (SIGALRM, sigalarm_handler);
-            alarm(spam_restart_delay_time);
-            pause();
-            signal (SIGALRM, SIG_IGN);
-          }
-        }
-      }
-    }
-    
-    if (stop_always) {
-      fprintf(stderr, "instructed to not autorestart, exiting.\n");
-      break;
-    }
-      
-    strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-    fprintf(stderr, "respawning at %s.\n", time_buffer);
-  }
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-  fprintf(stderr, "autorestart terminated at %s.\n", time_buffer);
-  exit(0);
-}
-
-void
-double_fork() {
-  pid_t child, grandchild, wresult;
-  int status;
-
-  /* Fork once, then again, to disassociate the child from the command
-     shell process group. */
-  child = fork();
-  if (child < 0) {
-    /* Failure to fork. */
-    perror("fork");
-    exit(1);
-  }
-
-  if (child == 0) {
-    /* Child.  Fork again. */
-    grandchild = fork();
-    if (grandchild < 0) {
-      perror("fork");
-      exit(1);
-    }
-
-    if (grandchild == 0) {
-      /* Grandchild.  Begin useful work. */
-      do_autorestart();
-      /* Shouldn't get here. */
-      exit(1);
-    }
-
-    /* Child.  Report the new pid, then terminate gracefully. */
-    fprintf(stderr, "Spawned, monitoring pid is %d.\n", grandchild);
-    exit(0);
-  }
-
-  /* Parent.  Wait for the child to terminate, then return. */
-  wresult = waitpid(child, &status, 0);
-  if (wresult < 0) {
-    perror("waitpid");
-    exit(1);
-  }
-
-  if (!WIFEXITED(status)) {
-    if (WIFSIGNALED(status)) {
-      fprintf(stderr, "child caught signal %d unexpectedly.\n", WTERMSIG(status));
-    } else {
-      fprintf(stderr, "child exited with status %d.\n", WEXITSTATUS(status));
-    }
-    exit(1);
-  }
-}
-
-void
-usage() {
-  fprintf(stderr,
-          "\n"
-          "autorestart [opts] program [args . . . ]\n"
-          "autorestart -h\n\n");
-}
-
-void
-help() {
-  usage();
-  fprintf(stderr,
-          "This program is used to run a program as a background task and\n"
-          "automatically restart it should it terminate for any reason other\n"
-          "than normal exit or explicit user kill.\n\n"
-
-          "If the program exits with a status of 0, indicating successful\n"
-          "completion, it is not restarted.\n\n"
-
-          "If the program is terminated via a TERM or KILL signal (e.g. via\n"
-          "kill [pid] or kill -9 [pid]), it is assumed the user meant for the\n"
-          "process to stop, and it is not restarted.\n\n"
-
-          "Options:\n\n"
-
-          "  -l logfilename\n"
-          "     Route stdout and stderr from the child process into the indicated\n"
-          "     log file.\n\n"
-
-          "  -p pidfilename\n"
-          "     Write the pid of the monitoring process to the indicated pidfile.\n\n"
-          "  -f\n"
-          "     Don't fork autorestart itself; run it as a foreground process. \n"
-          "     (Normally, autorestart forks itself to run as a background process.)\n"
-          "     In this case, the file named by -p is not used.\n\n"
-          
-          "  -n\n"
-          "     Do not attempt to restart the process under any circumstance.\n"
-          "     The program can still be used to execute a script on abnormal\n"
-          "     process termination.\n\n"
-
-          "  -t\n"
-          "     Stop on terminate: don't restart if the child process exits\n"
-          "     normally or is killed with a SIGTERM.  With this flag, the\n"
-          "     child process will be restarted only if it exits with a\n"
-          "     non-zero exit status, or if it is killed with a signal other\n"
-          "     than SIGTERM.  Without this flag, the default behavior is to\n"
-          "     restart the child process if it exits for any reason.\n\n"
-
-          "  -r count,secs,sleep\n"
-          "     Sleep 'sleep' seconds if the process respawns 'count' times\n"
-          "     within 'secs' seconds.  This is designed to prevent respawning\n"
-          "     from using too many system resources if something is wrong with\n"
-          "     the child process.  The default value is %d,%d,%d. Use -r 0,0,0\n"
-          "     to disable this feature.\n\n"
-
-          "  -s \"command\"\n"
-          "     Run the indicated command or script each time the process is\n"
-          "     respawned, using the system() call.  This may be useful, for\n"
-          "     instance, to notify an operator via email each time a respawn\n"
-          "     occurs.  If -c is also specified, an additional parameter will\n"
-          "     be appended to the command, indicating the number of times the\n"
-          "     respawn has occurred in the given time interval.\n\n"
-
-          "  -c secs\n"
-          "     Specifies the number of seconds over which to count respawn events\n"
-          "     for the purposes of passing an argument to the script named with\n"
-          "     -s.\n\n"
-
-          "  -d secs\n"
-          "     Specifies the number of seconds to delay for between restarts.\n"
-          "     The default is %d.\n\n"
-
-#ifdef HAVE_LIBCURL
-          "  -W watchdog_url,start,cycle,timeout\n"
-          "     Specifies an optional URL to watch while waiting for the process\n"
-          "     to terminate.  If this is specified, autorestart will start the process,\n"
-          "     wait start seconds, and then repeatedly poll the indicated URL\n"
-          "     every cycle seconds.  If a HTTP failure code is detected,\n"
-          "     or no response is received within timeout seconds, then the\n"
-          "     child is terminated and restarted.  The start, cycle, and timeout\n"
-          "     parameters are all required.\n\n"
-#endif  /* HAVE_LIBCURL */
-
-          "  -U username\n"
-          "     Change to the indicated user upon startup.  The logfile is still\n"
-          "     created as the initial user.\n\n"
-
-          "  -G groupname\n"
-          "     Change to the indicated group upon startup.\n\n"
-
-          "  -D dirname\n"
-          "     Change to the indicated working directory upon startup.  The logfile\n"
-          "     is still created relative to the initial startup directory.\n\n"
-
-          "  -h\n"
-          "     Output this help information.\n\n",
-          spam_respawn_count, spam_respawn_time, spam_restart_delay_time, respawn_delay_time);
-}
-
-void
-parse_int_triplet(char *param, int *a, int *b, int *c) {
-  char *comma;
-  char *comma2;
-  
-  comma = strchr(param, ',');
-  if (comma == NULL) {
-    fprintf(stderr, "Comma required: %s\n", param);
-    exit(1);
-  }
-
-  comma2 = strchr(comma+1, ',');
-  if (comma2 == NULL) {
-    fprintf(stderr, "Second comma required: %s\n", param);
-    exit(1);
-  }
-
-  *comma = '\0';
-  *comma2 = '\0';
-  
-  *a = atoi(param);
-  *b = atoi(comma + 1);
-  *c = atoi(comma2 + 1);
-}
-
-void 
-parse_watchdog(char *param) {
-  char *comma;
-  char *comma2;
-  char *comma3;
-
-#ifndef HAVE_LIBCURL
-  fprintf(stderr, "-W requires autorestart to have been compiled with libcurl support.\n");
-  exit(1);
-#endif  /* HAVE_LIBCURL */
-
-  comma = strrchr(param, ',');
-  if (comma == NULL) {
-    fprintf(stderr, "Comma required: %s\n", param);
-    exit(1);
-  }
-  *comma = '\0';
-
-  comma2 = strrchr(param, ',');
-  if (comma2 == NULL) {
-    *comma = ',';
-    fprintf(stderr, "Second comma required: %s\n", param);
-    exit(1);
-  }
-  *comma2 = '\0';
-
-  comma3 = strrchr(param, ',');
-  if (comma3 == NULL) {
-    *comma = ',';
-    *comma2 = ',';
-    fprintf(stderr, "Third comma required: %s\n", param);
-    exit(1);
-  }
-  *comma3 = '\0';
-
-  watchdog_url = param;
-  watchdog_start_sec = atoi(comma3 + 1);
-  watchdog_cycle_sec = atoi(comma2 + 1);
-  watchdog_timeout_sec = atoi(comma + 1);
-}
-
-
-int 
-main(int argc, char *argv[]) {
-  extern char *optarg;
-  extern int optind;
-  /* The initial '+' instructs GNU getopt not to reorder switches. */
-  static const char *optflags = "+l:p:fntr:s:c:d:W:U:G:D:h";
-  int flag;
-
-  flag = getopt(argc, argv, optflags);
-  while (flag != EOF) {
-    switch (flag) {
-    case 'l':
-      logfile_name = optarg;
-      break;
-
-    case 'p':
-      pidfile_name = optarg;
-      break;
-
-    case 'f':
-      dont_fork = 1;
-      break;
-
-    case 'n':
-      stop_always = 1;
-      break;
-
-    case 't':
-      stop_on_terminate = 1;
-      break;
-
-    case 'r':
-      parse_int_triplet(optarg, &spam_respawn_count, &spam_respawn_time, &spam_restart_delay_time);
-      break;
-
-    case 's':
-      respawn_script = optarg;
-      break;
-
-    case 'c':
-      respawn_count_time = atoi(optarg);
-      break;
-
-    case 'd':
-      respawn_delay_time = atoi(optarg);
-      break;
-
-    case 'W':
-      parse_watchdog(optarg);
-      break;
-
-    case 'U':
-      startup_username = optarg;
-      break;
-
-    case 'G':
-      startup_groupname = optarg;
-      break;
-
-    case 'D':
-      startup_chdir = optarg;
-      break;
-      
-    case 'h':
-      help();
-      return 1;
-
-    case '?':
-    case '+':
-      usage();
-      return 1;
-
-    default:
-      fprintf(stderr, "Unhandled switch: -%c\n", flag);
-      return 1;
-    }
-    flag = getopt(argc, argv, optflags);
-  }
-
-  argc -= (optind - 1);
-  argv += (optind - 1);
-
-  if (argc < 2) {
-    fprintf(stderr, "No program to execute given.\n");
-    usage();
-    return 1;
-  }
-
-  params = &argv[1];
-
-  if (logfile_name != NULL) {
-    logfile_fd = open(logfile_name, O_WRONLY | O_CREAT | O_TRUNC, 0666);
-    if (logfile_fd < 0) {
-      fprintf(stderr, "Cannot write to logfile %s: %s\n", 
-              logfile_name, strerror(errno));
-      return 1;
-    }
-    fprintf(stderr, "Generating output to %s.\n", logfile_name);
-  }
-
-  if (startup_chdir != NULL) {
-    if (chdir(startup_chdir) != 0) {
-      perror(startup_chdir);
-      return 1;
-    }
-  }
-
-  if (startup_groupname != NULL) {
-    struct group *grp;
-    grp = getgrnam(startup_groupname);
-    if (grp == NULL) {
-      perror(startup_groupname);
-      return 1;
-    }
-
-    if (setgid(grp->gr_gid) != 0) {
-      perror(startup_groupname);
-      return 1;
-    }
-  }
-
-  if (startup_username != NULL) {
-    struct passwd *pwd;
-    pwd = getpwnam(startup_username);
-    if (pwd == NULL) {
-      perror(startup_username);
-      return 1;
-    }
-
-    if (setuid(pwd->pw_uid) != 0) {
-      perror(startup_username);
-      return 1;
-    }
-  }
-
-  if (dont_fork) {
-    do_autorestart();
-  } else {
-    double_fork();
-  }
-
-  return 0;
-}
-
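Note: the removed autorestart.c was a small Unix supervisor. It forked the given program, optionally polled a watchdog URL via libcurl, and respawned the child unless it exited cleanly or was deliberately killed. A minimal Python sketch of roughly the "-t" (stop-on-terminate) restart loop; this is not the removed C code, and the command and delay below are placeholders:

    # Minimal sketch, not the removed C program: respawn a child until it
    # exits cleanly or is killed with SIGTERM/SIGKILL.
    import signal
    import subprocess
    import time

    def autorestart(cmd, delay=5):
        while True:
            status = subprocess.Popen(cmd).wait()
            # On POSIX a negative status means the child died from a signal.
            if status == 0 or -status in (signal.SIGTERM, signal.SIGKILL):
                return status
            time.sleep(delay)  # respawn delay, like the removed tool's -d option

    # e.g. autorestart(["python", "my_server.py"])  # placeholder command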

+ 1 - 2
direct/src/directscripts/Doxyfile.python

@@ -643,8 +643,7 @@ RECURSIVE              = YES
 # excluded from the INPUT source files. This way you can easily exclude a 
 # subdirectory from a directory tree whose root is specified with the INPUT tag.
 
-EXCLUDE                = built/direct/test \
-                         built/direct/plugin \
+EXCLUDE                = built/direct/plugin \
                          built/direct/plugin_npapi \
                          built/direct/plugin_activex \
                          built/direct/plugin_installer \

+ 0 - 29
direct/src/heapq/Sources.pp

@@ -1,29 +0,0 @@
-// DIR_TYPE "metalib" indicates we are building a shared library that
-// consists mostly of references to other shared libraries.  Under
-// Windows, this directly produces a DLL (as opposed to the regular
-// src libraries, which don't produce anything but a pile of OBJ files
-// under Windows).
-
-#define DIR_TYPE metalib
-
-// This directory strictly contains a Python utility; therefore, only
-// build it if we actually have Python.
-#define BUILD_DIRECTORY $[HAVE_PYTHON]
-
-
-#define OTHER_LIBS \
-  pandaexpress:m \
-  p3dconfig:c p3dtoolconfig:m \
-  p3dtoolutil:c p3dtoolbase:c p3prc:c p3dtool:m
-
-#begin metalib_target
-  #define TARGET p3heapq
-
-  // Tell ppremake to treat this file as if it had been generated via
-  // interrogate.  On OSX, this will move it into the .so, instead of
-  // the .dylib, so that it can be imported into Python.
-  #define PYTHON_MODULE_ONLY 1
-
-  #define SOURCES heapq.cxx
-#end metalib_target
-

+ 0 - 240
direct/src/heapq/heapq.cxx

@@ -1,240 +0,0 @@
-
-/* Note: This module can probably go away when we upgrade to Python 2.4.
-   Python 2.3 has a heapq implementation, but it is in Python. This is
-   reported to be about 20x faster. In 2.4 they reimplemented heapq in C so
-   it should be comparable to this. At this time though, Python 2.4 is
-   still in alpha.
-   
-   Note: This code has been bastardized to only work on Tasks temporarily.
-
-*/
-
-#include <Python.h>
-
-/* Prototypes */
-static PyObject * heappush(PyObject *self, PyObject *args);
-static PyObject * heappop(PyObject *self, PyObject *args);
-static PyObject * heapreplace(PyObject *self, PyObject *args);
-static PyObject * heapify(PyObject *self, PyObject *args);
-static int _siftdown(PyObject *list, int startpos, int pos);
-static int _siftup(PyObject *list, int pos);
-
-#ifdef _WIN32
-extern "C" __declspec(dllexport) void initlibheapq(void);
-extern "C" __declspec(dllexport) void initlibp3heapq(void);
-#else
-extern "C" void initlibheapq();
-extern "C" void initlibp3heapq();
-#endif
-
-static PyObject *
-heappush(PyObject *self, PyObject *args) {
-    int len;
-    PyObject *list = NULL;
-    PyObject *node = NULL;
-    
-    if (!PyArg_ParseTuple(args,"O!O",&PyList_Type,&list,&node))
-        return NULL;
-
-    len = PyList_Size(list);
-    if (PyList_Append(list,node))
-        return NULL;
-    
-    if (_siftdown(list,0,len))
-        return NULL;
-    
-    Py_INCREF(Py_None);
-    return Py_None;
-}
-
-static PyObject *
-heappop(PyObject *self, PyObject *args) {
-    PyObject *list = NULL;
-    PyObject *node = NULL;
-    PyObject *returnNode = NULL;
-    int len;
-    
-    if (!PyArg_ParseTuple(args,"O!",&PyList_Type,&list))
-        return NULL;
-
-    len = PyList_Size(list);
-    if (len == 0) {
-        /* Special-case most common failure cause */
-        PyErr_SetString(PyExc_IndexError, "pop from empty list");
-        return NULL;
-    }
-
-    node = PySequence_GetItem(list,-1);
-    PySequence_DelItem(list,-1);
-
-    len -= 1;
-    if (len > 0) {
-        returnNode = PySequence_GetItem(list,0);
-        PyList_SetItem(list,0,node);
-        if (_siftup(list,0))
-            return NULL;
-    } else {
-        returnNode = node;
-    }
-    
-    return returnNode;
-}
-
-static PyObject * 
-heapreplace(PyObject *self, PyObject *args) {
-    PyObject *list = NULL;
-    PyObject *node = NULL;
-    PyObject *returnNode = NULL;
-    int len;
-    
-    if (!PyArg_ParseTuple(args,"O!O",&PyList_Type,&list,&node))
-        return NULL;
-
-    len = PyList_Size(list);
-    if (len == 0) {
-        /* Special-case most common failure cause */
-        PyErr_SetString(PyExc_IndexError, "replace on an empty list");
-        return NULL;
-    }
-
-    returnNode = PySequence_GetItem(list,0);
-    PySequence_SetItem(list,0,node);
-    if (_siftup(list,0))
-        return NULL;
-
-    return returnNode;
-}
-
-static PyObject *
-heapify(PyObject *self, PyObject *args) {
-    int n, i;
-    PyObject *list;
-
-    if (!PyArg_ParseTuple(args,"O!",&PyList_Type,&list))
-        return NULL;
-    n = (PyList_Size(list)/2)-1;
-    
-    for (i=n;i>=0;i--) {
-        if (_siftup(list,i))
-            return NULL;
-    }
-
-    Py_INCREF(Py_None);
-    return Py_None;
-}
-
-static int
-_siftdown(PyObject *list, int startpos, int pos) {
-    PyObject *newitem, *parent;
-    int parentpos;
-
-    newitem = PySequence_GetItem(list,pos);
-
-    PyObject *newitem_wakeTime_obj = PyObject_GetAttrString(newitem, "wakeTime");
-    double newitem_wakeTime = 0.0;
-    if (newitem_wakeTime_obj != NULL) {
-      newitem_wakeTime = PyFloat_AS_DOUBLE(newitem_wakeTime_obj);
-      Py_DECREF(newitem_wakeTime_obj);
-    }
-
-    while (pos > startpos) {
-        parentpos = (pos - 1) >> 1;
-        parent = PyList_GetItem(list,parentpos);
-
-        /*
-        cmp = PyObject_RichCompareBool(parent,newitem,Py_LE);
-        if (cmp > 0)
-            break;
-        else if (cmp < 0)
-            return -1;
-        */
-
-        PyObject *parent_wakeTime_obj = PyObject_GetAttrString(parent, "wakeTime");
-        double parent_wakeTime = 0.0;
-        if (parent_wakeTime_obj != NULL) {
-          parent_wakeTime = PyFloat_AS_DOUBLE(parent_wakeTime_obj);
-          Py_DECREF(parent_wakeTime_obj);
-        }
-
-        if (parent_wakeTime <= newitem_wakeTime) {
-          break;
-        }
-
-        Py_INCREF(parent);
-        PyList_SetItem(list,pos,parent);
-        pos = parentpos;
-    }
-    PyList_SetItem(list,pos,newitem);
-    return 0;
-}
-
-static int
-_siftup(PyObject *list, int pos) {
-    PyObject *newitem, *right, *child;
-    int endpos, rightpos, childpos;
-    int startpos = pos;
-    
-    endpos = PyList_Size(list);
-    newitem = PySequence_GetItem(list,pos);
-    
-    childpos = (2*pos)+1;
-    while (childpos < endpos) {
-        rightpos = childpos + 1;
-        child = PySequence_Fast_GET_ITEM(list,childpos);
-
-        PyObject *child_wakeTime_obj = PyObject_GetAttrString(child, "wakeTime");
-        double child_wakeTime = 0.0;
-        if (child_wakeTime_obj != NULL) {
-          child_wakeTime = PyFloat_AS_DOUBLE(child_wakeTime_obj);
-          Py_DECREF(child_wakeTime_obj);
-        }
-
-
-        if (rightpos < endpos) {
-            right = PySequence_Fast_GET_ITEM(list,rightpos);
-
-            PyObject *right_wakeTime_obj = PyObject_GetAttrString(right, "wakeTime");
-            double right_wakeTime = 0.0;
-            if (right_wakeTime_obj != NULL) {
-              right_wakeTime = PyFloat_AS_DOUBLE(right_wakeTime_obj);
-              Py_DECREF(right_wakeTime_obj);
-            }
-
-            /*
-            cmp = PyObject_RichCompareBool(right,child,Py_LE);
-            if (cmp > 0)
-              childpos = rightpos;
-            else if (cmp < 0)
-              return -1;
-            */
-
-            if (right_wakeTime <= child_wakeTime) {
-              childpos = rightpos;
-            }
-        }
-        child = PySequence_GetItem(list,childpos);
-        PyList_SetItem(list,pos,child);
-        pos = childpos;
-        childpos = (2*pos)+1;
-    }
-    PyList_SetItem(list,pos,newitem);
-
-    return _siftdown(list,startpos,pos);
-}
-
-static PyMethodDef heapqcMethods[] = {
-    {"heappush",heappush,METH_VARARGS},
-    {"heappop",heappop,METH_VARARGS},
-    {"heapreplace",heapreplace,METH_VARARGS},
-    {"heapify",heapify,METH_VARARGS},
-    {NULL, NULL} /* Sentinel */
-};
-
-void initlibheapq(void) {
-    (void) Py_InitModule("libheapq", heapqcMethods);
-};
-
-void initlibp3heapq(void) {
-    (void) Py_InitModule("libp3heapq", heapqcMethods);
-};
-
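Note: as the deleted file's own header says, this C extension existed only because Python 2.3's heapq was pure Python, and it hard-coded the ordering on a task's wakeTime attribute. A rough sketch of the standard-library equivalent; the Task class and names here are illustrative, not Panda3D's actual task-manager code:

    # Rough sketch of the stdlib replacement; Task is a stand-in class.
    import heapq

    class Task(object):
        def __init__(self, name, wakeTime):
            self.name = name
            self.wakeTime = wakeTime

    pending = []
    for n, task in enumerate([Task("b", 3.0), Task("a", 1.0)]):
        # Push (wakeTime, tie-breaker, task) so tasks themselves never compare.
        heapq.heappush(pending, (task.wakeTime, n, task))

    wake_time, _, next_task = heapq.heappop(pending)  # next_task.name == "a"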

+ 1 - 1
direct/src/http/LandingPage.py

@@ -3,7 +3,7 @@ from direct.directnotify.DirectNotifyGlobal import directNotify
 from pandac.PandaModules import VirtualFileSystem
 from pandac.PandaModules import Filename
 from pandac.PandaModules import DSearchPath
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 import LandingPageHTML
 from StringIO import StringIO
 

+ 1 - 1
direct/src/http/LandingPageHTML.py

@@ -1,6 +1,6 @@
 # -- Text content for the landing page.  You should change these for yours! --
 
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 
 title = "Landing Page"
 defaultTitle = title

+ 1 - 1
direct/src/http/WebRequest.py

@@ -4,7 +4,7 @@ from direct.directnotify.DirectNotifyGlobal import directNotify
 from direct.task.TaskManagerGlobal import taskMgr
 from direct.task import Task
 from LandingPage import LandingPage
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 
 notify = directNotify.newCategory('WebRequestDispatcher')
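Note: these three http modules now import the standard library's xml.etree.ElementTree in place of the copy bundled as direct/src/showbase/ElementTree.py, which this commit deletes (see the file list above). The stdlib module provides the same ET interface; a minimal usage sketch, illustrative only and not taken from these modules:

    # Minimal sketch: stdlib ElementTree construction and serialisation.
    import xml.etree.ElementTree as ET

    root = ET.Element("landingPage")
    ET.SubElement(root, "title").text = "Landing Page"
    xml_text = ET.tostring(root)  # <landingPage><title>Landing Page</title></landingPage>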
 

+ 0 - 504
direct/src/pyinst/Builder.py

@@ -1,504 +0,0 @@
-import string
-import pprint
-import sys
-import os
-import ConfigParser
-import pprint
-import shutil
-import tempfile
-import ltoc
-import tocfilter
-import resource
-import archive
-import archivebuilder
-import carchive
-
-logfile = None
-autopath = []
-built = {}
-copyFile = None
-
-class Target:
-    def __init__(self, cfg, sectnm, cnvrts):
-        self.children = []
-        self._dependencies = ltoc.lTOC() # the stuff an outer package will need to use me
-        self.cfg = cfg
-        self.__name__ = 'joe'
-        for optnm in cfg.options(sectnm):
-            cnvrt = cnvrts.get(optnm, 'getstringlist')
-            if cnvrt:
-                f = getattr(self, cnvrt, None)
-                if f:
-                    self.__dict__[optnm] = f(cfg.get(sectnm, optnm))
-        if not hasattr(self, 'name'):
-            self.name = self.__name__
-        print "Initializing", self.__name__
-        self.pathprefix = autopath + self.pathprefix
-        self.pathprefix.append(os.path.join(pyinsthome, 'support'))
-        for z in self.zlib:
-            if z in self.cfg.sections():
-                self.children.append(z)
-            else:
-                raise ValueError, "%s - zlib '%s' does not refer to a sections" \
-                      % (self.name, z)
-        for i in range(len(self.misc)):
-            x = self.misc[i]
-            if x in self.cfg.sections():
-                if self.cfg.get(x, "type") == 'PYZ':
-                    self.zlib.append(x)
-                    self.misc[i] = None
-                self.children.append(x)
-        self.misc = filter(None, self.misc)
-        self.edit()
-        self.toc = ltoc.lTOC()
-        for thingie in self.excludes:
-            try:
-                fltr = tocfilter.makefilter(thingie, self.pathprefix)
-            except ValueError:
-                print "Warning: '%s' not found - no filter created" % thingie
-            else:
-                self.toc.addFilter(fltr)
-        if self.exstdlib:
-            self.toc.addFilter(tocfilter.StdLibFilter())
-        if self.extypes:
-            self.toc.addFilter(tocfilter.ExtFilter(self.extypes))
-        if self.expatterns:
-            self.toc.addFilter(tocfilter.PatternFilter(self.expatterns))
-
-        ##------utilities------##
-    def dump(self):
-        logfile.write("---- %s: %s -----\n" % (self.__class__.__name__, self.name))
-        pprint.pprint(self.__dict__, logfile)
-    def getstringlist(self, opt):
-        tmp = string.split(opt, ',')
-        return filter(None, map(string.strip, tmp))
-    def getstring(self, opt):
-        return opt
-    def getbool(self, opt):
-        if opt in ('0','f','F','n','N'):
-            return 0
-        return 1
-        ##-----framework-----##
-    def build(self):
-        print "Gathering components of %s" % self.name
-        self.gather()
-        logfile.write("Final Table of Contents for %s:\n" % self.name)
-        pprint.pprint(self.toc.toList(), logfile)
-        print "Creating %s" % self.name
-        self.assemble()
-        ##-----overrideables-----##
-    def edit(self):
-        pass
-    def gather(self):
-        pass
-    def assemble(self):
-        pass
-
-class PYZTarget(Target):
-    def __init__(self, cfg, sectnm, cnvrts):
-        Target.__init__(self, cfg, sectnm, cnvrts)
-        # to use a PYZTarget, you'll need imputil and archive
-        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'imputil.py')])
-        print "pyinsthome:", pyinsthome
-        imputil = resource.makeresource('imputil.py', [pyinsthome])
-        self._dependencies.append(imputil)
-        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'archive_rt.py')])
-        archmodule = resource.makeresource('archive_rt.py', [pyinsthome])
-        self._dependencies.merge(archmodule.dependencies())
-        self._dependencies.append(archmodule)
-        self.toc.addFilter(archmodule)
-        self.toc.addFilter(imputil)
-        for mod in archmodule.modules:
-            self.toc.addFilter(mod)
-    def edit(self):
-        if self.extypes:
-            print "PYZ target %s ignoring extypes = %s" % (self.__name__, self.extypes)
-
-    def gather(self):
-        for script in self.dependencies:
-            rsrc = resource.makeresource(script, self.pathprefix)
-            if not isinstance(rsrc, resource.scriptresource):
-                print "Bug alert - Made %s from %s!" % (rsrc, script)
-            self.toc.merge(rsrc.modules)
-        logfile.write("lTOC after expanding 'depends':\n")
-        pprint.pprint(self.toc.toList(), logfile)
-        for thingie in self.includes + self.directories + self.packages:
-            rsrc = resource.makeresource(thingie, self.pathprefix)
-##            if not isinstance(rsrc, resource.pythonresource):
-##                print "PYZ target %s ignoring include %s" % (self.name, thingie)
-##            else:
-            self.toc.merge(rsrc.contents())
-        logfile.write("lTOC after includes, dir, pkgs:\n")
-        pprint.pprint(self.toc.toList(), logfile)
-        self.toc.addFilter(tocfilter.ExtFilter(['.py', '.pyc', '.pyo'], 1))
-        logfile.write("Applying the following filters:\n")
-        pprint.pprint(self.toc.filters, logfile)
-        self.toc.filter()
-
-    def assemble(self):
-        contents = self.toc.toList()
-        if contents:
-            lib = archive.ZlibArchive()
-            lib.build(self.name, archivebuilder.GetCompiled(self.toc.toList()))
-
-class CollectTarget(Target):
-    def __init__(self, cfg, sectnm, cnvrts):
-        Target.__init__(self, cfg, sectnm, cnvrts)
-
-    _rsrcdict = {'COLLECT': resource.dirresource, 'PYZ': resource.zlibresource, 'CARCHIVE': resource.archiveresource}
-
-    def gather(self):
-        if self.support:
-            # the bare minimum
-            self.toc.merge([resource.makeresource('python20.dll')])
-            self.toc.merge([resource.makeresource('exceptions.pyc').asBinary()])
-        # zlib, bindepends, misc, trees, destdir
-        for i in range(len(self.zlib)):
-            # z refers to the section name
-            z = self.zlib[i]
-            nm = self.cfg.get(z, 'name')
-            try:
-                self.toc.merge([resource.makeresource(nm, ['.'])])
-            except ValueError:
-                # zlibs aren't written if they turn out to be empty
-                self.zlib[i] = None
-        self.zlib = filter(None, self.zlib)
-        if self.zlib:
-            target = built.get(self.zlib[0], None)
-            if target:
-                self.toc.merge(target._dependencies)
-        for script in self.bindepends:
-            rsrc = resource.makeresource(script, self.pathprefix)
-            self.toc.merge(rsrc.binaries)
-        logfile.write('ltoc after bindepends:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        for thingie in self.misc:
-            if thingie in self.cfg.sections():
-                name = self.cfg.get(thingie, "name")
-                typ = self.cfg.get(thingie, "type")
-                klass = self._rsrcdict.get(typ, resource.dataresource)
-                rsrc = apply(klass, (name, name))
-                #now make sure we have the stuff the resource requires
-                target = built.get(thingie, None)
-                if target:
-                    self.toc.merge(target._dependencies)
-            else:
-                rsrc = resource.makeresource(thingie, self.pathprefix)
-            self.toc.merge(rsrc.contents())
-        logfile.write('ltoc after misc:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        for script in self.script:
-            if string.find(script, '.') == -1:
-                script = script + '.py'
-            rsrc = resource.makeresource(script, self.pathprefix)
-            if rsrc.typ == 'm':
-                rsrc.typ = 's'
-            self.toc.merge([rsrc])
-        logfile.write('ltoc after scripts:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        for tree in self.trees:
-            try:
-                rsrc = resource.treeresource('.', tree)
-            except ValueError:
-                print "tree %s not found" % tree
-            else:
-                self.toc.merge(rsrc.contents())
-        logfile.write('ltoc after trees:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        self.toc.addFilter(tocfilter.TypeFilter(['d']))
-        logfile.write("Applying the following filters:\n")
-        pprint.pprint(self.toc.filters, logfile)
-        self.toc.filter()
-        #don't dupe stuff in a zlib that's part of this target
-        if self.zlib:
-           ztoc = ltoc.lTOC()
-           for zlibnm in self.zlib:
-               target = built.get(zlibnm, None)
-               if target:
-                   ztoc.merge(target.toc)
-           for i in range(len(self.toc)-1, -1, -1):
-               rsrc = self.toc[i]
-               if isinstance(rsrc, resource.moduleresource) and rsrc in ztoc:
-                   del self.toc[i]
-
-    def assemble(self):
-        if os.path.exists(self.name):
-            if os.path.isdir(self.name):
-                for fnm in os.listdir(self.name):
-                    try:
-                        os.remove(os.path.join(self.name, fnm))
-                    except:
-                        print "Could not delete file %s" % os.path.join(self.name, fnm)
-        else:
-            os.makedirs(self.name)
-        mysite = []
-        for nm, path, typ in self.toc.toList():
-            shutil.copy2(path, self.name)
-            if typ == 'z':
-                mysite.append('imputil.FuncImporter(archive.ZlibArchive("%s", 0).get_code).install()' % nm)
-        if mysite:
-            mysite.insert(0, 'import archive, imputil')
-            open(os.path.join(self.name, 'site.py'),'w').write(string.join(mysite, '\n'))
-
-
-class ArchiveTarget(CollectTarget):
-    usefullname = 1
-    def __init__(self, cfg, sectnm, cnvrts):
-        CollectTarget.__init__(self, cfg, sectnm, cnvrts)
-        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'carchive_rt.py')])
-        carchmodule = resource.makeresource('carchive_rt.py', [pyinsthome])
-        self._dependencies.merge(carchmodule.dependencies())
-        self._dependencies.append(carchmodule)
-
-    def edit(self):
-        if self.destdir:
-            print "Warning 'destdir = %s' ignored for %s" % (self.destdir, self.name)
-
-    def gather(self):
-        CollectTarget.gather(self)
-
-    _cdict = {'s':2,'m':1,'b':1,'x':1,'a':0,'z':0, 'p':1}
-
-    def assemble(self, pkgnm=None):
-        if pkgnm is None:
-            pkgnm = self.name
-        arch = carchive.CArchive()
-        toc = []
-        pytoc = []
-        for nm, path, typ in self.toc.toList():
-            compress = self._cdict[typ]
-            if typ == 'b' or (self.usefullname and typ in 'ms'):
-                nm = os.path.basename(path)
-            if typ == 'm':
-                pytoc.append((nm, path, compress, typ))
-            else:
-                toc.append((nm, path, compress, typ))
-        toc = toc + archivebuilder.GetCompiled(pytoc)
-        arch.build(pkgnm, toc)
-        return arch
-
-class FullExeTarget(ArchiveTarget):
-    usefullname = 0
-    def __init__(self, cfg, sectnm, cnvrts):
-        ArchiveTarget.__init__(self, cfg, sectnm, cnvrts)
-
-    def gather(self):
-        for script in self.script:
-            #print "FullExeTarget.gather: script is", repr(script)
-            rsrc = resource.makeresource(script, self.pathprefix)
-            rsrc = resource.scriptresource(rsrc.name, rsrc.path)
-            #print " resource is", repr(rsrc)
-            self.toc.merge(rsrc.binaries)
-        ArchiveTarget.gather(self)
-        if not self.zlib:
-            self.toc.merge(rsrc.modules)
-        self._dependencies = ltoc.lTOC()
-
-    _cdict = {'s':2,'m':0,'b':1,'x':0,'a':0,'z':0}
-    _edict = { (1, 1):'Runw_d.exe', (1, 0):'Runw.exe', (0, 1):'Run_d.exe', (0, 0):'Run.exe'}
-
-    def assemble(self):
-        pkgname = tempfile.mktemp()
-        arch = ArchiveTarget.assemble(self, pkgname)
-        exe = self._edict[(self.userunw, self.debug)]
-        exe = os.path.normpath(os.path.join(pyinsthome, 'support', exe))
-##        copyFile([exe, pkgname], self.name)
-##        os.remove(pkgname)
-        # Thomas Heller's icon code
-        # my version
-        if self.icon:
-            myexe = tempfile.mktemp()
-            copyFile (exe, myexe)
-            try:
-                from icon import CopyIcons
-                CopyIcons(myexe, self.icon)
-            except ImportError:
-                print "win32api is required for updating icons"
-                print "You should have win32api.pyd and PyWinTypes20.dll"
-                print "in the installation directory."
-                print "Please copy them to Python's DLLS subdirectory"
-                print "(or install Mark Hammond's Win32 extensions)."
-##        iconfile = None
-##        for name in self.cfg.sections():
-##            if self.cfg.get (name, "type") == "STANDALONE":
-##                try:
-##                    iconfile = self.cfg.get (name, "iconfile")
-##                except:
-##                    pass
-##        if iconfile:
-##            from icon import CopyIcons
-##            CopyIcons (myexe, iconfile)
-            copyFile ([myexe, pkgname], self.name)
-            os.remove(myexe)
-        else:
-            copyFile([exe, pkgname], self.name)
-        #os.remove(pkgname)
-
-class ExeTarget(FullExeTarget):
-    def __init__(self, cfg, sectnm, cnvrts):
-        FullExeTarget.__init__(self, cfg, sectnm, cnvrts)
-
-    def edit(self):
-        if not self.script:
-            raise ValueError, "EXE target %s requires 'script= <script>'" % self.__name__
-
-    def gather(self):
-        FullExeTarget.gather(self)
-        for i in range(len(self.toc)-1, -1, -1):
-            rsrc = self.toc[i]
-            if rsrc.typ == 'b':
-                self._dependencies.append(rsrc)
-                del self.toc[i]
-
-installpreamble = """\
-import sys, os
-import installutils
-import carchive_rt
-idir = installutils.getinstalldir()
-me = sys.argv[0]
-if me[:-4] != '.exe':
-    me = me + '.exe'
-this = carchive_rt.CArchive(sys.argv[0])
-here = sys.path[0]
-"""
-mvfile = "installutils.copyFile(os.path.join(here, '%s'), os.path.join(idir, '%s'))\n"
-extractfile = "open(os.path.join(idir, '%s'), 'wb').write(this.extract('%s')[1])\n"
-sitepreamble = """\
-import archive_rt
-import imputil
-import sys
-"""
-importzlib = "imputil.FuncImporter(archive_rt.ZlibArchive(sys.path[0]+'/%s').get_code).install()\n"
-
-class InstallTarget(FullExeTarget):
-    def __init__(self, cfg, sectnm, cnvrts):
-        FullExeTarget.__init__(self, cfg, sectnm, cnvrts)
-
-    def edit(self):
-        if not self.script:
-            open('gen_install.py', 'w').write(installpreamble)
-            self.script = ['gen_install.py']
-
-    def gather(self):
-        FullExeTarget.gather(self)
-        if self.script[0] == 'gen_install.py':
-            f = open(self.script[0], 'a')
-            for rsrc in self.toc:
-                if isinstance(rsrc, resource.binaryresource):
-                    nm = os.path.basename(rsrc.path)
-                    f.write(mvfile % (nm, nm))
-                elif isinstance(rsrc, resource.pythonresource):
-                    pass
-                elif isinstance(rsrc, resource.zlibresource):
-                    pass
-                else:
-                    f.write(extractfile % (rsrc.name, rsrc.name))
-                    if isinstance(rsrc, resource.archiveresource):
-                        #did it come with an install script?
-                        target = built.get(rsrc.name, None)
-                        if target:
-                           if hasattr(target, "installscript"):
-                               for script in target.installscript:
-                                   s = resource.makeresource(script, self.pathprefix)
-                                   txt = open(s.path, 'r').read()
-                                   f.write(txt)
-            f.close()
-
-dispatch = {
-                'PYZ': PYZTarget,
-                'CARCHIVE': ArchiveTarget,
-                'COLLECT': CollectTarget,
-                'STANDALONE': ExeTarget,
-                'INSTALL': InstallTarget,
-                'FULLEXE': FullExeTarget,
-}
-
-
-def makeTarget(cfg, section):
-    return dispatch[cfg.get(section, 'type')](cfg, section, optcnvrts)
-
-optdefaults = { 'type':'PYZ',
-                'script':'',            # INSTALL (opt) & STANDALONE (required)
-                'zlib':'',              # INSTALL, STANDALONE, COLLECT
-                'bindepends':'',        # INSTALL, COLLECT
-                'misc':'',              # INSTALL. COLLECT
-                'includetk': '0',       # INSTALL, COLLECT
-        'userunw': '0',         # STANDALONE
-                'dependencies':'',      # PYZ
-                'directories':'',       # PYZ
-                'excludes':'',          # PYZ, INSTALL, COLLECT
-                'expatterns': '',
-                'exstdlib': '0',
-                'extypes': '',
-                'includes':'',          # PYZ
-                'packages':'',          # PYZ
-                'destdir':'',           # COLLECT
-                'pathprefix': '',
-                'trees': '',
-                'debug': '0',
-                'support': '1', # include python20.dll & exceptons.pyc at a minimum
-                'icon': '',
-}
-
-optcnvrts = {   'type':'',
-                'name': 'getstring',
-                'exstdlib': 'getbool',
-                'console': 'getbool',
-                'analyze': 'getbool',
-                'debug': 'getbool',
-                'includetk': 'getbool',
-                'userunw': 'getbool',
-                'destdir': 'getstring',
-                'support': 'getbool',
-                '__name__': 'getstring',
-                'icon': 'getstring',
-}
-def main(opts, args):
-    global pyinsthome
-    global copyFile
-    pyinsthome = os.path.abspath(os.path.dirname(sys.argv[0]))
-    # sys.path.insert(0, os.path.join(pyinsthome, 'support'))
-    import installutils
-    copyFile = installutils.copyFile
-    global logfile
-    logfile = open('Builder.log','w')
-    targets = []
-    xref = {}
-    cfg = ConfigParser.ConfigParser(optdefaults)
-    for arg in args:
-        dirnm = os.path.dirname(arg)
-        if dirnm == '':
-            dirnm = '.'
-        autopath.append(os.path.abspath(dirnm))
-    cfg.read(args)
-    for section in cfg.sections():
-        target = makeTarget(cfg, section)
-        targets.append(target)
-        xref[section] = target
-    while targets:
-        for i in range(len(targets)):
-            target = targets[i]
-            for child in target.children:
-                if xref[child] in targets:
-                    break
-            else:       #no break - ready to build
-                target.dump()
-                target.build()
-                built[target.__name__] = target
-                built[target.name] = target
-                targets[i] = None
-                break
-        else:       #no break - couldn't find anything to build
-            names = map(lambda x: getattr(x, 'name'), targets)
-            raise RuntimeError, "circular dependencies in %s" % repr(names)
-        targets = filter(None, targets)
-
-def run(file):
-    main([], file)
-
-if __name__ == '__main__':
-    import getopt
-    (opts, args) = getopt.getopt(sys.argv[1:], 'dv')
-    print "opts:", opts
-    print "args:", args
-    main(opts, args)
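
For reference, a minimal sketch of the kind of spec file that makeTarget() and main() expect, assuming only option names listed in optdefaults/optcnvrts above; the section name, the 'name' value, and the file names are hypothetical.

    import ConfigParser

    # One PYZ section; Builder.main() reads this file, looks up 'type' in
    # dispatch, and constructs the matching target class for the section.
    cfg = ConfigParser.ConfigParser()
    cfg.add_section('hello')
    cfg.set('hello', 'type', 'PYZ')
    cfg.set('hello', 'name', 'hello.pyz')        # assumed output-name option
    cfg.set('hello', 'dependencies', 'hello.py')
    f = open('hello.cfg', 'w')
    cfg.write(f)
    f.close()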

+ 0 - 0
direct/src/pyinst/Sources.pp


+ 0 - 0
direct/src/pyinst/__init__.py


+ 0 - 246
direct/src/pyinst/archive.py

@@ -1,246 +0,0 @@
-#
-# Gordon McMillan (as inspired and influenced by Greg Stein)
-#
-
-# subclasses may not need marshal or struct, but since they're
-# builtin, importing is safe.
-#
-# While an Archive is really an abstraction for any "filesystem
-# within a file", it is tuned for use with imputil.FuncImporter.
-# This assumes it contains python code objects, indexed by the
-# internal name (ie, no '.py').
-# See carchive.py for a more general archive (contains anything)
-# that can be understood by a C program.
-
-import marshal
-import struct
-
-class Archive:
-  """ A base class for a repository of python code objects.
-
-      The get_code method is used by imputil.FuncImporter
-      to get code objects by name.
-      Archives are flat namespaces, so conflicts between module
-      names in different packages are possible. Use a different
-      Archive for each package.
-  """
-  MAGIC = 'PYL\0'
-  HDRLEN = 12        # default is MAGIC followed by python's magic, int pos of toc
-  TOCPOS = 8
-  TRLLEN = 0        # default - no trailer
-  TOCTMPLT = {}     #
-  os = None
-  def __init__(self, path=None, start=0):
-    """
-         Initialize an Archive. If path is omitted, it will be an empty Archive.
-         start is the seek position within path where the Archive starts."""
-    self.toc = None
-    self.path = path
-    self.start = start
-    import imp
-    self.pymagic = imp.get_magic()
-    if path is not None:
-      self.lib = open(self.path, 'rb')
-      self.checkmagic()
-      self.loadtoc()
-
-  ####### Sub-methods of __init__ - override as needed #############
-  def checkmagic(self):
-    """Verify version and validity of file.
-
-        Overridable.
-        Check to see if the file object self.lib actually has a file
-        we understand.
-    """
-    self.lib.seek(self.start)   #default - magic is at start of file
-    if self.lib.read(len(self.MAGIC)) != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    if self.lib.read(len(self.pymagic)) != self.pymagic:
-      raise RuntimeError, "%s has version mismatch to dll" % (self.path)
-
-  def loadtoc(self):
-    """Load the table of contents.
-
-        Overridable.
-        Default: After magic comes an int (4 byte native) giving the
-        position of the TOC within self.lib.
-        Default: The TOC is a marshal-able string.
-    """
-    self.lib.seek(self.start + self.TOCPOS)
-    (offset,) = struct.unpack('=i', self.lib.read(4))
-    self.lib.seek(self.start + offset)
-    self.toc = marshal.load(self.lib)
-
-  ######## This is what is called by FuncImporter #######
-  ## Since an Archive is flat, we ignore parent and modname.
-
-  def get_code(self, parent, modname, fqname):
-    """The import hook.
-
-       Called by imputil.FuncImporter.
-       Override extract to tune getting code from the Archive."""
-    rslt = self.extract(fqname) # None if not found, (ispkg, code) otherwise
-    if rslt is None:
-      return None
-    ispkg, code = rslt
-    if ispkg:
-      return ispkg, code, {'__path__': []}
-    return rslt
-
-  ####### Core method - Override as needed  #########
-  def extract(self, name):
-    """ Get the object corresponding to name, or None.
-
-        NAME is the name as specified in an 'import name'.
-        'import a.b' will become:
-        extract('a') (return None because 'a' is not a code object)
-        extract('a.__init__') (return a code object)
-        extract('a.b') (return a code object)
-        Default implementation:
-          self.toc is a dict
-          self.toc[name] is pos
-          self.lib has the code object marshal-ed at pos
-    """
-    ispkg, pos = self.toc.get(name, (0, None))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.load(self.lib)
-
-  ########################################################################
-  # Informational methods
-
-  def contents(self):
-    """Return a list of the contents.
-
-       Default implementation assumes self.toc is a dict like object.
-    """
-    return self.toc.keys()
-
-  ########################################################################
-  # Building
-
-  ####### Top level method - shouldn't need overriding #######
-  def build(self, path, lTOC):
-    """Create an archive file of name PATH from LTOC.
-
-       lTOC is a 'logical TOC' - a list of (name, path, ...)
-       where name is the internal (import) name,
-       and path is a file to get the object from, eg './a.pyc'.
-    """
-    self.path = path
-    self.lib = open(path, 'wb')
-    #reserve space for the header
-    if self.HDRLEN:
-      self.lib.write('\0'*self.HDRLEN)
-
-    #create an empty toc
-
-    if type(self.TOCTMPLT) == type({}):
-      self.toc = {}
-    else:       # assume callable
-      self.toc = self.TOCTMPLT()
-
-    for tocentry in lTOC:
-      self.add(tocentry)   # the guts of the archive
-
-    tocpos = self.lib.tell()
-    self.save_toc(tocpos)
-    if self.TRLLEN:
-      self.save_trailer(tocpos)
-    if self.HDRLEN:
-      self.update_headers(tocpos)
-    self.lib.close()
-
-
-  ####### manages keeping the internal TOC and the guts in sync #######
-  def add(self, entry):
-    """Add an entry to the archive.
-
-      Override this to influence the mechanics of the Archive.
-       Assumes entry is a seq beginning with (nm, pth, ...) where
-       nm is the key by which we'll be asked for the object.
-       pth is the name of where we find the object.
-    """
-    if self.os is None:
-      import os
-      self.os = os
-    nm = entry[0]
-    pth = entry[1]
-    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-    self.toc[nm] = (ispkg, self.lib.tell())
-    f = open(entry[1], 'rb')
-    f.seek(8)   #skip magic and timestamp
-    self.lib.write(f.read())
-
-  def save_toc(self, tocpos):
-    """Save the table of contents.
-
-       Default - toc is a dict
-       Gets marshaled to self.lib
-    """
-    marshal.dump(self.toc, self.lib)
-
-  def save_trailer(self, tocpos):
-    """Placeholder for Archives with trailers."""
-    pass
-
-  def update_headers(self, tocpos):
-    """Update any header data.
-
-       Default header is  MAGIC + Python's magic + tocpos"""
-    self.lib.seek(self.start)
-    self.lib.write(self.MAGIC)
-    self.lib.write(self.pymagic)
-    self.lib.write(struct.pack('=i', tocpos))
-
-##############################################################
-#
-# ZlibArchive - an archive with compressed entries
-#
-
-class ZlibArchive(Archive):
-  """A subclass of Archive that compresses entries with zlib
-     and uses a (marshalled) dict as a table of contents"""
-  MAGIC = 'PYZ\0'
-  TOCPOS = 8
-  HDRLEN = 12
-  TRLLEN = 0
-  TOCTMPLT = {}
-  LEVEL = 9
-
-  def __init__(self, path=None, offset=0):
-    Archive.__init__(self, path, offset)
-    # dynamic import so not imported if not needed
-    global zlib
-    import zlib
-
-  def extract(self, name):
-    """Get the code object for NAME.
-
-       Return None if name is not in the table of contents.
-       Otherwise, return a tuple (ispkg, code)"""
-    (ispkg, pos, lngth) = self.toc.get(name, (0, None, 0))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.loads(zlib.decompress(self.lib.read(lngth)))
-
-  def add(self, entry):
-    """Add an entry.
-
-       ENTRY is a sequence where entry[0] is name and entry[1] is full path name.
-       zlib compress the code object, and build a toc entry"""
-    if self.os is None:
-      import os
-      self.os = os
-    nm = entry[0]
-    pth = entry[1]
-    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-    f = open(pth, 'rb')
-    f.seek(8)   #skip magic and timestamp
-    obj = zlib.compress(f.read(), self.LEVEL)
-    self.toc[nm] = (ispkg, self.lib.tell(), len(obj))
-    self.lib.write(obj)
-
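
For reference, a minimal usage sketch of the classes above, assuming the header/TOC layout described in the docstrings (MAGIC, Python's magic, TOC offset, marshalled dict); 'foo.pyc' and 'demo.pyz' are hypothetical file names.

    import archive

    # Pack one compiled module into a compressed PYZ-style archive...
    pyz = archive.ZlibArchive()
    pyz.build('demo.pyz', [('foo', 'foo.pyc')])

    # ...and read it back by name.
    pyz = archive.ZlibArchive('demo.pyz')
    print pyz.contents()                  # ['foo']
    ispkg, code = pyz.extract('foo')      # (0, <code object>)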

+ 0 - 226
direct/src/pyinst/archive_rt.py

@@ -1,226 +0,0 @@
-#
-# Gordon McMillan (as inspired and influenced by Greg Stein)
-#
-
-# subclasses may not need marshal or struct, but since they're
-# builtin, importing is safe.
-#
-# While an Archive is really an abstraction for any "filesystem
-# within a file", it is tuned for use with imputil.FuncImporter.
-# This assumes it contains python code objects, indexed by the
-# internal name (ie, no '.py').
-# See carchive.py for a more general archive (contains anything)
-# that can be understood by a C program.
-
-#archive_rt is a stripped down version of MEInc.Dist.archive.
-#It has had all building logic removed.
-#Its purpose is to bootstrap the Python installation.
-
-import marshal
-import struct
-
-class Archive:
-  """ A base class for a repository of python code objects.
-      The extract method is used by imputil.ArchiveImporter
-      to get code objects by name (fully qualified name), so
-      an enduser "import a.b" would become
-        extract('a.__init__')
-        extract('a.b')
-  """
-  MAGIC = 'PYL\0'
-  HDRLEN = 12        # default is MAGIC followed by python's magic, int pos of toc
-  TOCPOS = 8
-  TRLLEN = 0        # default - no trailer
-  TOCTMPLT = {}     #
-  os = None
-  def __init__(self, path=None, start=0):
-    "Initialize an Archive. If path is omitted, it will be an empty Archive."
-    self.toc = None
-    self.path = path
-    self.start = start
-    import imp
-    self.pymagic = imp.get_magic()
-    if path is not None:
-      self.lib = open(self.path, 'rb')
-      self.checkmagic()
-      self.loadtoc()
-
-  ####### Sub-methods of __init__ - override as needed #############
-  def checkmagic(self):
-    """ Overridable.
-        Check to see if the file object self.lib actually has a file
-        we understand.
-    """
-    self.lib.seek(self.start)   #default - magic is at start of file
-    if self.lib.read(len(self.MAGIC)) != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    if self.lib.read(len(self.pymagic)) != self.pymagic:
-      raise RuntimeError, "%s has version mismatch to dll" % (self.path)
-
-  def loadtoc(self):
-    """ Overridable.
-        Default: After magic comes an int (4 byte native) giving the
-        position of the TOC within self.lib.
-        Default: The TOC is a marshal-able string.
-    """
-    self.lib.seek(self.start + self.TOCPOS)
-    (offset,) = struct.unpack('=i', self.lib.read(4))
-    self.lib.seek(self.start + offset)
-    self.toc = marshal.load(self.lib)
-
-  ######## This is what is called by FuncImporter #######
-  ## Since an Archive is flat, we ignore parent and modname.
-
-  def get_code(self, parent, modname, fqname):
-    print "parent: ", parent
-    print "modname: ", modname
-    print "fqname: ", fqname
-    rslt = self.extract(fqname) # None if not found, (ispkg, code) otherwise
-    if rslt is None:
-      return None
-    ispkg, code = rslt
-    if ispkg:
-      return ispkg, code, {'__path__': []}
-    return rslt
-
-  ####### Core method - Override as needed  #########
-  def extract(self, name):
-    """ Get the object corresponding to name, or None.
-        For use with imputil ArchiveImporter, object is a python code object.
-        'name' is the name as specified in an 'import name'.
-        'import a.b' will become:
-        extract('a') (return None because 'a' is not a code object)
-        extract('a.__init__') (return a code object)
-        extract('a.b') (return a code object)
-        Default implementation:
-          self.toc is a dict
-          self.toc[name] is pos
-          self.lib has the code object marshal-ed at pos
-    """
-    ispkg, pos = self.toc.get(name, (0, None))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.load(self.lib)
-
-  ########################################################################
-  # Informational methods
-
-  def contents(self):
-    """Return a list of the contents
-       Default implementation assumes self.toc is a dict like object.
-       Not required by ArchiveImporter.
-    """
-    return self.toc.keys()
-
-  ########################################################################
-  # Building
-
-  ####### Top level method - shouldn't need overriding #######
-##  def build(self, path, lTOC):
-##    """Create an archive file of name 'path'.
-##       lTOC is a 'logical TOC' - a list of (name, path, ...)
-##       where name is the internal name, eg 'a'
-##       and path is a file to get the object from, eg './a.pyc'.
-##    """
-##    self.path = path
-##    self.lib = open(path, 'wb')
-##    #reserve space for the header
-##    if self.HDRLEN:
-##      self.lib.write('\0'*self.HDRLEN)
-##
-##    #create an empty toc
-##
-##    if type(self.TOCTMPLT) == type({}):
-##      self.toc = {}
-##    else:       # assume callable
-##      self.toc = self.TOCTMPLT()
-##
-##    for tocentry in lTOC:
-##      self.add(tocentry)   # the guts of the archive
-##
-##    tocpos = self.lib.tell()
-##    self.save_toc(tocpos)
-##    if self.TRLLEN:
-##      self.save_trailer(tocpos)
-##    if self.HDRLEN:
-##      self.update_headers(tocpos)
-##    self.lib.close()
-##
-##
-##  ####### manages keeping the internal TOC and the guts in sync #######
-##  def add(self, entry):
-##    """Override this to influence the mechanics of the Archive.
-##       Assumes entry is a seq beginning with (nm, pth, ...) where
-##       nm is the key by which we'll be asked for the object.
-##       pth is the name of where we find the object. Overrides of
-##       get_obj_from can make use of further elements in entry.
-##    """
-##    if self.os is None:
-##      import os
-##      self.os = os
-##    nm = entry[0]
-##    pth = entry[1]
-##    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-##    self.toc[nm] = (ispkg, self.lib.tell())
-##    f = open(entry[1], 'rb')
-##    f.seek(8) #skip magic and timestamp
-##    self.lib.write(f.read())
-##
-##  def save_toc(self, tocpos):
-##    """Default - toc is a dict
-##       Gets marshaled to self.lib
-##    """
-##    marshal.dump(self.toc, self.lib)
-##
-##  def save_trailer(self, tocpos):
-##    """Default - not used"""
-##    pass
-##
-##  def update_headers(self, tocpos):
-##    """Default - MAGIC + Python's magic + tocpos"""
-##    self.lib.seek(self.start)
-##    self.lib.write(self.MAGIC)
-##    self.lib.write(self.pymagic)
-##    self.lib.write(struct.pack('=i', tocpos))
-
-##############################################################
-#
-# ZlibArchive - an archive with compressed entries
-#
-
-class ZlibArchive(Archive):
-  MAGIC = 'PYZ\0'
-  TOCPOS = 8
-  HDRLEN = 12
-  TRLLEN = 0
-  TOCTMPLT = {}
-  LEVEL = 9
-
-  def __init__(self, path=None, offset=0):
-    Archive.__init__(self, path, offset)
-    # dynamic import so not imported if not needed
-    global zlib
-    import zlib
-
-  def extract(self, name):
-    (ispkg, pos, lngth) = self.toc.get(name, (0, None, 0))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.loads(zlib.decompress(self.lib.read(lngth)))
-
-##  def add(self, entry):
-##    if self.os is None:
-##      import os
-##      self.os = os
-##    nm = entry[0]
-##    pth = entry[1]
-##    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-##    f = open(pth, 'rb')
-##    f.seek(8) #skip magic and timestamp
-##    obj = zlib.compress(f.read(), self.LEVEL)
-##    self.toc[nm] = (ispkg, self.lib.tell(), len(obj))
-##    self.lib.write(obj)
-##

+ 0 - 81
direct/src/pyinst/archivebuilder.py

@@ -1,81 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-# Gordon McMillan [email protected]
-#
-# A collection of routines for building a logical Table Of Contents
-# that Archive (subclasses) use to build themselves.
-# A logical Table of Contents is a sequence, each element of which is
-# a sequence, with at least 2 entries - "name" and "path".
-
-import os
-
-import string
-
-import py_compile
-
-def GetCompiled(seq, lvl='c'):
-  """SEQ is a list of .py files, or a logical TOC.
-     Return as .pyc or .pyo files (LVL) after ensuring their existence"""
-  if len(seq) == 0:
-    return seq
-  rslt = []
-  isTOC = 0
-  if type(seq[0]) == type(()):
-    isTOC = 1
-  for py in seq:
-    if isTOC:
-      (nm, fnm), rest = py[:2], py[2:]
-    else:
-      fnm = py
-    fnm = os.path.splitext(fnm)[0] + '.py'
-    cmpl = 1
-    pyc = fnm + lvl
-    if os.path.exists(pyc):
-      pytm = long(os.stat(fnm)[8])
-      ctm = long(os.stat(pyc)[8])
-      if pytm < ctm:
-        cmpl = 0
-    if cmpl:
-      py_compile.compile(fnm, pyc)
-    if isTOC:
-      rslt.append((nm, pyc)+rest)
-    else:
-      rslt.append(pyc)
-  return rslt
-
-import modulefinder
-MF = modulefinder
-import sys
-
-def Dependencies(script):
-  """Get a logical TOC directly from the dependencies of a script.
-  
-     The returned TOC does NOT contain the script.
-     It does contain extension modules. Uses modulefinder."""
-  rslt = []
-  (dir, name) = os.path.split(script)
-  if dir:
-    ppath = [os.path.normpath(dir)] + sys.path
-  else:
-    ppath = sys.path[:]
-  mf = MF.ModuleFinder(ppath, 0)
-  try:
-    mf.run_script(script)
-  except IOError:
-    print " Script not found:", script
-    return []
-  del mf.modules['__main__']
-  for (k, v) in mf.modules.items():
-    if v.__file__ is None:
-      del mf.modules[k]  # a builtin
-  for (k, v) in mf.modules.items():
-    #ispkg = os.path.basename(v.__file__) == '__init__.py'
-    d = os.path.dirname(v.__file__)
-    if not d:
-      v.__file__ = os.path.join(os.getcwd(), v.__file__)
-    #if ispkg:
-    #    rslt.append(k+'.__init__', v.__file__)
-    #else:
-    rslt.append((k, v.__file__))
-  return rslt
-
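
A short sketch of how these helpers might be combined, assuming a script named myapp.py exists next to the build script; the name is hypothetical.

    import archivebuilder

    # Logical TOC of everything myapp.py imports (the script itself is
    # not included; extension modules are).
    toc = archivebuilder.Dependencies('myapp.py')
    for nm, pth in toc:
        print nm, '->', pth

    # Ensure up-to-date .pyc files exist for a plain list of sources.
    print archivebuilder.GetCompiled(['myapp.py'])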

+ 0 - 169
direct/src/pyinst/bindepend.py

@@ -1,169 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-#
-# use dumpbin.exe (if present) to find the binary
-# dependencies of an extension module.
-# if dumpbin not available, pick apart the PE hdr of the binary
-# while this appears to work well, it is complex and subject to
-# problems with changes to PE hdrs (ie, this works only on 32 bit Intel
-# Windows format binaries)
-#
-# Note also that you should check the results to make sure that the
-# dlls are redistributable. I've listed most of the common MS dlls
-# under "excludes" below; add to this list as necessary (or use the
-# "excludes" option in the INSTALL section of the config file).
-
-import os
-import time
-import string
-import sys
-import tempfile
-import finder
-
-seen = {}
-excludes = {'KERNEL32.DLL':1,
-      'ADVAPI.DLL':1,
-      'MSVCRT.DLL':1,
-      'ADVAPI32.DLL':1,
-      'COMCTL32.DLL':1,
-      'CRTDLL.DLL':1,
-      'GDI32.DLL':1,
-      'MFC42.DLL':1,
-      'NTDLL.DLL':1,
-      'OLE32.DLL':1,
-      'OLEAUT32.DLL':1,
-      'RPCRT4.DLL':1,
-      'SHELL32.DLL':1,
-      'USER32.DLL':1,
-      'WINSPOOL.DRV':1,
-      'WS2HELP.DLL':1,
-      'WS2_32.DLL':1,
-      'WSOCK32.DLL':1,
-      'WINMM.DLL':1,
-      'COMDLG32.DLL':1,
-      'ZLIB.DLL':1,
-      'ODBC32.DLL':1,
-      'VERSION.DLL':1}
-
-def getfullnameof(mod, xtrapath = None):
-  """Return the full path name of MOD.
-
-      MOD is the basename of a dll or pyd.
-      XTRAPATH is a path or list of paths to search first.
-      Will search the full Windows search path, as well as sys.path"""
-  epath = finder.getpath()
-  if mod[-4:] in ('.pyd', '.PYD'):
-    epath = epath + sys.path
-  if xtrapath is not None:
-    if type(xtrapath) == type(''):
-      epath.insert(0, xtrapath)
-    else:
-      epath = xtrapath + epath
-  for p in epath:
-    npth = os.path.join(p, mod)
-    if os.path.exists(npth):
-      return npth
-  return ''
-
-def getImports1(pth):
-    """Find the binary dependencies of PTH.
-
-        This implementation (not used right now) uses the MSVC utility dumpbin"""
-    rslt = []
-    tmpf = tempfile.mktemp()
-    os.system('dumpbin /IMPORTS "%s" >%s' %(pth, tmpf))
-    time.sleep(0.1)
-    txt = open(tmpf,'r').readlines()
-    os.remove(tmpf)
-    i = 0
-    while i < len(txt):
-        tokens = string.split(txt[i])
-        if len(tokens) == 1 and string.find(tokens[0], '.') > 0:
-            rslt.append(string.strip(tokens[0]))
-        i = i + 1
-    return rslt
-
-def getImports2(pth):
-    """Find the binary dependencies of PTH.
-
-        This implementation walks through the PE header"""
-    import struct
-    rslt = []
-    try:
-      f = open(pth, 'rb').read()
-      pehdrd = struct.unpack('l', f[60:64])[0]
-      magic = struct.unpack('l', f[pehdrd:pehdrd+4])[0]
-      numsecs = struct.unpack('h', f[pehdrd+6:pehdrd+8])[0]
-      numdirs = struct.unpack('l', f[pehdrd+116:pehdrd+120])[0]
-      idata = ''
-      if magic == 17744:
-          importsec, sz = struct.unpack('2l', f[pehdrd+128:pehdrd+136])
-          secttbl = pehdrd + 120 + 8*numdirs
-          secttblfmt = '8s7l2h'
-          seclist = []
-          for i in range(numsecs):
-              seclist.append(struct.unpack(secttblfmt, f[secttbl+i*40:secttbl+(i+1)*40]))
-              #nm, vsz, va, rsz, praw, preloc, plnnums, qrelocs, qlnnums, flags \
-              # = seclist[-1]
-          for i in range(len(seclist)-1):
-              if seclist[i][2] <= importsec < seclist[i+1][2]:
-                  break
-          vbase = seclist[i][2]
-          raw = seclist[i][4]
-          idatastart = raw + importsec - vbase
-          idata = f[idatastart:idatastart+seclist[i][1]]
-          i = 0
-          while 1:
-              vsa =  struct.unpack('5l', idata[i*20:i*20+20])[3]
-              if vsa == 0:
-                  break
-              sa = raw + vsa - vbase
-              end = string.find(f, '\000', sa)
-              rslt.append(f[sa:end])
-              i = i + 1
-    except IOError:
-      print "bindepend cannot analyze %s - file not found!"
-    except struct.error:
-      print "bindepend cannot analyze %s - error walking thru pehdr"
-    return rslt
-
-def Dependencies(lTOC):
-  """Expand LTOC to include all the closure of binary dependencies.
-
-     LTOC is a logical table of contents, ie, a seq of tuples (name, path).
-     Return LTOC expanded by all the binary dependencies of the entries
-     in LTOC, except those listed in the module global EXCLUDES"""
-  for (nm, pth) in lTOC:
-    fullnm = string.upper(os.path.basename(pth))
-    if seen.get(string.upper(nm), 0):
-      continue
-    print "analyzing", nm
-    seen[string.upper(nm)] = 1
-    dlls = getImports(pth)
-    for lib in dlls:
-        print " found", lib
-        if excludes.get(string.upper(lib), 0):
-          continue
-        if seen.get(string.upper(lib), 0):
-          continue
-        npth = getfullnameof(lib)
-        if npth:
-          lTOC.append((lib, npth))
-        else:
-          print " lib not found:", lib, "dependency of",
-  return lTOC
-
-
-##if getfullnameof('dumpbin.exe') == '':
-##    def getImports(pth):
-##        return getImports2(pth)
-##else:
-##    def getImports(pth):
-##        return getImports1(pth)
-
-def getImports(pth):
-    """Forwards to either getImports1 or getImports2
-    """
-    return getImports2(pth)
-
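
A minimal usage sketch, Windows-only and assuming the named DLL can be found on the search path; 'tcl83.dll' is just an illustrative choice.

    import bindepend

    # Expand a one-entry logical TOC with the closure of its DLL imports.
    full = bindepend.getfullnameof('tcl83.dll')
    if full:
        toc = bindepend.Dependencies([('tcl83.dll', full)])
        for nm, pth in toc:
            print nm, pth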

+ 0 - 204
direct/src/pyinst/carchive.py

@@ -1,204 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-#
-# A subclass of Archive that can be understood
-# by a C program. See uplaunch.cpp for unpacking
-# from C.
-import archive
-import struct
-import zlib
-import strop
-
-class CTOC:
-  """A class encapsulating the table of contents of a CArchive.
-  
-     When written to disk, it is easily read from C."""
-  ENTRYSTRUCT = 'iiiibc' #(structlen, dpos, dlen, ulen, flag, typcd) followed by name
-  def __init__(self):
-    self.data = []
-  
-  def frombinary(self, s):
-    """Decode the binary string into an in memory list.
-    
-        S is a binary string."""
-    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-    p = 0
-    while p<len(s):
-      (slen, dpos, dlen, ulen, flag, typcd) = struct.unpack(self.ENTRYSTRUCT, 
-                                                  s[p:p+entrylen]) 
-      nmlen = slen - entrylen 
-      p = p + entrylen
-      (nm,) = struct.unpack(repr(nmlen)+'s', s[p:p+nmlen])
-      p = p + nmlen 
-      self.data.append((dpos, dlen, ulen, flag, typcd, nm[:-1]))
-
-  def tobinary(self):
-    """Return self as a binary string."""
-    import string
-    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-    rslt = []
-    for (dpos, dlen, ulen, flag, typcd, nm) in self.data:
-      nmlen = len(nm) + 1       # add 1 for a '\0'
-      rslt.append(struct.pack(self.ENTRYSTRUCT+repr(nmlen)+'s',
-        nmlen+entrylen, dpos, dlen, ulen, flag, typcd, nm+'\0'))
-    return string.join(rslt, '')
-
-  def add(self, dpos, dlen, ulen, flag, typcd, nm):
-    """Add an entry to the table of contents.
-    
-       DPOS is data position.
-       DLEN is data length.
-       ULEN is the uncompressed data len.
-       FLAG says if the data is compressed.
-       TYPCD is the "type" of the entry (used by the C code)
-       NM is the entry's name."""
-    self.data.append((dpos, dlen, ulen, flag, typcd, nm))
-
-  def get(self, ndx):
-    """return the toc entry (tuple) at index NDX"""
-    return self.data[ndx]
-
-  def __getitem__(self, ndx):
-    return self.data[ndx]
-
-  def find(self, name):
-    """Return the index of the toc entry with name NAME.
-    
-       Return -1 for failure."""
-    for i in range(len(self.data)):
-      if self.data[i][-1] == name:
-        return i
-    return -1
-
-class CArchive(archive.Archive):
-  """An Archive subclass that an hold arbitrary data.
-  
-     Easily handled from C or from Python."""
-  MAGIC = 'MEI\014\013\012\013\015'
-  HDRLEN = 0
-  TOCTMPLT = CTOC
-  TRLSTRUCT = '8siii'
-  TRLLEN = 20
-  LEVEL = 9
-  def __init__(self, path=None, start=0, len=0):
-    """Constructor.
-    
-       PATH is path name of file (create an empty CArchive if path is None).
-       START is the seek position within PATH.
-       LEN is the length of the CArchive (if 0, then read till EOF). """
-    self.len = len
-    archive.Archive.__init__(self, path, start)
-
-  def checkmagic(self):
-    """Verify that self is a valid CArchive.
-    
-        Magic signature is at end of the archive."""
-    #magic is at EOF; if we're embedded, we need to figure where that is
-    if self.len:
-      self.lib.seek(self.start+self.len, 0)
-    else:
-      self.lib.seek(0, 2)
-    filelen = self.lib.tell()
-    if self.len:
-      self.lib.seek(self.start+self.len-self.TRLLEN, 0)
-    else:
-      self.lib.seek(-self.TRLLEN, 2)
-    (magic, totallen, tocpos, toclen) = struct.unpack(self.TRLSTRUCT, 
-                                                self.lib.read(self.TRLLEN))
-    if magic != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    self.pkgstart = filelen - totallen
-    if self.len:
-      if totallen != self.len or self.pkgstart != self.start:
-        raise RuntimeError, "Problem with embedded archive in %s" % self.path
-    self.tocpos, self.toclen = tocpos, toclen
-
-  def loadtoc(self):
-    """Load the table of contents into memory."""
-    self.toc = self.TOCTMPLT()
-    self.lib.seek(self.pkgstart+self.tocpos)
-    tocstr = self.lib.read(self.toclen)
-    self.toc.frombinary(tocstr)
-
-  def extract(self, name):
-    """Get the contents of an entry.
-    
-       NAME is an entry name.
-       Return the tuple (ispkg, contents).
-       For non-Python resources, ispkg is meaningless (and 0).
-       Used by the import mechanism."""
-    if type(name) == type(''):
-      ndx = self.toc.find(name)
-      if ndx == -1:
-        return None
-    else:
-      ndx = name
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    self.lib.seek(self.pkgstart+dpos)
-    rslt = self.lib.read(dlen)
-    if flag == 1:
-      rslt = zlib.decompress(rslt)
-    if typcd == 'M':
-      return (1, rslt)
-    return (0, rslt)
-
-  def contents(self):
-    """Return the names of the entries"""
-    rslt = []
-    for (dpos, dlen, ulen, flag, typcd, nm) in self.toc:
-      rslt.append(nm)
-    return rslt
-
-  def add(self, entry):
-    """Add an ENTRY to the CArchive.
-    
-       ENTRY must have:
-         entry[0] is name (under which it will be saved).
-         entry[1] is fullpathname of the file.
-         entry[2] is a flag for its storage format (0==uncompressed,
-         1==compressed, 2==Python source format)
-         entry[3] is the entry's type code."""
-    (nm, pathnm, flag, typcd) = entry[:4]
-    if flag == 2:
-        s = open(pathnm, 'r').read()
-        s = s + '\n\0'
-    else:
-        s = open(pathnm, 'rb').read()
-    ulen = len(s)
-    if flag == 1:
-      s = zlib.compress(s, self.LEVEL)
-    dlen = len(s)
-    where = self.lib.tell()
-    if typcd == 'm':
-      if strop.find(pathnm, '.__init__.py') > -1:
-        typcd = 'M'
-    self.toc.add(where, dlen, ulen, flag, typcd, nm)
-    self.lib.write(s)
-
-  def save_toc(self, tocpos):
-    """Save the table of contents to disk."""
-    self.tocpos = tocpos
-    tocstr = self.toc.tobinary()
-    self.toclen = len(tocstr)
-    self.lib.write(tocstr)
-
-  def save_trailer(self, tocpos):
-    """Save the trailer to disk.
-    
-       CArchives can be opened from the end - the trailer points
-       back to the start. """
-    totallen = tocpos + self.toclen + self.TRLLEN
-    trl = struct.pack(self.TRLSTRUCT, self.MAGIC, totallen, 
-                      tocpos, self.toclen)
-    self.lib.write(trl)
-
-  def openEmbedded(self, name):
-    """Open a CArchive of name NAME embedded within this CArchive."""
-    ndx = self.toc.find(name)
-    if ndx == -1:
-      raise KeyError, "Member '%s' not found in %s" % (name, self.path)
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    if flag:
-      raise ValueError, "Cannot open compressed archive %s in place" % self.path
-    return CArchive(self.path, self.pkgstart+dpos, dlen)
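
A minimal usage sketch based on the entry format documented in add(); 'data.txt' and 'demo.pkg' are hypothetical, and the 'b' type code is an assumption about what the C-side unpacker expects.

    import carchive

    # Pack one compressed data file and read it back by name.
    pkg = carchive.CArchive()
    pkg.build('demo.pkg', [('readme', 'data.txt', 1, 'b')])

    pkg = carchive.CArchive('demo.pkg')
    print pkg.contents()                  # ['readme']
    ispkg, data = pkg.extract('readme')   # ispkg is 0 for non-Python data
    print len(data), 'bytes'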

+ 0 - 157
direct/src/pyinst/carchive_rt.py

@@ -1,157 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-#
-# A subclass of Archive that can be understood
-# by a C program. See uplaunch.cpp for unpacking
-# from C.
-
-#carchive_rt is a stripped down version of MEInc.Dist.carchive.
-#It has had all building logic removed.
-#Its purpose is to bootstrap the Python installation.
-
-import archive_rt
-import struct
-import zlib
-import strop
-
-class CTOC:
-  ENTRYSTRUCT = 'iiiibc' #(structlen, dpos, dlen, ulen, flag, typcd) followed by name
-  def __init__(self):
-    self.data = []
-  
-  def frombinary(self, s):
-    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-    p = 0
-    while p<len(s):
-      (slen, dpos, dlen, ulen, flag, typcd) = struct.unpack(self.ENTRYSTRUCT, 
-                                                  s[p:p+entrylen]) 
-      nmlen = slen - entrylen 
-      p = p + entrylen
-      (nm,) = struct.unpack(repr(nmlen)+'s', s[p:p+nmlen])
-      p = p + nmlen 
-      self.data.append((dpos, dlen, ulen, flag, typcd, nm[:-1]))
-
-##  def tobinary(self):
-##    import string
-##    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-##    rslt = []
-##    for (dpos, dlen, ulen, flag, typcd, nm) in self.data:
-##      nmlen = len(nm) + 1     # add 1 for a '\0'
-##      rslt.append(struct.pack(self.ENTRYSTRUCT+repr(nmlen)+'s',
-##        nmlen+entrylen, dpos, dlen, ulen, flag, typcd, nm+'\0'))
-##    return string.join(rslt, '')
-##
-##  def add(self, dpos, dlen, ulen, flag, typcd, nm):
-##    self.data.append(dpos, dlen, ulen, flag, typcd, nm)
-
-  def get(self, ndx):
-    return self.data[ndx]
-
-  def __getitem__(self, ndx):
-    return self.data[ndx]
-
-  def find(self, name):
-    for i in range(len(self.data)):
-      if self.data[i][-1] == name:
-        return i
-    return -1
-
-class CArchive(archive_rt.Archive):
-  MAGIC = 'MEI\014\013\012\013\015'
-  HDRLEN = 0
-  TOCTMPLT = CTOC
-  TRLSTRUCT = '8siii'
-  TRLLEN = 20
-  LEVEL = 9
-  def __init__(self, path=None, start=0, len=0):
-    self.len = len
-    archive_rt.Archive.__init__(self, path, start)
-
-  def checkmagic(self):
-    #magic is at EOF; if we're embedded, we need to figure where that is
-    if self.len:
-      self.lib.seek(self.start+self.len, 0)
-    else:
-      self.lib.seek(0, 2)
-    filelen = self.lib.tell()
-    if self.len:
-      self.lib.seek(self.start+self.len-self.TRLLEN, 0)
-    else:
-      self.lib.seek(-self.TRLLEN, 2)
-    (magic, totallen, tocpos, toclen) = struct.unpack(self.TRLSTRUCT, 
-                                                self.lib.read(self.TRLLEN))
-    if magic != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    self.pkgstart = filelen - totallen
-    if self.len:
-      if totallen != self.len or self.pkgstart != self.start:
-        raise RuntimeError, "Problem with embedded archive in %s" % self.path
-    self.tocpos, self.toclen = tocpos, toclen
-
-  def loadtoc(self):
-    self.toc = self.TOCTMPLT()
-    self.lib.seek(self.pkgstart+self.tocpos)
-    tocstr = self.lib.read(self.toclen)
-    self.toc.frombinary(tocstr)
-
-  def extract(self, name):
-    if type(name) == type(''):
-      ndx = self.toc.find(name)
-      if ndx == -1:
-        return None
-    else:
-      ndx = name
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    self.lib.seek(self.pkgstart+dpos)
-    rslt = self.lib.read(dlen)
-    if flag == 1:
-      rslt = zlib.decompress(rslt)
-    if typcd == 'M':
-      return (1, rslt)
-    return (0, rslt)
-
-  def contents(self):
-    rslt = []
-    for (dpos, dlen, ulen, flag, typcd, nm) in self.toc:
-      rslt.append(nm)
-    return rslt
-
-##  def add(self, entry):
-##    (nm, pathnm, flag, typcd) = entry[:4]
-##    if flag == 2:
-##        s = open(pathnm, 'r').read()
-##        s = s + '\0'
-##    else:
-##        s = open(pathnm, 'rb').read()
-##    ulen = len(s)
-##    if flag == 1:
-##      s = zlib.compress(s, self.LEVEL)
-##    dlen = len(s)
-##    where = self.lib.tell()
-##    if typcd == 'm':
-##      if strop.find(pathnm, '.__init__.py') > -1:
-##        typcd = 'M'
-##    self.toc.add(where, dlen, ulen, flag, typcd, nm)
-##    self.lib.write(s)
-##
-##  def save_toc(self, tocpos):
-##    self.tocpos = tocpos
-##    tocstr = self.toc.tobinary()
-##    self.toclen = len(tocstr)
-##    self.lib.write(tocstr)
-##
-##  def save_trailer(self, tocpos):
-##    totallen = tocpos + self.toclen + self.TRLLEN
-##    trl = struct.pack(self.TRLSTRUCT, self.MAGIC, totallen, 
-##                      tocpos, self.toclen)
-##    self.lib.write(trl)
-
-  def openEmbedded(self, name):
-    ndx = self.toc.find(name)
-    if ndx == -1:
-      raise KeyError, "Member '%s' not found in %s" % (name, self.path)
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    if flag:
-      raise ValueError, "Cannot open compressed archive %s in place" % self.path
-    return CArchive(self.path, self.pkgstart+dpos, dlen)

+ 0 - 178
direct/src/pyinst/finder.py

@@ -1,178 +0,0 @@
-# copyright McMillan Enterprises, 1999
-import os, sys
-import string
-
-SCRIPT = 1
-GSCRIPT = 2
-MODULE = 3
-PACKAGE = 4
-PBINARY = 5
-BINARY = 6
-ZLIB = 7
-DIRECTORY = 8
-DATA = 9
-
-_bpath = None
-_ppath = None
-_pcache = {}
-
-def _locate(nm, xtrapath=None, base=None):
-    """Find a file / directory named NM in likely places.
-    
-       XTRAPATH is a list of paths to prepend to BASE.
-       If BASE is None, sys.path (as extended by packages) is used."""
-    ppath = base
-    if base is None:
-        ppath = _ppath
-    if xtrapath:
-        ppath = xtrapath + ppath
-    for pth in ppath:
-        fullnm = os.path.join(pth, nm)
-        #print " _locate trying", fullnm
-        if os.path.exists(fullnm):
-            break
-    else:
-        return ''
-    return fullnm
-
-def _locatepython(name, xtrapath=None):
-    """Locate a Python resource named NAME.
-    
-       All of the standard file extensions will be tried.
-       XTRAPATH is prepended to sys.path."""
-    for ext in ('.py', '.pyc', '.pyw', '.pyo', '.pyd', '.dll'):
-        fullnm = _locate(name+ext, xtrapath)
-        if fullnm:
-            break
-    else:
-        for ext in ('.pyd', '.dll'):
-            fullnm = _locate(name+ext, [], _bpath)
-            if fullnm:
-                break
-    return fullnm
-
-def ispackage(name):
-    """Determine if NAME is the name of a package."""
-    if os.path.exists(os.path.join(name, '__init__.py')):
-        return 1
-    if os.path.exists(os.path.join(name, '__init__.pyc')):
-        return 1
-    if os.path.exists(os.path.join(name, '__init__.pyo')):
-        return 1
-    return 0
-        
-def idtype(fullnm):
-    """Figure out what type of resource FULLNM refers to."""
-    if os.path.isdir(fullnm):
-        if ispackage(fullnm):
-            return PACKAGE
-        return DIRECTORY
-    ext = os.path.splitext(fullnm)[1]
-    if ext:
-        if ext == '.pyd':
-            return PBINARY
-        if ext == '.dll':
-            return BINARY
-        if ext in ('.pyc', '.pyo'):
-            return MODULE
-        if ext == '.py':
-            return SCRIPT
-        if ext == '.pyw':
-            return GSCRIPT
-        if ext == '.pyz':
-            return ZLIB
-    return DATA
-
-def identify(name, xtrapath=None):
-    """Find, and identify the type of NAME, using XTRAPATH as the
-       first place to look.
-
-       Return type, name and full path name.
-       NAME can be a logical or physical name. However, the logical
-       name of a Python module can easily conflict with the physical
-       name of something else, so beware."""
-    if os.path.exists(name):
-        fullnm = name
-    else:
-        if xtrapath is None:
-            xtra = []
-        elif id(xtrapath) in _pcache:
-            xtra = _pcache[id(xtrapath)]
-        else:
-            xtra = expand(xtrapath)
-            _pcache[id(xtrapath)] = xtra 
-        fullnm = _locate(name, xtra)
-        if not fullnm:
-            fullnm =  _locate(name, [], _bpath)
-            if not fullnm:
-                ext = os.path.splitext(name)[1]
-                if not ext:
-                    fullnm = _locatepython(name, xtra)
-                    if not fullnm:
-                        raise ValueError, "%s not found" % name
-                else:
-                    nm = name
-                    while string.count(nm, '.'):
-                        nm = string.replace(nm, '.', '/', 1)
-                        fullnm = _locatepython(nm, xtra)
-                        if fullnm:
-                            break
-                    else:
-                        raise ValueError, "%s not found" % name
-                    
-    typ = idtype(fullnm)
-    nm = name
-    if typ in (GSCRIPT, SCRIPT, MODULE, PACKAGE, PBINARY):
-        dir, nm = os.path.split(fullnm)
-        nm = os.path.splitext(nm)[0]
-    if typ == SCRIPT:
-        if os.path.exists(fullnm+'c') or os.path.exists(fullnm+'o'):
-            typ = MODULE
-    if typ in (MODULE, PACKAGE):
-        while idtype(dir) == PACKAGE:
-            dir, lnode = os.path.split(dir)
-            nm = lnode+'.'+nm
-    elif typ == BINARY:
-        nm = os.path.basename(fullnm)
-    return typ, nm, fullnm
- 
-def expand(plist):
-    """ expand a list of paths (like sys.path) to include all the 
-        directories that qualify as packages """
-    pkgdirs = []
-    for pth in plist:
-        os.path.walk(pth, pkgfinder, pkgdirs)
-    return plist + pkgdirs
-
-def pkgfinder(pkgdirs, dir, fnms):
-    i = 0
-    while i < len(fnms):
-        fnm = os.path.join(dir, fnms[i])
-        if os.path.isdir(fnm):
-            if ispackage(fnm):
-                pkgdirs.append(fnm)
-                i = i + 1
-            else:
-                del fnms[i]
-        else:
-            i = i + 1
-
-if _bpath is None:
-    try:
-        import win32api
-    except ImportError:
-        print "Cannot determine your Windows or System directories"
-        print "Please add them to your PATH if .dlls are not found"
-        _bpath = []
-    else:
-        sysdir = win32api.GetSystemDirectory()
-        sysdir2 = os.path.join(sysdir, '../SYSTEM')
-        windir = win32api.GetWindowsDirectory()
-        _bpath = [sysdir, sysdir2, windir]
-    _bpath.extend(string.split(os.environ.get('PATH', ''), ';'))
-if _ppath is None:
-    _ppath = expand(sys.path)
-        
-def getpath():
-    """Return the path that Windows will search for dlls."""
-    return _bpath
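
A small sketch of identify() in use, assuming a Python 2 environment; on non-Windows machines the module only prints its warning about the system directories and falls back to PATH.

    import finder

    # Classify a couple of standard-library names found via sys.path.
    for name in ('os', 'string'):
        typ, nm, fullnm = finder.identify(name)
        print typ, nm, fullnm             # typ is one of the constants above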

+ 0 - 138
direct/src/pyinst/icon.py

@@ -1,138 +0,0 @@
-# This code is courtesy of Thomas Heller, who
-# has kindly donated it to this project.
-RT_ICON = 3
-RT_GROUP_ICON = 14
-LOAD_LIBRARY_AS_DATAFILE = 2
-
-import struct
-
-class Structure:
-    def __init__ (self):
-        size = self._sizeInBytes = struct.calcsize (self._format_)
-        self._fields_ = list (struct.unpack (self._format_, '\000' * size))
-        indexes = self._indexes_ = {}
-        for i in range (len (self._names_)):
-            indexes[self._names_[i]] = i
-    def dump (self):
-        print "DUMP of", self
-        for name in self._names_:
-            if name[0] != '_':
-                print "%20s = %s" % (name, getattr (self, name))
-        print
-    def __getattr__ (self, name):
-        if name in self._names_:
-            index = self._indexes_[name]
-            return self._fields_[index]
-        try:
-            return self.__dict__[name]
-        except KeyError:
-            raise AttributeError, name
-    def __setattr__ (self, name, value):
-        if name in self._names_:
-            index = self._indexes_[name]
-            self._fields_[index] = value
-        else:
-            self.__dict__[name] = value
-    def tostring (self):
-        return apply (struct.pack, [self._format_,] + self._fields_)
-    def fromfile (self, file):
-        data = file.read (self._sizeInBytes)
-        self._fields_ = list (struct.unpack (self._format_, data))
-
-class ICONDIRHEADER (Structure):
-    _names_ = "idReserved", "idType", "idCount"
-    _format_ = "hhh"
-
-class ICONDIRENTRY (Structure):
-    _names_ = "bWidth", "bHeight", "bColorCount", "bReserved", "wPlanes", "wBitCount", "dwBytesInRes", "dwImageOffset"
-    _format_ = "bbbbhhii"
-
-class GRPICONDIR (Structure):
-    _names_ = "idReserved", "idType", "idCount"
-    _format_ = "hhh"
-
-class GRPICONDIRENTRY (Structure):
-    _names_ = "bWidth", "bHeight", "bColorCount", "bReserved", "wPlanes", "wBitCount", "dwBytesInRes", "nID"
-    _format_ = "bbbbhhih"
-
-class IconFile:
-    def __init__ (self, path):
-        self.path = path
-        file = open (path, "rb")
-        self.entries = []
-        self.images = []
-        header = self.header = ICONDIRHEADER()
-        header.fromfile (file)
-        for i in range (header.idCount):
-            entry = ICONDIRENTRY()
-            entry.fromfile (file)
-            self.entries.append (entry)
-        for e in self.entries:
-            file.seek (e.dwImageOffset, 0)
-            self.images.append (file.read (e.dwBytesInRes))
-
-    def grp_icon_dir (self):
-        return self.header.tostring()
-
-    def grp_icondir_entries (self):
-        data = ""
-        i = 1
-        for entry in self.entries:
-            e = GRPICONDIRENTRY()
-            for n in e._names_[:-1]:
-                setattr(e, n, getattr (entry, n))
-            e.nID = i
-            i = i + 1
-            data = data + e.tostring()
-        return data
-            
-
-def CopyIcons_FromIco (dstpath, srcpath):
-    f = IconFile (srcpath)
-    print "Updating icons from", srcpath, "to", dstpath
-    import win32api #, win32con
-    hdst = win32api.BeginUpdateResource (dstpath, 0)
-    data = f.grp_icon_dir()
-    data = data + f.grp_icondir_entries()
-    win32api.UpdateResource (hdst, RT_GROUP_ICON, 1, data)
-    print "Writing RT_GROUP_ICON resource with %d bytes" % len (data)
-    i = 1
-    for data in f.images:
-        win32api.UpdateResource (hdst, RT_ICON, i, data)
-        print "Writing RT_ICON resource with %d bytes" % len (data)
-        i = i + 1
-    win32api.EndUpdateResource (hdst, 0)
-
-def CopyIcons (dstpath, srcpath):
-    import os.path, string
-    index = None
-    try:
-        srcpath, index = map (string.strip, string.split (srcpath, ','))
-        index = int (index)
-    except:
-        pass
-    print "PATH, INDEX", srcpath, index
-    srcext = os.path.splitext (srcpath)[1]
-    if string.lower (srcext) == '.ico':
-        return CopyIcons_FromIco (dstpath, srcpath)
-    if index is not None:
-        print "Updating icons from", srcpath, ", %d to" % index, dstpath
-    else:
-        print "Updating icons from", srcpath, "to", dstpath
-    import win32api #, win32con
-    hdst = win32api.BeginUpdateResource (dstpath, 0)
-    hsrc = win32api.LoadLibraryEx (srcpath, 0, LOAD_LIBRARY_AS_DATAFILE)
-    if index is None:
-        grpname = win32api.EnumResourceNames (hsrc, RT_GROUP_ICON)[0]
-    elif index >= 0:
-        grpname = win32api.EnumResourceNames (hsrc, RT_GROUP_ICON)[index]
-    else:
-        grpname = -index
-    data = win32api.LoadResource (hsrc, RT_GROUP_ICON, grpname)
-    win32api.UpdateResource (hdst, RT_GROUP_ICON, grpname, data)
-    for iconname in win32api.EnumResourceNames (hsrc, RT_ICON):
-        data = win32api.LoadResource (hsrc, RT_ICON, iconname)
-        win32api.UpdateResource (hdst, RT_ICON, iconname, data)
-    win32api.FreeLibrary (hsrc)
-    win32api.EndUpdateResource (hdst, 0)
-
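
A minimal usage sketch; parsing the .ico directory is pure struct work, while stamping resources needs Windows and win32api. Both file names are hypothetical.

    import icon

    ico = icon.IconFile('myapp.ico')
    ico.header.dump()                     # idReserved / idType / idCount
    icon.CopyIcons('dist\\myapp.exe', 'myapp.ico')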

+ 0 - 487
direct/src/pyinst/imputil.py

@@ -1,487 +0,0 @@
-#
-# imputil.py
-#
-# Written by Greg Stein. Public Domain.
-# No Copyright, no Rights Reserved, and no Warranties.
-#
-# Utilities to help out with custom import mechanisms.
-#
-# Additional modifications were contributed by Marc-Andre Lemburg and
-# Gordon McMillan.
-#
-
-__version__ = '0.3'
-
-# note: avoid importing non-builtin modules
-import imp
-import sys
-import strop
-import __builtin__      ### why this instead of just using __builtins__ ??
-
-# for the DirectoryImporter
-import struct
-import marshal
-
-class Importer:
-  "Base class for replacing standard import functions."
-
-  def install(self):
-    self.__chain_import = __builtin__.__import__
-    self.__chain_reload = __builtin__.reload
-    __builtin__.__import__ = self._import_hook
-    __builtin__.reload = self._reload_hook
-
-  ######################################################################
-  #
-  # PRIVATE METHODS
-  #
-  def _import_hook(self, name, globals=None, locals=None, fromlist=None):
-    """Python calls this hook to locate and import a module.
-
-    This method attempts to load the (dotted) module name. If it cannot
-    find it, then it delegates the import to the next import hook in the
-    chain (where "next" is defined as the import hook that was in place
-    at the time this Importer instance was installed).
-    """
-
-    # determine the context of this import
-    parent = self._determine_import_context(globals)
-
-    # import the module within the context, or from the default context
-    top, tail = self._import_top_module(parent, name)
-    if top is None:
-      # the module was not found; delegate to the next import hook
-      return self.__chain_import(name, globals, locals, fromlist)
-
-    # the top module may be under the control of a different importer.
-    # if so, then defer to that importer for completion of the import.
-    # note it may be self, or it may be undefined, so we (self) may as well
-    # finish the import.
-    importer = top.__dict__.get('__importer__', self)
-    return importer._finish_import(top, tail, fromlist)
-
-  def _finish_import(self, top, tail, fromlist):
-    # if "a.b.c" was provided, then load the ".b.c" portion down from
-    # below the top-level module.
-    bottom = self._load_tail(top, tail)
-
-    # if the form is "import a.b.c", then return "a"
-    if not fromlist:
-      # no fromlist: return the top of the import tree
-      return top
-
-    # the top module was imported by self, or it was not imported through
-    # the Importer mechanism and self is simply handling the import of
-    # the sub-modules and fromlist.
-    #
-    # this means that the bottom module was also imported by self, or we
-    # are handling things in the absence of a prior Importer
-    #
-    # ### why the heck are we handling it? what is the example scenario
-    # ### where this happens? note that we can't determine is_package()
-    # ### for non-Importer modules.
-    #
-    # since we imported/handled the bottom module, this means that we can
-    # also handle its fromlist (and reliably determine is_package()).
-
-    # if the bottom node is a package, then (potentially) import some modules.
-    #
-    # note: if it is not a package, then "fromlist" refers to names in
-    #       the bottom module rather than modules.
-    # note: for a mix of names and modules in the fromlist, we will
-    #       import all modules and insert those into the namespace of
-    #       the package module. Python will pick up all fromlist names
-    #       from the bottom (package) module; some will be modules that
-    #       we imported and stored in the namespace, others are expected
-    #       to be present already.
-    if self._is_package(bottom.__dict__):
-      self._import_fromlist(bottom, fromlist)
-
-    # if the form is "from a.b import c, d" then return "b"
-    return bottom
-
-  def _reload_hook(self, module):
-    "Python calls this hook to reload a module."
-
-    # reloading of a module may or may not be possible (depending on the
-    # importer), but at least we can validate that it's ours to reload
-    importer = module.__dict__.get('__importer__', None)
-    if importer is not self:
-      return self.__chain_reload(module)
-
-    # okay. it is ours, but we don't know what to do (yet)
-    ### we should blast the module dict and do another get_code(). need to
-    ### flesh this out and add proper docco...
-    raise SystemError, "reload not yet implemented"
-
-  def _determine_import_context(self, globals):
-    """Returns the context in which a module should be imported.
-
-    The context could be a loaded (package) module and the imported module
-    will be looked for within that package. The context could also be None,
-    meaning there is no context -- the module should be looked for as a
-    "top-level" module.
-    """
-
-    if not globals or \
-       globals.get('__importer__', None) is not self:
-      # globals does not refer to one of our modules or packages.
-      # That implies there is no relative import context, and it
-      # should just pick it off the standard path.
-      return None
-
-    # The globals refer to a module or package of ours. It will define
-    # the context of the new import. Get the module/package fqname.
-    parent_fqname = globals['__name__']
-
-    # for a package, return itself (imports refer to pkg contents)
-    if self._is_package(globals):
-      parent = sys.modules[parent_fqname]
-      assert globals is parent.__dict__
-      return parent
-
-    i = strop.rfind(parent_fqname, '.')
-
-    # a module outside of a package has no particular import context
-    if i == -1:
-      return None
-
-    # for a module in a package, return the package (imports refer to siblings)
-    parent_fqname = parent_fqname[:i]
-    parent = sys.modules[parent_fqname]
-    assert parent.__name__ == parent_fqname
-    return parent
-
-  def _import_top_module(self, parent, name):
-    """Locate the top of the import tree (relative or absolute).
-
-    parent defines the context in which the import should occur. See
-    _determine_import_context() for details.
-
-    Returns a tuple (module, tail). module is the loaded (top-level) module,
-    or None if the module is not found. tail is the remaining portion of
-    the dotted name.
-    """
-    i = strop.find(name, '.')
-    if i == -1:
-      head = name
-      tail = ""
-    else:
-      head = name[:i]
-      tail = name[i+1:]
-    if parent:
-      fqname = "%s.%s" % (parent.__name__, head)
-    else:
-      fqname = head
-    module = self._import_one(parent, head, fqname)
-    if module:
-      # the module was relative, or no context existed (the module was
-      # simply found on the path).
-      return module, tail
-    if parent:
-      # we tried relative, now try an absolute import (from the path)
-      module = self._import_one(None, head, head)
-      if module:
-        return module, tail
-
-    # the module wasn't found
-    return None, None
-
-  def _import_one(self, parent, modname, fqname):
-    "Import a single module."
-
-    # has the module already been imported?
-    try:
-      return sys.modules[fqname]
-    except KeyError:
-      pass
-
-    # load the module's code, or fetch the module itself
-    result = self.get_code(parent, modname, fqname)
-    if result is None:
-      return None
-
-    # did get_code() return an actual module? (rather than a code object)
-    is_module = type(result[1]) is type(sys)
-
-    # use the returned module, or create a new one to exec code into
-    if is_module:
-      module = result[1]
-    else:
-      module = imp.new_module(fqname)
-
-    ### record packages a bit differently??
-    module.__importer__ = self
-    module.__ispkg__ = result[0]
-
-    # if present, the third item is a set of values to insert into the module
-    if len(result) > 2:
-      module.__dict__.update(result[2])
-
-    # the module is almost ready... make it visible
-    sys.modules[fqname] = module
-
-    # execute the code within the module's namespace
-    if not is_module:
-      exec(result[1], module.__dict__)
-
-    # insert the module into its parent
-    if parent:
-      setattr(parent, modname, module)
-    return module
-
-  def _load_tail(self, m, tail):
-    """Import the rest of the modules, down from the top-level module.
-
-    Returns the last module in the dotted list of modules.
-    """
-    if tail:
-      for part in strop.splitfields(tail, '.'):
-        fqname = "%s.%s" % (m.__name__, part)
-        m = self._import_one(m, part, fqname)
-        if not m:
-          raise ImportError, "No module named " + fqname
-    return m
-
-  def _import_fromlist(self, package, fromlist):
-    'Import any sub-modules in the "from" list.'
-
-    # if '*' is present in the fromlist, then look for the '__all__' variable
-    # to find additional items (modules) to import.
-    if '*' in fromlist:
-      fromlist = list(fromlist) + list(package.__dict__.get('__all__', []))
-
-    for sub in fromlist:
-      # if the name is already present, then don't try to import it (it
-      # might not be a module!).
-      if sub != '*' and not hasattr(package, sub):
-        subname = "%s.%s" % (package.__name__, sub)
-        submod = self._import_one(package, sub, subname)
-        if not submod:
-          raise ImportError, "cannot import name " + subname
-
-  def _is_package(self, module_dict):
-    """Determine if a given module (dictionary) specifies a package.
-
-    The package status is in the module-level name __ispkg__. The module
-    must also have been imported by self, so that we can reliably apply
-    semantic meaning to __ispkg__.
-
-    ### weaken the test to issubclass(Importer)?
-    """
-    return module_dict.get('__importer__', None) is self and \
-           module_dict['__ispkg__']
-
-  ######################################################################
-  #
-  # METHODS TO OVERRIDE
-  #
-  def get_code(self, parent, modname, fqname):
-    """Find and retrieve the code for the given module.
-
-    parent specifies a parent module to define a context for importing. It
-    may be None, indicating no particular context for the search.
-
-    modname specifies a single module (not dotted) within the parent.
-
-    fqname specifies the fully-qualified module name. This is a (potentially)
-    dotted name from the "root" of the module namespace down to the modname.
-    If there is no parent, then modname==fqname.
-
-    This method should return None, a 2-tuple, or a 3-tuple.
-
-    * If the module was not found, then None should be returned.
-
-    * The first item of the 2- or 3-tuple should be the integer 0 or 1,
-      specifying whether the module that was found is a package or not.
-
-    * The second item is the code object for the module (it will be
-      executed within the new module's namespace). This item can also
-      be a fully-loaded module object (e.g. loaded from a shared lib).
-
-    * If present, the third item is a dictionary of name/value pairs that
-      will be inserted into the new module before the code object is executed.
-      This is provided in case the module's code expects certain values (such
-      as where the module was found). When the second item is a module
-      object, then these names/values will be inserted *after* the module
-      has been loaded/initialized.
-    """
-    raise RuntimeError, "get_code not implemented"
-
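For orientation while reading the removed code above: a minimal sketch of an Importer subclass that satisfies the get_code() contract just documented. Everything here is illustrative (the class name, the dict of sources and the 'hello' module are invented), it is Python 2 like the rest of this file, and it assumes the module is importable as imputil.

import imputil

class SourceDictImporter(imputil.Importer):
    # Serves modules whose source lives in an in-memory dict; anything else
    # returns None so the remaining importers get a chance.
    def __init__(self, sources):
        self.sources = sources                 # {fqname: source string}

    def get_code(self, parent, modname, fqname):
        src = self.sources.get(fqname)
        if src is None:
            return None                        # not found here
        code = compile(src, '<%s>' % fqname, 'exec')
        # not a package (0); extra names are inserted before the code runs
        return 0, code, {'__from_dict__': 1}

SourceDictImporter({'hello': "GREETING = 'hi'"}).install()
import hello                                   # now served from the dict
print hello.GREETING                           # -> hi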
-
-######################################################################
-#
-# Simple function-based importer
-#
-class FuncImporter(Importer):
-  "Importer subclass to use a supplied function rather than method overrides."
-  def __init__(self, func):
-    self.func = func
-  def get_code(self, parent, modname, fqname):
-    return self.func(parent, modname, fqname)
-
-def install_with(func):
-  FuncImporter(func).install()
-
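install_with() is only a convenience: the supplied function becomes the get_code() of a FuncImporter. A small hedged example (Python 2) that merely observes import requests and defers to the normal machinery by returning None:

import imputil

def trace_imports(parent, modname, fqname):
    # Returning None means "not handled here"; the next importer is tried.
    print "import requested:", fqname
    return None

imputil.install_with(trace_imports)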
-
-######################################################################
-#
-# Base class for archive-based importing
-#
-class PackageArchiveImporter(Importer):
-  "Importer subclass to import from (file) archives."
-
-  def get_code(self, parent, modname, fqname):
-    if parent:
-      # if a parent "package" is provided, then we are importing a sub-file
-      # from the archive.
-      result = self.get_subfile(parent.__archive__, modname)
-      if result is None:
-        return None
-      if type(result) == type(()):
-        return (0,) + result
-      return 0, result
-
-    # no parent was provided, so the archive should exist somewhere on the
-    # default "path".
-    archive = self.get_archive(modname)
-    if archive is None:
-      return None
-    return 1, "", {'__archive__':archive}
-
-  def get_archive(self, modname):
-    """Get an archive of modules.
-
-    This method should locate an archive and return a value which can be
-    used by get_subfile to load modules from it. The value may be a simple
-    pathname, an open file, or a complex object that caches information
-    for future imports.
-
-    Return None if the archive was not found.
-    """
-    raise RuntimeError, "get_archive not implemented"
-
-  def get_subfile(self, archive, modname):
-    """Get code from a subfile in the specified archive.
-
-    Given the specified archive (as returned by get_archive()), locate
-    and return a code object for the specified module name.
-
-    A 2-tuple may be returned, consisting of a code object and a dict
-    of name/values to place into the target module.
-
-    Return None if the subfile was not found.
-    """
-    raise RuntimeError, "get_subfile not implemented"
-
-
-class PackageArchive(PackageArchiveImporter):
-  "PackageArchiveImporter subclass that refers to a specific archive."
-
-  def __init__(self, modname, archive_pathname):
-    self.__modname = modname
-    self.__path = archive_pathname
-
-  def get_archive(self, modname):
-    if modname == self.__modname:
-      return self.__path
-    return None
-
-  # get_subfile is passed the full pathname of the archive
-
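A hedged sketch of the two-method protocol PackageArchiveImporter expects subclasses to provide, using an in-memory dict as a stand-in archive. The package name, submodule and source string are invented; Python 2 as above.

import imputil

class DictArchiveImporter(imputil.PackageArchiveImporter):
    # Toy archive: one top-level package name mapping to a dict of
    # {submodule name: source string}.
    archives = {'toypkg': {'greet': "MESSAGE = 'hello from the archive'"}}

    def get_archive(self, modname):
        # Return any handle that get_subfile() can use later, or None.
        return self.archives.get(modname)

    def get_subfile(self, archive, modname):
        src = archive.get(modname)
        if src is None:
            return None
        return compile(src, '<archive %s>' % modname, 'exec')

DictArchiveImporter().install()
# after this, "import toypkg.greet" is served from the dict above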
-
-######################################################################
-#
-# Emulate the standard directory-based import mechanism
-#
-
-class DirectoryImporter(Importer):
-  "Importer subclass to emulate the standard importer."
-
-  def __init__(self, dir):
-    self.dir = dir
-    self.ext_char = __debug__ and 'c' or 'o'
-    self.ext = '.py' + self.ext_char
-
-  def get_code(self, parent, modname, fqname):
-    if parent:
-      dir = parent.__pkgdir__
-    else:
-      dir = self.dir
-
-    # pull the os module from our instance data. we don't do this at the
-    # top-level, because it isn't a builtin module (and we want to defer
-    # loading non-builtins until as late as possible).
-    try:
-      os = self.os
-    except AttributeError:
-      import os
-      self.os = os
-
-    pathname = os.path.join(dir, modname)
-    if os.path.isdir(pathname):
-      values = { '__pkgdir__': pathname }
-      ispkg = 1
-      pathname = os.path.join(pathname, '__init__')
-    else:
-      values = { }
-      ispkg = 0
-
-    t_py = self._timestamp(pathname + '.py')
-    t_pyc = self._timestamp(pathname + self.ext)
-    if t_py is None and t_pyc is None:
-      return None
-    code = None
-    if t_py is None or (t_pyc is not None and t_pyc >= t_py):
-      f = open(pathname + self.ext, 'rb')
-      if f.read(4) == imp.get_magic():
-        t = struct.unpack('<I', f.read(4))[0]
-        if t == t_py:
-          code = marshal.load(f)
-      f.close()
-    if code is None:
-      code = self._compile(pathname + '.py', t_py)
-    return ispkg, code, values
-
-  def _timestamp(self, pathname):
-    try:
-      s = self.os.stat(pathname)
-    except OSError:
-      return None
-    return long(s[8])
-
-  def _compile(self, pathname, timestamp):
-    codestring = open(pathname, 'r').read()
-    if codestring and codestring[-1] != '\n':
-      codestring = codestring + '\n'
-    code = __builtin__.compile(codestring, pathname, 'exec')
-
-    # try to cache the compiled code
-    try:
-      f = open(pathname + self.ext_char, 'wb')
-      f.write('\0\0\0\0')
-      f.write(struct.pack('<I', timestamp))
-      marshal.dump(code, f)
-      f.flush()
-      f.seek(0, 0)
-      f.write(imp.get_magic())
-      f.close()
-    except OSError:
-      pass
-
-    return code
-
-  def __repr__(self):
-    return '<%s.%s for "%s" at 0x%x>' % (self.__class__.__module__,
-                                         self.__class__.__name__,
-                                         self.dir,
-                                         id(self))
-
-def _test_dir():
-  "Debug/test function to create DirectoryImporters from sys.path."
-  path = sys.path[:]
-  path.reverse()
-  for d in path:
-    DirectoryImporter(d).install()
-
-######################################################################
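To close out this file: _test_dir() above shows the intended registration pattern for DirectoryImporter. A hedged usage sketch with invented paths; note that _test_dir installs the importers in reverse path order, which suggests the most recently installed importer is consulted first.

import imputil

# One DirectoryImporter per search directory (paths are hypothetical).
for d in ['/opt/mygame/lib', '.']:
    imputil.DirectoryImporter(d).install()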

+ 0 - 91
direct/src/pyinst/installutils.py

@@ -1,91 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# demo code - use as you please.
-import os
-import stat
-
-def copyFile(srcFiles, destFile, append=0):
-    '''
-    Copy one or more files to another file.  If srcFiles is a list, then all
-    will be concatenated together to destFile.  The append flag is also valid
-    for single file copies.
-
-    destFile will have the mode, ownership and timestamp of the last file
-    copied/appended.
-    '''
-    if type(srcFiles) == type([]):
-        # in case we need to overwrite on the first file...
-        copyFile(srcFiles[0], destFile, append)
-        for file in srcFiles[1:]:
-            copyFile(file, destFile, 1)
-        return
-
-    mode = 'wb'
-    if append:
-        mode = 'ab'
-    print " ", srcFiles, "->",
-    input = open(srcFiles, 'rb')
-    if input:
-        print destFile
-        output = open(destFile, mode)
-        while 1:
-            bytesRead = input.read(8192)
-            if bytesRead:
-                output.write(bytesRead)
-            else:
-                break
-
-        input.close()
-        output.close()
-
-        stats = os.stat(srcFiles)
-        os.chmod(destFile, stats[stat.ST_MODE])
-        try:        # FAT16 file systems have only one file time
-            os.utime(destFile, (stats[stat.ST_ATIME], stats[stat.ST_MTIME]))
-        except:
-            pass
-        try:        
-            os.chown(destFile, stats[stat.ST_UID], stats[stat.ST_GID])
-        except:
-            pass
-
-def ensure(dirct):
-    dirnm = dirct
-    plist = []
-    try:
-        while not os.path.exists(dirnm):
-            dirnm, base = os.path.split(dirnm)
-            if base == '':
-                break
-            plist.insert(0, base)
-        for d in plist:
-            dirnm = os.path.join(dirnm, d)
-            os.mkdir(dirnm)
-    except:
-        return 0
-    return 1
-
-def getinstalldir(prompt="Enter an installation directory: "):
-    while 1:
-        installdir = raw_input("Enter an installation directory: ")
-        installdir = os.path.normpath(installdir)
-        if ensure(installdir):
-            break
-        else:
-            print installdir, "is not a valid pathname"
-            r = raw_input("Try again (y/n)?: ")
-            if r in 'nN':
-                sys.exit(0)
-    return installdir
-
-def installCArchive(nm, basedir, suffixdir):
-    import carchive_rt
-    fulldir = os.path.join(basedir, suffixdir)
-    if ensure(fulldir):
-        pkg = carchive_rt.CArchive(nm)
-        for fnm in pkg.contents():
-            stuff = pkg.extract(fnm)[1]
-            outnm = os.path.join(fulldir, fnm)
-            if ensure(os.path.dirname(outnm)):
-                open(outnm, 'wb').write(stuff)
-        pkg = None
-        os.remove(nm)
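For reference, a small hedged sketch of how the helpers in this removed installutils module fit together; all file and directory names are invented (Python 2).

import installutils

# Make sure the target tree exists, then copy one file and concatenate two
# others into a single destination file.
if installutils.ensure('/tmp/myapp/data'):
    installutils.copyFile('readme.txt', '/tmp/myapp/readme.txt')
    installutils.copyFile(['part1.dat', 'part2.dat'], '/tmp/myapp/data/all.dat')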

+ 0 - 85
direct/src/pyinst/ltoc.py

@@ -1,85 +0,0 @@
-import os, sys, UserList
-import finder, tocfilter, resource
-
-class lTOC(UserList.UserList):
-    """ A class for managing lists of resources.
-        Should be a UserList subclass. Doh. 
-        Like a list, but has merge(other) and filter() methods """
-    def __init__(self, reslist=None, filters=None):
-        UserList.UserList.__init__(self, reslist)
-        self.filters = []
-        if filters is not None:
-            self.filters = filters[:]
-    def prepend(self, res):
-        self.resources.insert(0, res)
-    def merge(self, other):
-        ' merge in another ltoc, discarding dups and preserving order '
-        tmp = {}
-        for res in self.data:
-            tmp[res.name] = 0
-        for res in other:
-            if tmp.get(res.name, 1):
-                self.data.append(res)
-                tmp[res.name] = 0
-    def filter(self):
-        ' invoke all filters '
-        for i in range(len(self.data)):
-            res = self.data[i]
-            if res:
-                for f in self.filters:
-                    if f.matches(res):
-                        self.data[i] = None
-                        break
-        self.data = filter(None, self.data)
-        return self
-    def unique(self):
-        ' remove all duplicate entries, preserving order '
-        new = self.__class__()
-        new.merge(self)
-        self.data = new.data
-    def toList(self):
-        ' return self as a list of (name, path, typ) '
-        tmp = []
-        for res in self.data:
-            tmp.append((res.name, res.path, res.typ))
-        return tmp
-    def addFilter(self, filter):
-        if type(filter) == type(''):
-            self.filters.append(finder.makeresource(filter).asFilter())
-        else:
-            if type(filter) == type(self):
-                if isinstance(filter, tocfilter._Filter):
-                    self.filters.append(filter)
-                elif isinstance(filter, resource.resource):
-                    self.filters.append(filter.asFilter())
-                else:
-                    raise ValueError, "can't make filter from %s", repr(filter)
-            else:
-                raise ValueError, "can't make filter from %s", repr(filter)
-        print " added filter", repr(self.filters[-1])             
-            
-   
-if __name__ == '__main__':
-    sys.path.insert(0, '.')
-    import finder
-    import pprint
-    s = finder.scriptresource('finder.py', './finder.py')
-    ##    pyltoc = lTOC(s.modules)
-    ##    l1 = pyltoc.toList()
-    ##    print "Raw py ltoc:", pprint.pprint(l1)
-    ##    f1 = ModFilter(['dospath', 'macpath', 'posixpath'])
-    ##    l2 = lTOC(s.modules).filter(f1).toList()
-    ##    print "Filter out dospath, macpath, posixpath:", pprint.pprint(l2)
-    ##    f2 = DirFilter(['.'])
-    ##    l3 = lTOC(s.modules).filter(f2).toList()
-    ##    print "Filter out current dir:", pprint.pprint(l3)
-    ##    f3 = StdLibFilter()
-    ##    l4 = lTOC(s.modules).filter(f3).toList()
-    ##    print "Filter out stdlib:", pprint.pprint(l4)
-    ##    #print "Filter out current dir and stdlib:", lTOC(s.modules).filter(f2, f3).toList()
-    binltoc = lTOC(s.binaries)
-    print "Raw bin ltoc:", pprint.pprint(binltoc.toList())
-    binltoc.addFilter('c:/winnt/system32')
-    pprint.pprint(binltoc.filter().toList())
-    
-    
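A hedged sketch of the lTOC workflow, mirroring the __main__ block above; the script name is invented and the finder/tocfilter modules from this same removed package are assumed to be importable (Python 2).

import finder, ltoc, tocfilter

# Collect a script's binary dependencies, drop anything under the standard
# library, and list what is left.
s = finder.scriptresource('myscript.py', './myscript.py')
bintoc = ltoc.lTOC(s.binaries)
bintoc.addFilter(tocfilter.StdLibFilter())
print bintoc.filter().toList()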

+ 0 - 42
direct/src/pyinst/mkarchive.py

@@ -1,42 +0,0 @@
-#import MkWrap
-import imputil
-import strop
-import zlib
-import os
-import marshal
-
-class MkImporter:
-    def __init__(self, db, viewnm='pylib'):
-        self.db = db
-        self.view = db.getas(viewnm+'[name:S, ispkg:I, code:M]') # an MkWrap view object
-    def setImportHooks(self):
-        imputil.FuncImporter(self.get_code).install()
-    def get_code(self, parent, modname, fqname):
-        if self.view is None:
-            return None
-        ndx = self.view.search(name=fqname)
-        if ndx < len(self.view):
-            row = self.view[ndx]
-            if row.name == fqname:
-                return (row.ispkg, marshal.loads(zlib.decompress(row.code)))
-        return None
-    def build(self, lTOC):
-        for entry in lTOC:
-            nm, fnm = entry[0], entry[1]
-            ispkg = os.path.splitext(os.path.basename(fnm))[0] == '__init__'
-            ndx = self.view.search(name=nm)
-            if ndx < len(self.view):
-                row = self.view[ndx]
-                if row.name != nm:
-                    self.view.insert(ndx, {})
-                    row = self.view[ndx]
-            else:
-                ndx = self.view.append({})
-                row = self.view[ndx]
-            row.name = nm
-            row.ispkg = ispkg
-            f = open(fnm, 'rb')
-            f.seek(8)
-            obj = zlib.compress(f.read(), 9)
-            row.code = obj
-        self.db.commit()
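MkImporter keeps compiled modules in a Metakit view (name, ispkg, zlib-compressed marshalled code) and serves them back through an imputil FuncImporter. A hedged sketch only: it assumes the Mk4py binding is importable as metakit, and the storage file name, module name and .pyc path are invented.

import metakit                                   # Mk4py binding, assumed available
import mkarchive

db = metakit.storage('pylib.dat', 1)             # writable Metakit storage file
mk = mkarchive.MkImporter(db)
mk.build([('mymodule', 'mymodule.pyc')])         # (name, compiled-file) pairs
mk.setImportHooks()                              # later imports may be served from the db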

+ 0 - 436
direct/src/pyinst/modulefinder.py

@@ -1,436 +0,0 @@
-"""Find modules used by a script, using introspection."""
-
-import dis
-import imp
-import marshal
-import os
-import re
-import string
-import sys
-
-if sys.platform=="win32":
-    # On Windows, we can locate modules in the registry with
-    # the help of the win32api package.
-    try:
-        import win32api
-    except ImportError:
-        print "The win32api module is not available - modules listed"
-        print "in the registry will not be found."
-        win32api = None
-
-
-IMPORT_NAME = dis.opname.index('IMPORT_NAME')
-IMPORT_FROM = dis.opname.index('IMPORT_FROM')
-
-# Modulefinder does a good job at simulating Python's import machinery, but it
-# cannot handle __path__ modifications packages make at runtime.  Therefore there
-# is a mechanism whereby you can register extra paths in this map for a
-# package, and it will be honoured.
-
-# Note this is a mapping of lists of paths.
-packagePathMap = {}
-
-# A Public interface
-def AddPackagePath(packagename, path):
-    paths = packagePathMap.get(packagename, [])
-    paths.append(path)
-    packagePathMap[packagename] = paths
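A hedged sketch of the public hook just described, together with the usual ModuleFinder workflow; the package name, extra path and script name are invented.

import modulefinder

# Register an extra search path for a package that extends __path__ at runtime.
modulefinder.AddPackagePath('mypkg', '/opt/mypkg/plugins')

mf = modulefinder.ModuleFinder(excludes=['win32api'])
mf.run_script('myscript.py')
mf.report()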
-
-class Module:
-
-    def __init__(self, name, file=None, path=None):
-        self.__name__ = name
-        self.__file__ = file
-        self.__path__ = path
-        self.__code__ = None
-
-    def __repr__(self):
-        s = "Module(%s" % repr(self.__name__)
-        if self.__file__ is not None:
-            s = s + ", %s" % repr(self.__file__)
-        if self.__path__ is not None:
-            s = s + ", %s" % repr(self.__path__)
-        s = s + ")"
-        return s
-
-
-class ModuleFinder:
-
-    def __init__(self, path=None, debug=0, excludes = []):
-        if path is None:
-            path = sys.path
-        self.path = path
-        self.modules = {}
-        self.badmodules = {}
-        self.debug = debug
-        self.indent = 0
-        self.excludes = excludes
-
-    def msg(self, level, str, *args):
-        if level <= self.debug:
-            for i in range(self.indent):
-                print "   ",
-            print str,
-            for arg in args:
-                print repr(arg),
-            print
-
-    def msgin(self, *args):
-        level = args[0]
-        if level <= self.debug:
-            self.indent = self.indent + 1
-            apply(self.msg, args)
-
-    def msgout(self, *args):
-        level = args[0]
-        if level <= self.debug:
-            self.indent = self.indent - 1
-            apply(self.msg, args)
-
-    def run_script(self, pathname):
-        self.msg(2, "run_script", pathname)
-        fp = open(pathname)
-        stuff = ("", "r", imp.PY_SOURCE)
-        self.load_module('__main__', fp, pathname, stuff)
-
-    def load_file(self, pathname):
-        dir, name = os.path.split(pathname)
-        name, ext = os.path.splitext(name)
-        fp = open(pathname)
-        stuff = (ext, "r", imp.PY_SOURCE)
-        self.load_module(name, fp, pathname, stuff)
-
-    def import_hook(self, name, caller=None, fromlist=None):
-        self.msg(3, "import_hook", name, caller, fromlist)
-        parent = self.determine_parent(caller)
-        q, tail = self.find_head_package(parent, name)
-        m = self.load_tail(q, tail)
-        if not fromlist:
-            return q
-        if m.__path__:
-            self.ensure_fromlist(m, fromlist)
-
-    def determine_parent(self, caller):
-        self.msgin(4, "determine_parent", caller)
-        if not caller:
-            self.msgout(4, "determine_parent -> None")
-            return None
-        pname = caller.__name__
-        if caller.__path__:
-            parent = self.modules[pname]
-            assert caller is parent
-            self.msgout(4, "determine_parent ->", parent)
-            return parent
-        if '.' in pname:
-            i = string.rfind(pname, '.')
-            pname = pname[:i]
-            parent = self.modules[pname]
-            assert parent.__name__ == pname
-            self.msgout(4, "determine_parent ->", parent)
-            return parent
-        self.msgout(4, "determine_parent -> None")
-        return None
-
-    def find_head_package(self, parent, name):
-        self.msgin(4, "find_head_package", parent, name)
-        if '.' in name:
-            i = string.find(name, '.')
-            head = name[:i]
-            tail = name[i+1:]
-        else:
-            head = name
-            tail = ""
-        if parent:
-            qname = "%s.%s" % (parent.__name__, head)
-        else:
-            qname = head
-        q = self.import_module(head, qname, parent)
-        if q:
-            self.msgout(4, "find_head_package ->", (q, tail))
-            return q, tail
-        if parent:
-            qname = head
-            parent = None
-            q = self.import_module(head, qname, parent)
-            if q:
-                self.msgout(4, "find_head_package ->", (q, tail))
-                return q, tail
-        self.msgout(4, "raise ImportError: No module named", qname)
-        raise ImportError, "No module named " + qname
-
-    def load_tail(self, q, tail):
-        self.msgin(4, "load_tail", q, tail)
-        m = q
-        while tail:
-            i = string.find(tail, '.')
-            if i < 0: i = len(tail)
-            head, tail = tail[:i], tail[i+1:]
-            mname = "%s.%s" % (m.__name__, head)
-            m = self.import_module(head, mname, m)
-            if not m:
-                self.msgout(4, "raise ImportError: No module named", mname)
-                raise ImportError, "No module named " + mname
-        self.msgout(4, "load_tail ->", m)
-        return m
-
-    def ensure_fromlist(self, m, fromlist, recursive=0):
-        self.msg(4, "ensure_fromlist", m, fromlist, recursive)
-        for sub in fromlist:
-            if sub == "*":
-                if not recursive:
-                    all = self.find_all_submodules(m)
-                    if all:
-                        self.ensure_fromlist(m, all, 1)
-            elif not hasattr(m, sub):
-                subname = "%s.%s" % (m.__name__, sub)
-                submod = self.import_module(sub, subname, m)
-                if not submod:
-                    raise ImportError, "No module named " + subname
-
-    def find_all_submodules(self, m):
-        if not m.__path__:
-            return
-        modules = {}
-        suffixes = [".py", ".pyc", ".pyo"]
-        for dir in m.__path__:
-            try:
-                names = os.listdir(dir)
-            except os.error:
-                self.msg(2, "can't list directory", dir)
-                continue
-            for name in names:
-                mod = None
-                for suff in suffixes:
-                    n = len(suff)
-                    if name[-n:] == suff:
-                        mod = name[:-n]
-                        break
-                if mod and mod != "__init__":
-                    modules[mod] = mod
-        return modules.keys()
-
-    def import_module(self, partname, fqname, parent):
-        self.msgin(3, "import_module", partname, fqname, parent)
-        try:
-            m = self.modules[fqname]
-        except KeyError:
-            pass
-        else:
-            self.msgout(3, "import_module ->", m)
-            return m
-        if fqname in self.badmodules:
-            self.msgout(3, "import_module -> None")
-            self.badmodules[fqname][parent.__name__] = None
-            return None
-        try:
-            fp, pathname, stuff = self.find_module(partname,
-                                                   parent and parent.__path__)
-        except ImportError:
-            self.msgout(3, "import_module ->", None)
-            return None
-        try:
-            m = self.load_module(fqname, fp, pathname, stuff)
-        finally:
-            if fp: fp.close()
-        if parent:
-            setattr(parent, partname, m)
-        self.msgout(3, "import_module ->", m)
-        return m
-
-    def load_module(self, fqname, fp, pathname, (suffix, mode, type)):
-        self.msgin(2, "load_module", fqname, fp and "fp", pathname)
-        if type == imp.PKG_DIRECTORY:
-            m = self.load_package(fqname, pathname)
-            self.msgout(2, "load_module ->", m)
-            return m
-        if type == imp.PY_SOURCE:
-            co = compile(fp.read()+'\n', pathname, 'exec')
-        elif type == imp.PY_COMPILED:
-            if fp.read(4) != imp.get_magic():
-                self.msgout(2, "raise ImportError: Bad magic number", pathname)
-                raise ImportError, "Bad magic number in %s", pathname
-            fp.read(4)
-            co = marshal.load(fp)
-        else:
-            co = None
-        m = self.add_module(fqname)
-        m.__file__ = pathname
-        if co:
-            m.__code__ = co
-            self.scan_code(co, m)
-        self.msgout(2, "load_module ->", m)
-        return m
-
-    def scan_code(self, co, m):
-        code = co.co_code
-        n = len(code)
-        i = 0
-        lastname = None
-        while i < n:
-            c = code[i]
-            i = i+1
-            op = ord(c)
-            if op >= dis.HAVE_ARGUMENT:
-                oparg = ord(code[i]) + ord(code[i+1])*256
-                i = i+2
-            if op == IMPORT_NAME:
-                name = lastname = co.co_names[oparg]
-                if lastname not in self.badmodules:
-                    try:
-                        self.import_hook(name, m)
-                    except ImportError, msg:
-                        self.msg(2, "ImportError:", str(msg))
-                        if name not in self.badmodules:
-                            self.badmodules[name] = {}
-                        self.badmodules[name][m.__name__] = None
-            elif op == IMPORT_FROM:
-                name = co.co_names[oparg]
-                assert lastname is not None
-                if lastname not in self.badmodules:
-                    try:
-                        self.import_hook(lastname, m, [name])
-                    except ImportError, msg:
-                        self.msg(2, "ImportError:", str(msg))
-                        fullname = lastname + "." + name
-                        if fullname not in self.badmodules:
-                            self.badmodules[fullname] = {}
-                        self.badmodules[fullname][m.__name__] = None
-            else:
-                lastname = None
-        for c in co.co_consts:
-            if isinstance(c, type(co)):
-                self.scan_code(c, m)
-
-    def load_package(self, fqname, pathname):
-        self.msgin(2, "load_package", fqname, pathname)
-        m = self.add_module(fqname)
-        m.__file__ = pathname
-        m.__path__ = [pathname]
-
-        # As per comment at top of file, simulate runtime __path__ additions.
-        m.__path__ = m.__path__ + packagePathMap.get(fqname, [])
-
-        fp, buf, stuff = self.find_module("__init__", m.__path__)
-        self.load_module(fqname, fp, buf, stuff)
-        self.msgout(2, "load_package ->", m)
-        return m
-
-    def add_module(self, fqname):
-        if fqname in self.modules:
-            return self.modules[fqname]
-        self.modules[fqname] = m = Module(fqname)
-        return m
-
-    def find_module(self, name, path):
-        if name in self.excludes:
-            self.msgout(3, "find_module -> Excluded")
-            raise ImportError, name
-
-        if path is None:
-            if name in sys.builtin_module_names:
-                return (None, None, ("", "", imp.C_BUILTIN))
-
-            # Emulate the Registered Module support on Windows.
-            if sys.platform=="win32" and win32api is not None:
-                HKEY_LOCAL_MACHINE = 0x80000002
-                try:
-                    pathname = win32api.RegQueryValue(HKEY_LOCAL_MACHINE, "Software\\Python\\PythonCore\\%s\\Modules\\%s" % (sys.winver, name))
-                    fp = open(pathname, "rb")
-                    # XXX - To do - remove the hard code of C_EXTENSION.
-                    stuff = "", "rb", imp.C_EXTENSION
-                    return fp, pathname, stuff
-                except win32api.error:
-                    pass
-
-            path = self.path
-        return imp.find_module(name, path)
-
-    def report(self):
-        print
-        print "  %-25s %s" % ("Name", "File")
-        print "  %-25s %s" % ("----", "----")
-        # Print modules found
-        keys = self.modules.keys()
-        keys.sort()
-        for key in keys:
-            m = self.modules[key]
-            if m.__path__:
-                print "P",
-            else:
-                print "m",
-            print "%-25s" % key, m.__file__ or ""
-
-        # Print missing modules
-        keys = self.badmodules.keys()
-        keys.sort()
-        for key in keys:
-            # ... but not if they were explicitly excluded.
-            if key not in self.excludes:
-                mods = self.badmodules[key].keys()
-                mods.sort()
-                print "?", key, "from", string.join(mods, ', ')
-
-
-def test():
-    # Parse command line
-    import getopt
-    try:
-        opts, args = getopt.getopt(sys.argv[1:], "dmp:qx:")
-    except getopt.error, msg:
-        print msg
-        return
-
-    # Process options
-    debug = 1
-    domods = 0
-    addpath = []
-    exclude = []
-    for o, a in opts:
-        if o == '-d':
-            debug = debug + 1
-        if o == '-m':
-            domods = 1
-        if o == '-p':
-            addpath = addpath + string.split(a, os.pathsep)
-        if o == '-q':
-            debug = 0
-        if o == '-x':
-            exclude.append(a)
-
-    # Provide default arguments
-    if not args:
-        script = "hello.py"
-    else:
-        script = args[0]
-
-    # Set the path based on sys.path and the script directory
-    path = sys.path[:]
-    path[0] = os.path.dirname(script)
-    path = addpath + path
-    if debug > 1:
-        print "path:"
-        for item in path:
-            print "   ", repr(item)
-
-    # Create the module finder and turn its crank
-    mf = ModuleFinder(path, debug, exclude)
-    for arg in args[1:]:
-        if arg == '-m':
-            domods = 1
-            continue
-        if domods:
-            if arg[-2:] == '.*':
-                mf.import_hook(arg[:-2], None, ["*"])
-            else:
-                mf.import_hook(arg)
-        else:
-            mf.load_file(arg)
-    mf.run_script(script)
-    mf.report()
-
-
-if __name__ == '__main__':
-    try:
-        test()
-    except KeyboardInterrupt:
-        print "\n[interrupt]"

+ 0 - 317
direct/src/pyinst/resource.py

@@ -1,317 +0,0 @@
-import os
-import string
-import archivebuilder
-import carchive
-import tocfilter
-import bindepend
-import finder
-
-_cache = {}
-
-def makeresource(name, xtrapath=None):
-    """Factory function that returns a resource subclass.
-
-       NAME is the logical or physical name of a resource.
-       XTRAPATH is a path or list of paths to search first.
-       return one of the resource subclasses.
-       Warning - logical names can conflict; archive might return a directory,
-       when the module archive.py was desired."""
-    typ, nm, fullname = finder.identify(name, xtrapath)
-    fullname = os.path.normpath(fullname)
-    if fullname in _cache:
-        return _cache[fullname]
-    elif typ in (finder.SCRIPT, finder.GSCRIPT):
-        rsrc = scriptresource(nm, fullname)
-    elif typ == finder.MODULE:
-        rsrc = moduleresource(nm, fullname)
-    elif typ == finder.PACKAGE:
-        rsrc = pkgresource(nm, fullname)
-    elif typ in (finder.PBINARY, finder.BINARY):
-        rsrc = binaryresource(nm, fullname)
-    elif typ == finder.ZLIB:
-        rsrc = zlibresource(nm, fullname)
-    elif typ == finder.DIRECTORY:
-        rsrc = dirresource(nm, fullname)
-    else:
-        try:
-            carchive.CArchive(fullname)
-        except:
-            rsrc = dataresource(nm, fullname)
-        else:
-            rsrc = archiveresource(nm, fullname)
-    _cache[fullname] = rsrc
-    return rsrc
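A hedged sketch of the factory in use; the script name is invented, and this local resource module is assumed to shadow any other module of the same name on sys.path (Python 2).

import resource                                # the pyinst resource module

r = resource.makeresource('myscript.py')       # returns a scriptresource
print r.name, r.path, r.typ
for dep in r.dependencies():                   # module- and binary-resources
    print "  dep:", dep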
-
-class resource:
-    """ Base class for all resources.
-
-        contents() returns of list of what's contained (eg files in dirs)
-        dependencies() for Python resources returns a list of moduleresources
-         and binaryresources """
-    def __init__(self, name, path, typ):
-        """NAME is the logical name of the resource.
-           PATH is the full path to the resource.
-           TYP is the type code.
-           No editing or sanity checks.
-        self.name = name
-        self.path = path
-        self.typ = typ
-    def __repr__(self):
-        return "(%(name)s, %(path)s, %(typ)s)" % self.__dict__
-    def contents(self):
-        """A list of resources within this resource.
-
-           Overridable.
-           Base implementation returns [self]"""
-        return [self]
-    def dependencies(self):
-        """A list of resources this resource requires.
-
-           Overridable.
-           Base implementation returns []"""
-        return []
-    def __cmp__(self, other):
-        if not isinstance(other, self.__class__):
-            return -1
-        return cmp((self.typ, self.name), (other.typ, other.name))
-    def asFilter(self):
-        """Create a tocfilter based on self.
-
-           Pure virtual"""
-        raise NotImplementedError
-    def asSource(self):
-        """Return self in source form.
-
-           Base implementation returns self"""
-        return self
-    def asBinary(self):
-        """Return self in binary form.
-
-           Base implementation returns self"""
-        return self
-
-class pythonresource(resource):
-    """An empty base class.
-
-       Used to classify resources."""
-    pass
-
-
-class scriptresource(pythonresource):
-    """ A top-level python resource.
-
-        Has (lazily computed) attributes, modules and binaries, which together
-        are the scripts dependencies() """
-    def __init__(self, name, fullname):
-        resource.__init__(self, name, fullname, 's')
-    def __getattr__(self, name):
-        if name == 'modules':
-            print "Analyzing python dependencies of", self.name, self.path
-            self.modules = []
-            self._binaries = []
-            nodes = string.split(self.name, '.')[:-1] # MEInc.Dist.archive -> ['MEInc', 'Dist']
-            for i in range(len(nodes)):
-                nm = string.join(nodes[:i+1], '.')
-                rsrc = makeresource(nm+'.__init__')
-                rsrc.name = nm
-                self.modules.append(rsrc)
-            for (nm, path) in archivebuilder.Dependencies(self.path):
-                path = os.path.normcase(os.path.abspath(path))
-                if os.path.splitext(path)[1] == '.py':
-                    self.modules.append(moduleresource(nm, path))
-                else:
-                    self._binaries.append(binaryresource(nm, path))
-            return self.modules
-        elif name == 'binaries':
-            x = self.modules
-            tmp = {}
-            for br in self._binaries:
-                tmp[br.name] = br
-                for br2 in br.dependencies():
-                    tmp[br2.name] = br2
-            self.binaries = tmp.values()
-            return self.binaries
-        else:
-            raise AttributeError, "%s" % name
-    def dependencies(self):
-        """Return all dependencies (Python and binary) of self."""
-        return self.modules + self.binaries
-    def asFilter(self):
-        """Return a ModFilter based on self."""
-        return tocfilter.ModFilter([self.name])
-    def asSource(self):
-        """Return self as a dataresource (ie, a text file wrapper)."""
-        r = dataresource(self.path)
-        r.name = apply(os.path.join, string.split(self.name, '.')[:-1]+[r.name])
-        return r
-
-class moduleresource(scriptresource):
-    """ A module resource (differs from script in that it will generally
-        be worked with as a .pyc instead of in source form) """
-    def __init__(self, name, fullname):
-        resource.__init__(self, name, fullname, 'm')
-    def asBinary(self):
-        """Return self as a dataresource (ie, a binary file wrapper)."""
-        r = dataresource(self.path)
-        r.name = os.path.basename(r.name)
-        r.typ = 'b'
-        return r
-    def asSource(self):
-        """Return self as a scriptresource (ie, uncompiled form)."""
-        return scriptresource(self.name, self.path[:-1]).asSource()
-
-class binaryresource(resource):
-    """A .dll or .pyd.
-
-       dependencies() yields more binaryresources """
-    def __init__(self, name, fullname):
-        if string.find(name, '.') == -1:
-            pth, bnm = os.path.split(fullname)
-            junk, ext = os.path.splitext(bnm)
-            fullname = os.path.join(pth, name + ext)
-        resource.__init__(self, name, fullname, 'b')
-        self._depends = None
-    def dependencies(self):
-        """Return a list of binary dependencies."""
-        if self._depends is not None:
-            return self._depends
-        self._depends = []
-        for (lib, path) in bindepend.Dependencies([(self.name, self.path)]):
-            self._depends.append(binaryresource(lib, path))
-        return self._depends
-    def asFilter(self):
-        """Create a FileFilter from self."""
-        return tocfilter.FileFilter([self.name])
-
-class dataresource(resource):
-    """A subclass for arbitrary files. """
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'x')
-    def asFilter(self):
-        """Create a FileFilter from self."""
-        return tocfilter.FileFilter([self.name])
-
-class archiveresource(dataresource):
-    """A sublcass for CArchives. """
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'a')
-
-class zlibresource(dataresource):
-    """A subclass for ZlibArchives. """
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'z')
-
-class dirresource(resource):
-    """A sublcass for a directory.
-
-       Generally transformed to a list of files through
-        contents() and filtered by file extensions or resource type.
-        Note that contents() is smart enough to regard a .py and .pyc
-        as the same resource. """
-    RECURSIVE = 0
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'd')
-        self._contents = None
-    def contents(self, prefix=''):
-        """Return the list of (typed) resources in self.name"""
-        if self._contents is not None:
-            return self._contents
-        self._contents = []
-        flist = os.listdir(self.path)
-        for fnm in flist:
-            try:
-                bnm, ext = os.path.splitext(fnm)
-                if ext == '.py' and (bnm+'.pyc' in flist or bnm+'.pyo' in flist):
-                    pass
-                elif ext == '.pyo' and (bnm + '.pyc' in flist):
-                    pass
-                else:
-                    rsrc = makeresource(os.path.join(self.path, fnm))
-                    if isinstance(rsrc, pkgresource):
-                        rsrc = self.__class__(rsrc.path)
-                    if self.RECURSIVE:
-                        if isinstance(rsrc, moduleresource) or isinstance(rsrc, scriptresource):
-                            rsrc = rsrc.asSource()
-                            fnm = os.path.basename(rsrc.path)
-                        rsrc.name = os.path.join(prefix, fnm)
-                        if rsrc.typ == 'd':
-                            rsrc.RECURSIVE = 1
-                            self._contents.extend(rsrc.contents(rsrc.name))
-                        else:
-                            self._contents.append(rsrc)
-                    else:
-                        self._contents.append(rsrc)
-            except ValueError, e:
-                raise RuntimeError, "Can't make resource from %s\n ValueError: %s" \
-                      % (os.path.join(self.path, fnm), repr(e.args))
-        return self._contents
-    def asFilter(self):
-        return tocfilter.DirFilter([self.path])
-
-class treeresource(dirresource):
-    """A subclass for a directory and subdirectories."""
-    RECURSIVE = 1
-    def __init__(self, name, fullname=None):
-        dirresource.__init__(self, name, fullname)
-
-class pkgresource(pythonresource):
-    """A Python package.
-
-        Note that contents() can be fooled by fancy __path__ statements. """
-    def __init__(self, nm, fullname):
-        resource.__init__(self, nm, fullname, 'p')
-        self._contents = None
-        self._depends = None
-    def contents(self, parent=None):
-        """Return a list of subpackages and modules in self."""
-        if self._contents is not None:
-            return self._contents
-        if parent is None:
-            parent = self.name
-        self._contents = []
-        cheat = treeresource(self.path)
-        for rsrc in cheat.contents():
-            if os.path.splitext(rsrc.path)[1] == '.py':
-                rsrc = moduleresource(string.replace(rsrc.name[:-3], os.sep, '.'),
-                                      rsrc.path)
-                if rsrc.name[-8:] == '__init__':
-                    rsrc.name = rsrc.name[:-9]
-            elif os.path.isdir(rsrc.path):
-                rsrc = makeresource(rsrc.path)
-            else:
-                continue
-            if rsrc.name:
-                rsrc.name = parent + '.' + rsrc.name
-            else:
-                rsrc.name = parent
-            if rsrc.typ == 'm':
-                self._contents.append(rsrc)
-            elif rsrc.typ == 'p':
-                self._contents.extend(rsrc.contents(rsrc.name))
-        return self._contents
-    def dependencies(self):
-        """Return the list of accumulated dependencies of all modules in self."""
-        if self._depends is not None:
-            return self._depends
-        self._depends = []
-        tmp = {}
-        for rsrc in self.contents():
-            for r in rsrc.dependencies():
-                tmp[r.name] = r
-        self._depends = tmp.values()
-        return self._depends
-    def asFilter(self):
-        """Create a PkgFilter from self."""
-        return tocfilter.PkgFilter([os.path.dirname(self.path)])
-
-
-
-
-
-
-
-if __name__ == '__main__':
-    s = scriptresource('finder.py', './finder.py')
-    print "s.modules:", s.modules
-    print "s.binaries:", s.binaries
-

+ 0 - 131
direct/src/pyinst/tocfilter.py

@@ -1,131 +0,0 @@
-import os
-import finder
-import re
-import sys
-
-def makefilter(name, xtrapath=None):
-    typ, nm, fullname = finder.identify(name, xtrapath)
-    if typ in (finder.SCRIPT, finder.GSCRIPT, finder.MODULE):
-        return ModFilter([os.path.splitext(nm)[0]])
-    if typ == finder.PACKAGE:
-        return PkgFilter([fullname])
-    if typ == finder.DIRECTORY:
-        return DirFilter([fullname])
-    if typ in (finder.BINARY, finder.PBINARY):
-        return FileFilter([nm])
-    return FileFilter([fullname])
-  
-class _Filter:
-    def __repr__(self):
-        return '<'+self.__class__.__name__+' '+repr(self.elements)+'>'
-    
-class _NameFilter(_Filter):
-    """ A filter mixin that matches (exactly) on name """
-    def matches(self, res):
-        return self.elements.get(res.name, 0)
-        
-class _PathFilter(_Filter):
-    """ A filter mixin that matches if the resource is below any of the paths"""
-    def matches(self, res):
-        p = os.path.normcase(os.path.abspath(res.path))
-        while len(p) > 3:
-            p = os.path.dirname(p)
-            if self.elements.get(p, 0):
-                return 1
-        return 0
-        
-class _ExtFilter(_Filter):
-    """ A filter mixin that matches based on file extensions (either way) """
-    include = 0
-    def matches(self, res):
-        fnd = self.elements.get(os.path.splitext(res.path)[1], 0)
-        if self.include:
-            return not fnd
-        return fnd
-    
-class _TypeFilter(_Filter):
-    """ A filter mixin that matches on resource type (either way) """
-    include = 0
-    def matches(self, res):
-        fnd = self.elements.get(res.typ, 0)
-        if self.include:
-            return not fnd
-        return fnd
-
-class _PatternFilter(_Filter):
-    """ A filter that matches if re.search succeeds on the resource path """
-    def matches(self, res):
-        for regex in self.elements:
-            if regex.search(res.path):
-                return 1
-        return 0
-    
-class ExtFilter(_ExtFilter):
-    """ A file extension filter.
-        ExtFilter(extlist, include=0)
-        where extlist is a list of file extensions """
-    def __init__(self, extlist, include=0):
-        self.elements = {}
-        for ext in extlist:
-            if ext[0:1] != '.':
-                ext = '.'+ext
-            self.elements[ext] = 1
-        self.include = include
-
-class TypeFilter(_TypeFilter):
-    """ A filter for resource types.
-        TypeFilter(typlist, include=0)
-        where typlist is a subset of ['a','b','d','m','p','s','x','z'] """
-    def __init__(self, typlist, include=0):
-        self.elements = {}
-        for typ in typlist:
-            self.elements[typ] = 1
-        self.include = include
-
-class FileFilter(_NameFilter):
-    """ A filter for data files """
-    def __init__(self, filelist):
-        self.elements = {}
-        for f in filelist:
-            self.elements[f] = 1
-              
-class ModFilter(_NameFilter):
-    """ A filter for Python modules.
-        ModFilter(modlist) where modlist is eg ['macpath', 'dospath'] """
-    def __init__(self, modlist):
-        self.elements = {}
-        for mod in modlist:
-            self.elements[mod] = 1
-            
-class DirFilter(_PathFilter):
-    """ A filter based on directories.
-        DirFilter(dirlist)
-        dirs may be relative and will be normalized.
-        Subdirectories of dirs will be excluded. """
-    def __init__(self, dirlist):
-        self.elements = {}
-        for pth in dirlist:
-            pth = os.path.normcase(os.path.abspath(pth))
-            self.elements[pth] = 1
-            
-class PkgFilter(_PathFilter):
-    """At this time, identical to a DirFilter (being lazy) """
-    def __init__(self, pkglist):
-        #warning - pkgs are expected to be full directories
-        self.elements = {}
-        for pkg in pkglist:
-            pth = os.path.normcase(os.path.abspath(pkg))
-            self.elements[pth] = 1
-            
-class StdLibFilter(_PathFilter):
-    """ A filter that excludes anything found in the standard library """
-    def __init__(self):
-        pth = os.path.normcase(os.path.join(sys.exec_prefix, 'lib'))
-        self.elements = {pth:1}
-     
-class PatternFilter(_PatternFilter):
-    """ A filter that excludes if any pattern is found in resource's path """
-    def __init__(self, patterns):
-        self.elements = []
-        for pat in patterns:
-            self.elements.append(re.compile(pat))
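A hedged sketch exercising a few of the filters above in isolation, using a minimal stand-in object carrying the three attributes (name, path, typ) the filters actually inspect; the values are invented.

import tocfilter

class FakeResource:
    def __init__(self, name, path, typ):
        self.name, self.path, self.typ = name, path, typ

res = FakeResource('_socket', 'c:/python/dlls/_socket.pyd', 'b')

print tocfilter.ExtFilter(['pyd', 'dll']).matches(res)   # 1: extension is listed
print tocfilter.TypeFilter(['b']).matches(res)           # 1: binary resource type
print tocfilter.DirFilter(['c:/python']).matches(res)    # 1: path lies below the dir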

+ 0 - 1251
direct/src/showbase/ElementTree.py

@@ -1,1251 +0,0 @@
-#
-# ElementTree
-# $Id$
-#
-# light-weight XML support for Python 1.5.2 and later.
-#
-# history:
-# 2001-10-20 fl   created (from various sources)
-# 2001-11-01 fl   return root from parse method
-# 2002-02-16 fl   sort attributes in lexical order
-# 2002-04-06 fl   TreeBuilder refactoring, added PythonDoc markup
-# 2002-05-01 fl   finished TreeBuilder refactoring
-# 2002-07-14 fl   added basic namespace support to ElementTree.write
-# 2002-07-25 fl   added QName attribute support
-# 2002-10-20 fl   fixed encoding in write
-# 2002-11-24 fl   changed default encoding to ascii; fixed attribute encoding
-# 2002-11-27 fl   accept file objects or file names for parse/write
-# 2002-12-04 fl   moved XMLTreeBuilder back to this module
-# 2003-01-11 fl   fixed entity encoding glitch for us-ascii
-# 2003-02-13 fl   added XML literal factory
-# 2003-02-21 fl   added ProcessingInstruction/PI factory
-# 2003-05-11 fl   added tostring/fromstring helpers
-# 2003-05-26 fl   added ElementPath support
-# 2003-07-05 fl   added makeelement factory method
-# 2003-07-28 fl   added more well-known namespace prefixes
-# 2003-08-15 fl   fixed typo in ElementTree.findtext (Thomas Dartsch)
-# 2003-09-04 fl   fall back on emulator if ElementPath is not installed
-# 2003-10-31 fl   markup updates
-# 2003-11-15 fl   fixed nested namespace bug
-# 2004-03-28 fl   added XMLID helper
-# 2004-06-02 fl   added default support to findtext
-# 2004-06-08 fl   fixed encoding of non-ascii element/attribute names
-# 2004-08-23 fl   take advantage of post-2.1 expat features
-# 2005-02-01 fl   added iterparse implementation
-# 2005-03-02 fl   fixed iterparse support for pre-2.2 versions
-#
-# Copyright (c) 1999-2005 by Fredrik Lundh.  All rights reserved.
-#
-# fredrik@pythonware.com
-# http://www.pythonware.com
-#
-# --------------------------------------------------------------------
-# The ElementTree toolkit is
-#
-# Copyright (c) 1999-2005 by Fredrik Lundh
-#
-# By obtaining, using, and/or copying this software and/or its
-# associated documentation, you agree that you have read, understood,
-# and will comply with the following terms and conditions:
-#
-# Permission to use, copy, modify, and distribute this software and
-# its associated documentation for any purpose and without fee is
-# hereby granted, provided that the above copyright notice appears in
-# all copies, and that both that copyright notice and this permission
-# notice appear in supporting documentation, and that the name of
-# Secret Labs AB or the author not be used in advertising or publicity
-# pertaining to distribution of the software without specific, written
-# prior permission.
-#
-# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
-# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
-# ABILITY AND FITNESS.  IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
-# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
-# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
-# OF THIS SOFTWARE.
-# --------------------------------------------------------------------
-
-__all__ = [
-    # public symbols
-    "Comment",
-    "dump",
-    "Element", "ElementTree",
-    "fromstring",
-    "iselement", "iterparse",
-    "parse",
-    "PI", "ProcessingInstruction",
-    "QName",
-    "SubElement",
-    "tostring",
-    "TreeBuilder",
-    "VERSION", "XML",
-    "XMLTreeBuilder",
-    ]
-
-##
-# The <b>Element</b> type is a flexible container object, designed to
-# store hierarchical data structures in memory. The type can be
-# described as a cross between a list and a dictionary.
-# <p>
-# Each element has a number of properties associated with it:
-# <ul>
-# <li>a <i>tag</i>. This is a string identifying what kind of data
-# this element represents (the element type, in other words).</li>
-# <li>a number of <i>attributes</i>, stored in a Python dictionary.</li>
-# <li>a <i>text</i> string.</li>
-# <li>an optional <i>tail</i> string.</li>
-# <li>a number of <i>child elements</i>, stored in a Python sequence</li>
-# </ul>
-#
-# To create an element instance, use the {@link #Element} or {@link
-# #SubElement} factory functions.
-# <p>
-# The {@link #ElementTree} class can be used to wrap an element
-# structure, and convert it from and to XML.
-##
-
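A hedged sketch of the tag/attributes/text/children model just described, using the public factory functions listed in __all__; the element and attribute names are invented, and the module is assumed to be importable as ElementTree.

import ElementTree as ET

root = ET.Element("scene", {"name": "demo"})   # tag plus attribute dictionary
model = ET.SubElement(root, "model")           # child element
model.text = "panda.egg"

print root.get("name")                         # -> demo
print root.findtext("model")                   # -> panda.egg
print ET.tostring(root)                        # serialized XML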
-import string, sys, re
-
-class _SimpleElementPath:
-    # emulate pre-1.2 find/findtext/findall behaviour
-    def find(self, element, tag):
-        for elem in element:
-            if elem.tag == tag:
-                return elem
-        return None
-    def findtext(self, element, tag, default=None):
-        for elem in element:
-            if elem.tag == tag:
-                return elem.text or ""
-        return default
-    def findall(self, element, tag):
-        if tag[:3] == ".//":
-            return element.getiterator(tag[3:])
-        result = []
-        for elem in element:
-            if elem.tag == tag:
-                result.append(elem)
-        return result
-
-try:
-    import ElementPath
-except ImportError:
-    # FIXME: issue warning in this case?
-    ElementPath = _SimpleElementPath()
-
-# TODO: add support for custom namespace resolvers/default namespaces
-# TODO: add improved support for incremental parsing
-
-VERSION = "1.2.6"
-
-##
-# Internal element class.  This class defines the Element interface,
-# and provides a reference implementation of this interface.
-# <p>
-# You should not create instances of this class directly.  Use the
-# appropriate factory functions instead, such as {@link #Element}
-# and {@link #SubElement}.
-#
-# @see Element
-# @see SubElement
-# @see Comment
-# @see ProcessingInstruction
-
-class _ElementInterface:
-    # <tag attrib>text<child/>...</tag>tail
-
-    ##
-    # (Attribute) Element tag.
-
-    tag = None
-
-    ##
-    # (Attribute) Element attribute dictionary.  Where possible, use
-    # {@link #_ElementInterface.get},
-    # {@link #_ElementInterface.set},
-    # {@link #_ElementInterface.keys}, and
-    # {@link #_ElementInterface.items} to access
-    # element attributes.
-
-    attrib = None
-
-    ##
-    # (Attribute) Text before first subelement.  This is either a
-    # string or the value None, if there was no text.
-
-    text = None
-
-    ##
-    # (Attribute) Text after this element's end tag, but before the
-    # next sibling element's start tag.  This is either a string or
-    # the value None, if there was no text.
-
-    tail = None # text after end tag, if any
-
-    def __init__(self, tag, attrib):
-        self.tag = tag
-        self.attrib = attrib
-        self._children = []
-
-    def __repr__(self):
-        return "<Element %s at %x>" % (self.tag, id(self))
-
-    ##
-    # Creates a new element object of the same type as this element.
-    #
-    # @param tag Element tag.
-    # @param attrib Element attributes, given as a dictionary.
-    # @return A new element instance.
-
-    def makeelement(self, tag, attrib):
-        return Element(tag, attrib)
-
-    ##
-    # Returns the number of subelements.
-    #
-    # @return The number of subelements.
-
-    def __len__(self):
-        return len(self._children)
-
-    ##
-    # Returns the given subelement.
-    #
-    # @param index What subelement to return.
-    # @return The given subelement.
-    # @exception IndexError If the given element does not exist.
-
-    def __getitem__(self, index):
-        return self._children[index]
-
-    ##
-    # Replaces the given subelement.
-    #
-    # @param index What subelement to replace.
-    # @param element The new element value.
-    # @exception IndexError If the given element does not exist.
-    # @exception AssertionError If element is not a valid object.
-
-    def __setitem__(self, index, element):
-        assert iselement(element)
-        self._children[index] = element
-
-    ##
-    # Deletes the given subelement.
-    #
-    # @param index What subelement to delete.
-    # @exception IndexError If the given element does not exist.
-
-    def __delitem__(self, index):
-        del self._children[index]
-
-    ##
-    # Returns a list containing subelements in the given range.
-    #
-    # @param start The first subelement to return.
-    # @param stop The first subelement that shouldn't be returned.
-    # @return A sequence object containing subelements.
-
-    def __getslice__(self, start, stop):
-        return self._children[start:stop]
-
-    ##
-    # Replaces a number of subelements with elements from a sequence.
-    #
-    # @param start The first subelement to replace.
-    # @param stop The first subelement that shouldn't be replaced.
-    # @param elements A sequence object with zero or more elements.
-    # @exception AssertionError If a sequence member is not a valid object.
-
-    def __setslice__(self, start, stop, elements):
-        for element in elements:
-            assert iselement(element)
-        self._children[start:stop] = list(elements)
-
-    ##
-    # Deletes a number of subelements.
-    #
-    # @param start The first subelement to delete.
-    # @param stop The first subelement to leave in there.
-
-    def __delslice__(self, start, stop):
-        del self._children[start:stop]
-
-    ##
-    # Adds a subelement to the end of this element.
-    #
-    # @param element The element to add.
-    # @exception AssertionError If a sequence member is not a valid object.
-
-    def append(self, element):
-        assert iselement(element)
-        self._children.append(element)
-
-    ##
-    # Inserts a subelement at the given position in this element.
-    #
-    # @param index Where to insert the new subelement.
-    # @exception AssertionError If the element is not a valid object.
-
-    def insert(self, index, element):
-        assert iselement(element)
-        self._children.insert(index, element)
-
-    ##
-    # Removes a matching subelement.  Unlike the <b>find</b> methods,
-    # this method compares elements based on identity, not on tag
-    # value or contents.
-    #
-    # @param element What element to remove.
-    # @exception ValueError If a matching element could not be found.
-    # @exception AssertionError If the element is not a valid object.
-
-    def remove(self, element):
-        assert iselement(element)
-        self._children.remove(element)
-
-    ##
-    # Returns all subelements.  The elements are returned in document
-    # order.
-    #
-    # @return A list of subelements.
-    # @defreturn list of Element instances
-
-    def getchildren(self):
-        return self._children
-
-    ##
-    # Finds the first matching subelement, by tag name or path.
-    #
-    # @param path What element to look for.
-    # @return The first matching element, or None if no element was found.
-    # @defreturn Element or None
-
-    def find(self, path):
-        return ElementPath.find(self, path)
-
-    ##
-    # Finds text for the first matching subelement, by tag name or path.
-    #
-    # @param path What element to look for.
-    # @param default What to return if the element was not found.
-    # @return The text content of the first matching element, or the
-    #     default value if no element was found.  Note that if the element
-    #     is found, but has no text content, this method returns an
-    #     empty string.
-    # @defreturn string
-
-    def findtext(self, path, default=None):
-        return ElementPath.findtext(self, path, default)
-
-    ##
-    # Finds all matching subelements, by tag name or path.
-    #
-    # @param path What element to look for.
-    # @return A list or iterator containing all matching elements,
-    #    in document order.
-    # @defreturn list of Element instances
-
-    def findall(self, path):
-        return ElementPath.findall(self, path)
-
-    ##
-    # Resets an element.  This function removes all subelements, clears
-    # all attributes, and sets the text and tail attributes to None.
-
-    def clear(self):
-        self.attrib.clear()
-        self._children = []
-        self.text = self.tail = None
-
-    ##
-    # Gets an element attribute.
-    #
-    # @param key What attribute to look for.
-    # @param default What to return if the attribute was not found.
-    # @return The attribute value, or the default value, if the
-    #     attribute was not found.
-    # @defreturn string or None
-
-    def get(self, key, default=None):
-        return self.attrib.get(key, default)
-
-    ##
-    # Sets an element attribute.
-    #
-    # @param key What attribute to set.
-    # @param value The attribute value.
-
-    def set(self, key, value):
-        self.attrib[key] = value
-
-    ##
-    # Gets a list of attribute names.  The names are returned in an
-    # arbitrary order (just like for an ordinary Python dictionary).
-    #
-    # @return A list of element attribute names.
-    # @defreturn list of strings
-
-    def keys(self):
-        return self.attrib.keys()
-
-    ##
-    # Gets element attributes, as a sequence.  The attributes are
-    # returned in an arbitrary order.
-    #
-    # @return A list of (name, value) tuples for all attributes.
-    # @defreturn list of (string, string) tuples
-
-    def items(self):
-        return self.attrib.items()
-
-    ##
-    # Creates a tree iterator.  The iterator loops over this element
-    # and all subelements, in document order, and returns all elements
-    # with a matching tag.
-    # <p>
-    # If the tree structure is modified during iteration, the result
-    # is undefined.
-    #
-    # @param tag What tags to look for (default is to return all elements).
-    # @return A list or iterator containing all the matching elements.
-    # @defreturn list or iterator
-
-    def getiterator(self, tag=None):
-        nodes = []
-        if tag == "*":
-            tag = None
-        if tag is None or self.tag == tag:
-            nodes.append(self)
-        for node in self._children:
-            nodes.extend(node.getiterator(tag))
-        return nodes
-
-# compatibility
-_Element = _ElementInterface
-
-##
-# Element factory.  This function returns an object implementing the
-# standard Element interface.  The exact class or type of that object
-# is implementation dependent, but it will always be compatible with
-# the {@link #_ElementInterface} class in this module.
-# <p>
-# The element name, attribute names, and attribute values can be
-# either 8-bit ASCII strings or Unicode strings.
-#
-# @param tag The element name.
-# @param attrib An optional dictionary, containing element attributes.
-# @param **extra Additional attributes, given as keyword arguments.
-# @return An element instance.
-# @defreturn Element
-
-def Element(tag, attrib={}, **extra):
-    attrib = attrib.copy()
-    attrib.update(extra)
-    return _ElementInterface(tag, attrib)
-
-##
-# Subelement factory.  This function creates an element instance, and
-# appends it to an existing element.
-# <p>
-# The element name, attribute names, and attribute values can be
-# either 8-bit ASCII strings or Unicode strings.
-#
-# @param parent The parent element.
-# @param tag The subelement name.
-# @param attrib An optional dictionary, containing element attributes.
-# @param **extra Additional attributes, given as keyword arguments.
-# @return An element instance.
-# @defreturn Element
-
-def SubElement(parent, tag, attrib={}, **extra):
-    attrib = attrib.copy()
-    attrib.update(extra)
-    element = parent.makeelement(tag, attrib)
-    parent.append(element)
-    return element
-
-##
-# Comment element factory.  This factory function creates a special
-# element that will be serialized as an XML comment.
-# <p>
-# The comment string can be either an 8-bit ASCII string or a Unicode
-# string.
-#
-# @param text A string containing the comment string.
-# @return An element instance, representing a comment.
-# @defreturn Element
-
-def Comment(text=None):
-    element = Element(Comment)
-    element.text = text
-    return element
-
-##
-# PI element factory.  This factory function creates a special element
-# that will be serialized as an XML processing instruction.
-#
-# @param target A string containing the PI target.
-# @param text A string containing the PI contents, if any.
-# @return An element instance, representing a PI.
-# @defreturn Element
-
-def ProcessingInstruction(target, text=None):
-    element = Element(ProcessingInstruction)
-    element.text = target
-    if text:
-        element.text = element.text + " " + text
-    return element
-
-PI = ProcessingInstruction
-
-##
-# QName wrapper.  This can be used to wrap a QName attribute value, in
-# order to get proper namespace handling on output.
-#
-# @param text A string containing the QName value, in the form {uri}local,
-#     or, if the tag argument is given, the URI part of a QName.
-# @param tag Optional tag.  If given, the first argument is interpreted as
-#     a URI, and this argument is interpreted as a local name.
-# @return An opaque object, representing the QName.
-
-class QName:
-    def __init__(self, text_or_uri, tag=None):
-        if tag:
-            text_or_uri = "{%s}%s" % (text_or_uri, tag)
-        self.text = text_or_uri
-    def __str__(self):
-        return self.text
-    def __hash__(self):
-        return hash(self.text)
-    def __cmp__(self, other):
-        if isinstance(other, QName):
-            return cmp(self.text, other.text)
-        return cmp(self.text, other)
-
-##
-# ElementTree wrapper class.  This class represents an entire element
-# hierarchy, and adds some extra support for serialization to and from
-# standard XML.
-#
-# @param element Optional root element.
-# @keyparam file Optional file handle or name.  If given, the
-#     tree is initialized with the contents of this XML file.
-
-class ElementTree:
-
-    def __init__(self, element=None, file=None):
-        assert element is None or iselement(element)
-        self._root = element # first node
-        if file:
-            self.parse(file)
-
-    ##
-    # Gets the root element for this tree.
-    #
-    # @return An element instance.
-    # @defreturn Element
-
-    def getroot(self):
-        return self._root
-
-    ##
-    # Replaces the root element for this tree.  This discards the
-    # current contents of the tree, and replaces it with the given
-    # element.  Use with care.
-    #
-    # @param element An element instance.
-
-    def _setroot(self, element):
-        assert iselement(element)
-        self._root = element
-
-    ##
-    # Loads an external XML document into this element tree.
-    #
-    # @param source A file name or file object.
-    # @param parser An optional parser instance.  If not given, the
-    #     standard {@link XMLTreeBuilder} parser is used.
-    # @return The document root element.
-    # @defreturn Element
-
-    def parse(self, source, parser=None):
-        if not hasattr(source, "read"):
-            source = open(source, "rb")
-        if not parser:
-            parser = XMLTreeBuilder()
-        while 1:
-            data = source.read(32768)
-            if not data:
-                break
-            parser.feed(data)
-        self._root = parser.close()
-        return self._root
-
-    ##
-    # Creates a tree iterator for the root element.  The iterator loops
-    # over all elements in this tree, in document order.
-    #
-    # @param tag What tags to look for (default is to return all elements)
-    # @return An iterator.
-    # @defreturn iterator
-
-    def getiterator(self, tag=None):
-        assert self._root is not None
-        return self._root.getiterator(tag)
-
-    ##
-    # Finds the first toplevel element with given tag.
-    # Same as getroot().find(path).
-    #
-    # @param path What element to look for.
-    # @return The first matching element, or None if no element was found.
-    # @defreturn Element or None
-
-    def find(self, path):
-        assert self._root is not None
-        if path[:1] == "/":
-            path = "." + path
-        return self._root.find(path)
-
-    ##
-    # Finds the element text for the first toplevel element with given
-    # tag.  Same as getroot().findtext(path).
-    #
-    # @param path What toplevel element to look for.
-    # @param default What to return if the element was not found.
-    # @return The text content of the first matching element, or the
-    #     default value if no element was found.  Note that if the element
-    #     is found, but has no text content, this method returns an
-    #     empty string.
-    # @defreturn string
-
-    def findtext(self, path, default=None):
-        assert self._root is not None
-        if path[:1] == "/":
-            path = "." + path
-        return self._root.findtext(path, default)
-
-    ##
-    # Finds all toplevel elements with the given tag.
-    # Same as getroot().findall(path).
-    #
-    # @param path What element to look for.
-    # @return A list or iterator containing all matching elements,
-    #    in document order.
-    # @defreturn list of Element instances
-
-    def findall(self, path):
-        assert self._root is not None
-        if path[:1] == "/":
-            path = "." + path
-        return self._root.findall(path)
-
-    ##
-    # Writes the element tree to a file, as XML.
-    #
-    # @param file A file name, or a file object opened for writing.
-    # @param encoding Optional output encoding (default is US-ASCII).
-
-    def write(self, file, encoding="us-ascii"):
-        assert self._root is not None
-        if not hasattr(file, "write"):
-            file = open(file, "wb")
-        if not encoding:
-            encoding = "us-ascii"
-        elif encoding != "utf-8" and encoding != "us-ascii":
-            file.write("<?xml version='1.0' encoding='%s'?>\n" % encoding)
-        self._write(file, self._root, encoding, {})
-
-    def _write(self, file, node, encoding, namespaces):
-        # write XML to file
-        tag = node.tag
-        if tag is Comment:
-            file.write("<!-- %s -->" % _escape_cdata(node.text, encoding))
-        elif tag is ProcessingInstruction:
-            file.write("<?%s?>" % _escape_cdata(node.text, encoding))
-        else:
-            items = node.items()
-            xmlns_items = [] # new namespaces in this scope
-            try:
-                if isinstance(tag, QName) or tag[:1] == "{":
-                    tag, xmlns = fixtag(tag, namespaces)
-                    if xmlns: xmlns_items.append(xmlns)
-            except TypeError:
-                _raise_serialization_error(tag)
-            file.write("<" + _encode(tag, encoding))
-            if items or xmlns_items:
-                items.sort() # lexical order
-                for k, v in items:
-                    try:
-                        if isinstance(k, QName) or k[:1] == "{":
-                            k, xmlns = fixtag(k, namespaces)
-                            if xmlns: xmlns_items.append(xmlns)
-                    except TypeError:
-                        _raise_serialization_error(k)
-                    try:
-                        if isinstance(v, QName):
-                            v, xmlns = fixtag(v, namespaces)
-                            if xmlns: xmlns_items.append(xmlns)
-                    except TypeError:
-                        _raise_serialization_error(v)
-                    file.write(" %s=\"%s\"" % (_encode(k, encoding),
-                                               _escape_attrib(v, encoding)))
-                for k, v in xmlns_items:
-                    file.write(" %s=\"%s\"" % (_encode(k, encoding),
-                                               _escape_attrib(v, encoding)))
-            if node.text or len(node):
-                file.write(">")
-                if node.text:
-                    file.write(_escape_cdata(node.text, encoding))
-                for n in node:
-                    self._write(file, n, encoding, namespaces)
-                file.write("</" + _encode(tag, encoding) + ">")
-            else:
-                file.write(" />")
-            for k, v in xmlns_items:
-                del namespaces[v]
-        if node.tail:
-            file.write(_escape_cdata(node.tail, encoding))
-
-# --------------------------------------------------------------------
-# helpers
-
-##
-# Checks if an object appears to be a valid element object.
-#
-# @param element An element instance.
-# @return A true value if this is an element object.
-# @defreturn flag
-
-def iselement(element):
-    # FIXME: not sure about this; might be a better idea to look
-    # for tag/attrib/text attributes
-    return isinstance(element, _ElementInterface) or hasattr(element, "tag")
-
-##
-# Writes an element tree or element structure to sys.stdout.  This
-# function should be used for debugging only.
-# <p>
-# The exact output format is implementation dependent.  In this
-# version, it's written as an ordinary XML file.
-#
-# @param elem An element tree or an individual element.
-
-def dump(elem):
-    # debugging
-    if not isinstance(elem, ElementTree):
-        elem = ElementTree(elem)
-    elem.write(sys.stdout)
-    tail = elem.getroot().tail
-    if not tail or tail[-1] != "\n":
-        sys.stdout.write("\n")
-
-def _encode(s, encoding):
-    try:
-        return s.encode(encoding)
-    except AttributeError:
-        return s # 1.5.2: assume the string uses the right encoding
-
-_escape = re.compile(u"[&<>\"\u0080-\uffff]+")
-
-_escape_map = {
-    "&": "&amp;",
-    "<": "&lt;",
-    ">": "&gt;",
-    '"': "&quot;",
-}
-
-_namespace_map = {
-    # "well-known" namespace prefixes
-    "http://www.w3.org/XML/1998/namespace": "xml",
-    "http://www.w3.org/1999/xhtml": "html",
-    "http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf",
-    "http://schemas.xmlsoap.org/wsdl/": "wsdl",
-}
-
-def _raise_serialization_error(text):
-    raise TypeError(
-        "cannot serialize %r (type %s)" % (text, type(text).__name__)
-        )
-
-def _encode_entity(text, pattern=_escape):
-    # map reserved and non-ascii characters to numerical entities
-    def escape_entities(m, map=_escape_map):
-        out = []
-        append = out.append
-        for char in m.group():
-            text = map.get(char)
-            if text is None:
-                text = "&#%d;" % ord(char)
-            append(text)
-        return string.join(out, "")
-    try:
-        return _encode(pattern.sub(escape_entities, text), "ascii")
-    except TypeError:
-        _raise_serialization_error(text)
-
-#
-# the following functions assume an ascii-compatible encoding
-# (or "utf-16")
-
-def _escape_cdata(text, encoding=None):
-    # escape character data
-    try:
-        if encoding:
-            try:
-                text = _encode(text, encoding)
-            except UnicodeError:
-                return _encode_entity(text)
-        text = text.replace("&", "&amp;")
-        text = text.replace("<", "&lt;")
-        text = text.replace( ">", "&gt;")
-        return text
-    except (TypeError, AttributeError):
-        _raise_serialization_error(text)
-
-def _escape_attrib(text, encoding=None):
-    # escape attribute value
-    try:
-        if encoding:
-            try:
-                text = _encode(text, encoding)
-            except UnicodeError:
-                return _encode_entity(text)
-        text = text.replace("&", "&amp;")
-        text = text.replace("'", "&apos;") # FIXME: overkill
-        text = text.replace("\"", "&quot;")
-        text = text.replace("<", "&lt;")
-        text = text.replace(">", "&gt;")
-        return text
-    except (TypeError, AttributeError):
-        _raise_serialization_error(text)
-
-def fixtag(tag, namespaces):
-    # given a decorated tag (of the form {uri}tag), return prefixed
-    # tag and namespace declaration, if any
-    if isinstance(tag, QName):
-        tag = tag.text
-    namespace_uri, tag = string.split(tag[1:], "}", 1)
-    prefix = namespaces.get(namespace_uri)
-    if prefix is None:
-        prefix = _namespace_map.get(namespace_uri)
-        if prefix is None:
-            prefix = "ns%d" % len(namespaces)
-        namespaces[namespace_uri] = prefix
-        if prefix == "xml":
-            xmlns = None
-        else:
-            xmlns = ("xmlns:%s" % prefix, namespace_uri)
-    else:
-        xmlns = None
-    return "%s:%s" % (prefix, tag), xmlns
-
-##
-# Parses an XML document into an element tree.
-#
-# @param source A filename or file object containing XML data.
-# @param parser An optional parser instance.  If not given, the
-#     standard {@link XMLTreeBuilder} parser is used.
-# @return An ElementTree instance
-
-def parse(source, parser=None):
-    tree = ElementTree()
-    tree.parse(source, parser)
-    return tree
-
-##
-# Parses an XML document into an element tree incrementally, and reports
-# what's going on to the user.
-#
-# @param source A filename or file object containing XML data.
-# @param events A list of events to report back.  If omitted, only "end"
-#     events are reported.
-# @return A (event, elem) iterator.
-
-class iterparse:
-
-    def __init__(self, source, events=None):
-        if not hasattr(source, "read"):
-            source = open(source, "rb")
-        self._file = source
-        self._events = []
-        self._index = 0
-        self.root = self._root = None
-        self._parser = XMLTreeBuilder()
-        # wire up the parser for event reporting
-        parser = self._parser._parser
-        append = self._events.append
-        if events is None:
-            events = ["end"]
-        for event in events:
-            if event == "start":
-                try:
-                    parser.ordered_attributes = 1
-                    parser.specified_attributes = 1
-                    def handler(tag, attrib_in, event=event, append=append,
-                                start=self._parser._start_list):
-                        append((event, start(tag, attrib_in)))
-                    parser.StartElementHandler = handler
-                except AttributeError:
-                    def handler(tag, attrib_in, event=event, append=append,
-                                start=self._parser._start):
-                        append((event, start(tag, attrib_in)))
-                    parser.StartElementHandler = handler
-            elif event == "end":
-                def handler(tag, event=event, append=append,
-                            end=self._parser._end):
-                    append((event, end(tag)))
-                parser.EndElementHandler = handler
-            elif event == "start-ns":
-                def handler(prefix, uri, event=event, append=append):
-                    try:
-                        uri = _encode(uri, "ascii")
-                    except UnicodeError:
-                        pass
-                    append((event, (prefix or "", uri)))
-                parser.StartNamespaceDeclHandler = handler
-            elif event == "end-ns":
-                def handler(prefix, event=event, append=append):
-                    append((event, None))
-                parser.EndNamespaceDeclHandler = handler
-
-    def next(self):
-        while 1:
-            try:
-                item = self._events[self._index]
-            except IndexError:
-                if self._parser is None:
-                    self.root = self._root
-                    try:
-                        raise StopIteration
-                    except NameError:
-                        raise IndexError
-                # load event buffer
-                del self._events[:]
-                self._index = 0
-                data = self._file.read(16384)
-                if data:
-                    self._parser.feed(data)
-                else:
-                    self._root = self._parser.close()
-                    self._parser = None
-            else:
-                self._index = self._index + 1
-                return item
-
-    try:
-        iter
-        def __iter__(self):
-            return self
-    except NameError:
-        def __getitem__(self, index):
-            return self.next()
-
-##
-# Parses an XML document from a string constant.  This function can
-# be used to embed "XML literals" in Python code.
-#
-# @param source A string containing XML data.
-# @return An Element instance.
-# @defreturn Element
-
-def XML(text):
-    parser = XMLTreeBuilder()
-    parser.feed(text)
-    return parser.close()
-
-##
-# Parses an XML document from a string constant, and also returns
-# a dictionary which maps from element id:s to elements.
-#
-# @param source A string containing XML data.
-# @return A tuple containing an Element instance and a dictionary.
-# @defreturn (Element, dictionary)
-
-def XMLID(text):
-    parser = XMLTreeBuilder()
-    parser.feed(text)
-    tree = parser.close()
-    ids = {}
-    for elem in tree.getiterator():
-        id = elem.get("id")
-        if id:
-            ids[id] = elem
-    return tree, ids
-
-##
-# Parses an XML document from a string constant.  Same as {@link #XML}.
-#
-# @def fromstring(text)
-# @param source A string containing XML data.
-# @return An Element instance.
-# @defreturn Element
-
-fromstring = XML
-
-##
-# Generates a string representation of an XML element, including all
-# subelements.
-#
-# @param element An Element instance.
-# @return An encoded string containing the XML data.
-# @defreturn string
-
-def tostring(element, encoding=None):
-    class dummy:
-        pass
-    data = []
-    file = dummy()
-    file.write = data.append
-    ElementTree(element).write(file, encoding)
-    return string.join(data, "")
-
-##
-# Generic element structure builder.  This builder converts a sequence
-# of {@link #TreeBuilder.start}, {@link #TreeBuilder.data}, and {@link
-# #TreeBuilder.end} method calls to a well-formed element structure.
-# <p>
-# You can use this class to build an element structure using a custom XML
-# parser, or a parser for some other XML-like format.
-#
-# @param element_factory Optional element factory.  This factory
-#    is called to create new Element instances, as necessary.
-
-class TreeBuilder:
-
-    def __init__(self, element_factory=None):
-        self._data = [] # data collector
-        self._elem = [] # element stack
-        self._last = None # last element
-        self._tail = None # true if we're after an end tag
-        if element_factory is None:
-            element_factory = _ElementInterface
-        self._factory = element_factory
-
-    ##
-    # Flushes the parser buffers, and returns the toplevel document
-    # element.
-    #
-    # @return An Element instance.
-    # @defreturn Element
-
-    def close(self):
-        assert len(self._elem) == 0, "missing end tags"
-        assert self._last != None, "missing toplevel element"
-        return self._last
-
-    def _flush(self):
-        if self._data:
-            if self._last is not None:
-                text = string.join(self._data, "")
-                if self._tail:
-                    assert self._last.tail is None, "internal error (tail)"
-                    self._last.tail = text
-                else:
-                    assert self._last.text is None, "internal error (text)"
-                    self._last.text = text
-            self._data = []
-
-    ##
-    # Adds text to the current element.
-    #
-    # @param data A string.  This should be either an 8-bit string
-    #    containing ASCII text, or a Unicode string.
-
-    def data(self, data):
-        self._data.append(data)
-
-    ##
-    # Opens a new element.
-    #
-    # @param tag The element name.
-    # @param attrib A dictionary containing element attributes.
-    # @return The opened element.
-    # @defreturn Element
-
-    def start(self, tag, attrs):
-        self._flush()
-        self._last = elem = self._factory(tag, attrs)
-        if self._elem:
-            self._elem[-1].append(elem)
-        self._elem.append(elem)
-        self._tail = 0
-        return elem
-
-    ##
-    # Closes the current element.
-    #
-    # @param tag The element name.
-    # @return The closed element.
-    # @defreturn Element
-
-    def end(self, tag):
-        self._flush()
-        self._last = self._elem.pop()
-        assert self._last.tag == tag,\
-               "end tag mismatch (expected %s, got %s)" % (
-                   self._last.tag, tag)
-        self._tail = 1
-        return self._last
-
-##
-# Element structure builder for XML source data, based on the
-# <b>expat</b> parser.
-#
-# @keyparam target Target object.  If omitted, the builder uses an
-#     instance of the standard {@link #TreeBuilder} class.
-# @keyparam html Predefine HTML entities.  This flag is not supported
-#     by the current implementation.
-# @see #ElementTree
-# @see #TreeBuilder
-
-class XMLTreeBuilder:
-
-    def __init__(self, html=0, target=None):
-        try:
-            from xml.parsers import expat
-        except ImportError:
-            raise ImportError(
-                "No module named expat; use SimpleXMLTreeBuilder instead"
-                )
-        self._parser = parser = expat.ParserCreate(None, "}")
-        if target is None:
-            target = TreeBuilder()
-        self._target = target
-        self._names = {} # name memo cache
-        # callbacks
-        parser.DefaultHandlerExpand = self._default
-        parser.StartElementHandler = self._start
-        parser.EndElementHandler = self._end
-        parser.CharacterDataHandler = self._data
-        # let expat do the buffering, if supported
-        try:
-            self._parser.buffer_text = 1
-        except AttributeError:
-            pass
-        # use new-style attribute handling, if supported
-        try:
-            self._parser.ordered_attributes = 1
-            self._parser.specified_attributes = 1
-            parser.StartElementHandler = self._start_list
-        except AttributeError:
-            pass
-        encoding = None
-        if not parser.returns_unicode:
-            encoding = "utf-8"
-        # target.xml(encoding, None)
-        self._doctype = None
-        self.entity = {}
-
-    def _fixtext(self, text):
-        # convert text string to ascii, if possible
-        try:
-            return _encode(text, "ascii")
-        except UnicodeError:
-            return text
-
-    def _fixname(self, key):
-        # expand qname, and convert name string to ascii, if possible
-        try:
-            name = self._names[key]
-        except KeyError:
-            name = key
-            if "}" in name:
-                name = "{" + name
-            self._names[key] = name = self._fixtext(name)
-        return name
-
-    def _start(self, tag, attrib_in):
-        fixname = self._fixname
-        tag = fixname(tag)
-        attrib = {}
-        for key, value in attrib_in.items():
-            attrib[fixname(key)] = self._fixtext(value)
-        return self._target.start(tag, attrib)
-
-    def _start_list(self, tag, attrib_in):
-        fixname = self._fixname
-        tag = fixname(tag)
-        attrib = {}
-        if attrib_in:
-            for i in range(0, len(attrib_in), 2):
-                attrib[fixname(attrib_in[i])] = self._fixtext(attrib_in[i+1])
-        return self._target.start(tag, attrib)
-
-    def _data(self, text):
-        return self._target.data(self._fixtext(text))
-
-    def _end(self, tag):
-        return self._target.end(self._fixname(tag))
-
-    def _default(self, text):
-        prefix = text[:1]
-        if prefix == "&":
-            # deal with undefined entities
-            try:
-                self._target.data(self.entity[text[1:-1]])
-            except KeyError:
-                from xml.parsers import expat
-                raise expat.error(
-                    "undefined entity %s: line %d, column %d" %
-                    (text, self._parser.ErrorLineNumber,
-                    self._parser.ErrorColumnNumber)
-                    )
-        elif prefix == "<" and text[:9] == "<!DOCTYPE":
-            self._doctype = [] # inside a doctype declaration
-        elif self._doctype is not None:
-            # parse doctype contents
-            if prefix == ">":
-                self._doctype = None
-                return
-            text = string.strip(text)
-            if not text:
-                return
-            self._doctype.append(text)
-            n = len(self._doctype)
-            if n > 2:
-                type = self._doctype[1]
-                if type == "PUBLIC" and n == 4:
-                    name, type, pubid, system = self._doctype
-                elif type == "SYSTEM" and n == 3:
-                    name, type, system = self._doctype
-                    pubid = None
-                else:
-                    return
-                if pubid:
-                    pubid = pubid[1:-1]
-                self.doctype(name, pubid, system[1:-1])
-                self._doctype = None
-
-    ##
-    # Handles a doctype declaration.
-    #
-    # @param name Doctype name.
-    # @param pubid Public identifier.
-    # @param system System identifier.
-
-    def doctype(self, name, pubid, system):
-        pass
-
-    ##
-    # Feeds data to the parser.
-    #
-    # @param data Encoded data.
-
-    def feed(self, data):
-        self._parser.Parse(data, 0)
-
-    ##
-    # Finishes feeding data to the parser.
-    #
-    # @return An element structure.
-    # @defreturn Element
-
-    def close(self):
-        self._parser.Parse("", 1) # end of data
-        tree = self._target.close()
-        del self._target, self._parser # get rid of circular references
-        return tree

+ 1 - 1
direct/src/showbase/HTMLTree.py

@@ -1,4 +1,4 @@
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 
 class HTMLTree(ET.ElementTree):
     def __init__(self, title):

+ 0 - 1
direct/src/showbase/PythonUtil.py

@@ -51,7 +51,6 @@ import traceback
 import __builtin__
 from StringIO import StringIO
 import marshal
-import ElementTree as ET
 import BpDb
 import unicodedata
 import bisect

+ 0 - 71
direct/src/test/ModelScreenShot.py

@@ -1,71 +0,0 @@
-import direct
-from pandac.PandaModules import loadPrcFileData
-
-from direct.showbase.DirectObject import DirectObject
-from direct.directbase.DirectStart import *
-from pandac.PandaModules import *
-import direct.gui.DirectGuiGlobals as DGG
-from direct.gui.DirectGui import *
-from direct.task import Task
-
-from direct.directnotify import DirectNotifyGlobal
-import math
-from operator import *
-
-import ModelScreenShotGlobals
-
-class ModelScreenShot(DirectObject):
-    notify = DirectNotifyGlobal.directNotify.newCategory("ModelScreenShot")
-
-    def __init__(self):
-
-        # Grab a list of models to capture screenshots of from an array in
-        # the globals file
-        self.modelsToView = ModelScreenShotGlobals.models
-        self.models = []
-
-        # Attach all the models listed to render and save a pointer to them
-        # in an array.  Then hide the model.
-        for model in self.modelsToView:
-            m = loader.loadModel(model)
-            m.reparentTo(render)
-            self.models.append(m)
-            m.hide()
-
-        # Set a nice farplane far, far away
-        self.lens = base.camera.getChild(0).node().getLens()
-        self.lens.setFar(10000)
-
-        # Hide the cursor
-        self.props = WindowProperties()
-        self.props.setCursorHidden(0)
-        base.win.requestProperties(self.props)
-
-        # Method for getting the distance to an object from the camera
-        def getDist(obj, lens):
-            rad = obj.getBounds().getRadius()
-            fov = lens.getFov()
-            dist = rad / math.tan(deg2Rad(min(fov[0], fov[1]/2.0)))
-            return dist
-
-        # Determine the optimal camera position
-        def getOptCamPos(obj, dist):
-            cen = obj.getBounds().getCenter()
-            camPos = VBase3(cen.getX(), -dist, cen.getZ())
-            return camPos
-
-        # Generate screenshots
-        def generatePics():
-            for model in self.models:
-                model.show()
-                base.camera.setPos(getOptCamPos(model, getDist(model, self.lens)))
-                uFilename = model.getName().replace('.egg','.jpg')
-                self.notify.info("screenshot %s   camera pos: %s" % (uFilename, base.camera.getPos()))
-                base.graphicsEngine.renderFrame()
-                base.screenshot(namePrefix = uFilename, defaultFilename = 0)
-                model.hide()
-
-        generatePics()
-        
-mss = ModelScreenShot()
-run()
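
The deleted ModelScreenShot helper backs the camera away until the model's bounding sphere fills the field of view, i.e. dist = radius / tan(fov / 2).  A standalone C++ sketch of that calculation (function and parameter names are hypothetical; it takes the smaller of the two FOV angles, which appears to be the intent of the original getDist):

#include <cmath>

// Distance at which a sphere of the given radius fills a camera whose
// field of view is given in degrees; uses the smaller of the two angles.
double framing_distance(double radius, double fov_h_deg, double fov_v_deg) {
  const double pi = 3.14159265358979323846;
  double fov_deg = (fov_h_deg < fov_v_deg) ? fov_h_deg : fov_v_deg;
  double half_fov = (fov_deg * pi / 180.0) * 0.5;
  return radius / std::tan(half_fov);
}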

+ 0 - 6
direct/src/test/ModelScreenShotGlobals.py

@@ -1,6 +0,0 @@
-# Replace these with the models you want to screenshot
-models = [
-    'models/misc/smiley',
-    'models/misc/sphere',
-    'models/misc/xyzAxis'
-]

+ 0 - 0
direct/src/test/Sources.pp


+ 0 - 0
direct/src/test/__init__.py


+ 25 - 0
dtool/src/cppparser/cppBison.yxx

@@ -270,6 +270,7 @@ pop_struct() {
 %token KW_MUTABLE
 %token KW_NAMESPACE
 %token KW_NEW
+%token KW_NOEXCEPT
 %token KW_OPERATOR
 %token KW_PRIVATE
 %token KW_PROTECTED
@@ -945,6 +946,10 @@ function_post:
         | KW_CONST
 {
   $$ = (int)CPPFunctionType::F_const_method;
+}
+        | function_post KW_NOEXCEPT
+{
+  $$ = (int)CPPFunctionType::F_noexcept;
 }
         | function_post KW_THROW '(' ')'
 {
@@ -1234,6 +1239,11 @@ instance_identifier:
 {
   $$ = $2;
   $$->add_modifier(IIT_reference);
+}
+        | ANDAND instance_identifier  %prec UNARY
+{
+  $$ = $2;
+  $$->add_modifier(IIT_rvalue_reference);
 }
         | SCOPING '*' instance_identifier  %prec UNARY
 {
@@ -1431,6 +1441,11 @@ not_paren_formal_parameter_identifier:
 {
   $$ = $2;
   $$->add_modifier(IIT_reference);
+}
+        | ANDAND not_paren_formal_parameter_identifier  %prec UNARY
+{
+  $$ = $2;
+  $$->add_modifier(IIT_rvalue_reference);
 }
         | SCOPING '*' not_paren_formal_parameter_identifier  %prec UNARY
 {
@@ -1471,6 +1486,11 @@ formal_parameter_identifier:
 {
   $$ = $2;
   $$->add_modifier(IIT_reference);
+}
+        | ANDAND formal_parameter_identifier  %prec UNARY
+{
+  $$ = $2;
+  $$->add_modifier(IIT_rvalue_reference);
 }
         | SCOPING '*' formal_parameter_identifier  %prec UNARY
 {
@@ -1514,6 +1534,11 @@ empty_instance_identifier:
 {
   $$ = $2;
   $$->add_modifier(IIT_reference);
+}
+        | ANDAND empty_instance_identifier  %prec UNARY
+{
+  $$ = $2;
+  $$->add_modifier(IIT_rvalue_reference);
 }
         | SCOPING '*' empty_instance_identifier  %prec UNARY
 {
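
The additions above introduce the KW_NOEXCEPT token and an ANDAND alternative on each instance-identifier rule, which is what lets the parser read C++11-style declarations.  A hypothetical header fragment (not from this commit) that the grammar should now accept:

class Buffer {
public:
  Buffer(Buffer &&other) noexcept;              // rvalue-reference parameter
  Buffer &operator =(Buffer &&other) noexcept;  // noexcept in function_post
  void swap(Buffer &other) noexcept;
private:
  char *_data;
};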

+ 3 - 0
dtool/src/cppparser/cppFunctionType.cxx

@@ -243,6 +243,9 @@ output_instance(ostream &out, int indent_level, CPPScope *scope,
   if (_flags & F_const_method) {
     out << " const";
   }
+  if (_flags & F_noexcept) {
+    out << " noexcept";
+  }
 }
 
 ////////////////////////////////////////////////////////////////////

+ 1 - 0
dtool/src/cppparser/cppFunctionType.h

@@ -36,6 +36,7 @@ public:
     F_method_pointer    = 0x10,
     F_unary_op          = 0x20,
     F_operator          = 0x40,
+    F_noexcept          = 0x80,
   };
 
   CPPFunctionType(CPPType *return_type, CPPParameterList *parameters,

+ 8 - 2
dtool/src/cppparser/cppInstanceIdentifier.cxx

@@ -133,7 +133,7 @@ add_func_modifier(CPPParameterList *params, int flags) {
   // function, check if the parameter list is empty.  If it is, this
   // is really a unary operator, so set the unary_op flag.  Operators
   // () and [] are never considered unary operators.
-  if (_ident != NULL && 
+  if (_ident != NULL &&
       _ident->get_simple_name().substr(0, 9) == "operator ") {
 
     if (_ident->get_simple_name() != string("operator ()") &&
@@ -245,7 +245,13 @@ r_unroll_type(CPPType *start_type,
     break;
 
   case IIT_reference:
-    result = new CPPReferenceType(r_unroll_type(start_type, mi));
+    result = new CPPReferenceType(r_unroll_type(start_type, mi),
+                                  CPPReferenceType::VC_lvalue);
+    break;
+
+  case IIT_rvalue_reference:
+    result = new CPPReferenceType(r_unroll_type(start_type, mi),
+                                  CPPReferenceType::VC_rvalue);
     break;
 
   case IIT_scoped_pointer:

+ 1 - 0
dtool/src/cppparser/cppInstanceIdentifier.h

@@ -32,6 +32,7 @@ class CPPPreprocessor;
 enum CPPInstanceIdentifierType {
   IIT_pointer,
   IIT_reference,
+  IIT_rvalue_reference,
   IIT_scoped_pointer,
   IIT_array,
   IIT_const,

+ 20 - 2
dtool/src/cppparser/cppPreprocessor.cxx

@@ -829,6 +829,8 @@ internal_get_next_token() {
     }
     if (next_c == '<') return CPPToken(LSHIFT, first_line, first_col, first_file);
     if (next_c == '=') return CPPToken(LECOMPARE, first_line, first_col, first_file);
+    if (next_c == ':') return CPPToken('[', first_line, first_col, first_file);
+    if (next_c == '%') return CPPToken('{', first_line, first_col, first_file);
     break;
 
   case '>':
@@ -872,6 +874,7 @@ internal_get_next_token() {
 
   case ':':
     if (next_c == ':') return CPPToken(SCOPE, first_line, first_col, first_file);
+    if (next_c == '>') return CPPToken(']', first_line, first_col, first_file);
     break;
 
   case '*':
@@ -884,6 +887,7 @@ internal_get_next_token() {
 
   case '%':
     if (next_c == '=') return CPPToken(MODEQUAL, first_line, first_col, first_file);
+    if (next_c == '>') return CPPToken('}', first_line, first_col, first_file);
     break;
   }
 
@@ -1416,7 +1420,7 @@ handle_include_directive(const string &args, int first_line,
       found_file = true;
       source = CPPFile::S_local;
     }
-    
+
     // Search the same directory as the includer.
     if (!angle_quotes && !found_file) {
       Filename match(get_file()._filename.get_dirname(), filename);
@@ -1444,7 +1448,7 @@ handle_include_directive(const string &args, int first_line,
         }
       }
     }
-    
+
     if (!found_file) {
       warning("Cannot find " + filename.get_fullpath(),
               first_line, first_col, first_file);
@@ -2037,6 +2041,7 @@ check_keyword(const string &name) {
   if (name == "__make_seq") return KW_MAKE_SEQ;
   if (name == "mutable") return KW_MUTABLE;
   if (name == "namespace") return KW_NAMESPACE;
+  if (name == "noexcept") return KW_NOEXCEPT;
   if (name == "new") return KW_NEW;
   if (name == "operator") return KW_OPERATOR;
   if (name == "private") return KW_PRIVATE;
@@ -2065,6 +2070,19 @@ check_keyword(const string &name) {
   if (name == "wchar_t") return KW_WCHAR_T;
   if (name == "while") return KW_WHILE;
 
+  // These are alternative ways to refer to built-in operators.
+  if (name == "and") return ANDAND;
+  if (name == "and_eq") return ANDEQUAL;
+  if (name == "bitand") return '&';
+  if (name == "bitor") return '|';
+  if (name == "compl") return '~';
+  if (name == "not") return '!';
+  if (name == "not_eq") return NECOMPARE;
+  if (name == "or") return OROR;
+  if (name == "or_eq") return OREQUAL;
+  if (name == "xor") return '^';
+  if (name == "xor_eq") return XOREQUAL;
+
   if (!cpp_longlong_keyword.empty() && name == cpp_longlong_keyword) {
     return KW_LONGLONG;
   }
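
The new tokenizer cases map the digraphs <: :> <% %> to the ordinary bracket and brace tokens, and the alternative operator spellings (and, or, not, bitand, ...) to their primary forms.  A hypothetical snippet, written in standard but unusual C++, that the preprocessor should now tokenize correctly:

bool in_range(int value, int lo, int hi)
<%                                   // <%  reads as  {
  int bounds<:2:> = <% lo, hi %>;    // <:  reads as  [  and  :>  as  ]
  return value >= bounds<:0:> and not (value > bounds<:1:>);
%>                                   // %>  reads as  }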

+ 17 - 5
dtool/src/cppparser/cppReferenceType.cxx

@@ -21,9 +21,10 @@
 //  Description:
 ////////////////////////////////////////////////////////////////////
 CPPReferenceType::
-CPPReferenceType(CPPType *pointing_at) :
+CPPReferenceType(CPPType *pointing_at, ValueCategory vcat) :
   CPPType(CPPFile()),
-  _pointing_at(pointing_at)
+  _pointing_at(pointing_at),
+  _value_category(vcat)
 {
 }
 
@@ -146,8 +147,14 @@ void CPPReferenceType::
 output_instance(ostream &out, int indent_level, CPPScope *scope,
                 bool complete, const string &prename,
                 const string &name) const {
-  _pointing_at->output_instance(out, indent_level, scope, complete,
-                                "&" + prename, name);
+
+  if (_value_category == VC_rvalue) {
+    _pointing_at->output_instance(out, indent_level, scope, complete,
+                                  "&&" + prename, name);
+  } else {
+    _pointing_at->output_instance(out, indent_level, scope, complete,
+                                  "&" + prename, name);
+  }
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -182,7 +189,8 @@ is_equal(const CPPDeclaration *other) const {
   const CPPReferenceType *ot = ((CPPDeclaration *)other)->as_reference_type();
   assert(ot != NULL);
 
-  return _pointing_at == ot->_pointing_at;
+  return (_pointing_at == ot->_pointing_at) &&
+         (_value_category == ot->_value_category);
 }
 
 
@@ -198,5 +206,9 @@ is_less(const CPPDeclaration *other) const {
   const CPPReferenceType *ot = ((CPPDeclaration *)other)->as_reference_type();
   assert(ot != NULL);
 
+  if (_value_category != ot->_value_category) {
+    return (_value_category < ot->_value_category);
+  }
+
   return _pointing_at < ot->_pointing_at;
 }

+ 11 - 2
dtool/src/cppparser/cppReferenceType.h

@@ -21,13 +21,22 @@
 
 ///////////////////////////////////////////////////////////////////
 //       Class : CPPReferenceType
-// Description :
+// Description : Either an lvalue- or rvalue-reference.
 ////////////////////////////////////////////////////////////////////
 class CPPReferenceType : public CPPType {
 public:
-  CPPReferenceType(CPPType *pointing_at);
+  enum ValueCategory {
+    VC_lvalue,
+    VC_rvalue
+  };
+
+  CPPReferenceType(CPPType *pointing_at, ValueCategory vcat=VC_lvalue);
 
   CPPType *_pointing_at;
+  ValueCategory _value_category;
+
+  inline bool is_lvalue() const;
+  inline bool is_rvalue() const;
 
   virtual bool is_fully_specified() const;
   virtual CPPDeclaration *substitute_decl(SubstDecl &subst,
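
Because the value category now participates in is_equal() and is_less(), an lvalue reference and an rvalue reference to the same type stay distinct CPPReferenceType instances, so overload pairs like the following (hypothetical example) no longer collapse into one signature:

struct Foo { int value; };

void sink(Foo &src);   // CPPReferenceType with VC_lvalue
void sink(Foo &&src);  // CPPReferenceType with VC_rvalue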

+ 22 - 19
dtool/src/dtoolbase/dtoolbase_cc.h

@@ -34,6 +34,7 @@ using namespace std;
 #define INLINE inline
 #define TYPENAME typename
 #define CONSTEXPR
+#define NOEXCEPT noexcept
 
 #define EXPORT_TEMPLATE_CLASS(expcl, exptp, classname)
 
@@ -121,15 +122,28 @@ typedef ios::seekdir ios_seekdir;
 #endif
 
 #if defined(__has_extension) // Clang magic.
-#if __has_extension(cxx_constexpr)
-#define CONSTEXPR constexpr
-#else
-#define CONSTEXPR INLINE
-#endif
+#  if __has_extension(cxx_constexpr)
+#    define CONSTEXPR constexpr
+#  else
+#    define CONSTEXPR INLINE
+#  endif
+#  if __has_extension(cxx_noexcept)
+#    define NOEXCEPT noexcept
+#  else
+#    define NOEXCEPT
+#  endif
+#  if __has_extension(cxx_rvalue_references)
+#    define USE_MOVE_SEMANTICS
+#  endif
 #elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)) && (__cplusplus >= 201103L)
-#define CONSTEXPR constexpr
+// noexcept was introduced in GCC 4.6, constexpr in GCC 4.7, rvalue refs in
+// GCC 4.3.  However, GCC only started defining __cplusplus properly in 4.7.
+#  define CONSTEXPR constexpr
+#  define NOEXCEPT noexcept
+#  define USE_MOVE_SEMANTICS
 #else
-#define CONSTEXPR INLINE
+#  define CONSTEXPR INLINE
+#  define NOEXCEPT
 #endif
 
 #if defined(WIN32_VC) && !defined(LINK_ALL_STATIC) && defined(EXPORT_TEMPLATES)
@@ -209,7 +223,7 @@ public:
   TauProfile(void *&tautimer, char *name, char *type, int group, char *group_name) {
     Tau_profile_c_timer(&tautimer, name, type, group, group_name);
     _tautimer = tautimer;
-    TAU_PROFILE_START(_tautimer); 
+    TAU_PROFILE_START(_tautimer);
   }
   ~TauProfile() {
     if (!__tau_shutdown) {
@@ -233,16 +247,5 @@ private:
 
 #endif  // USE_TAU
 
-// Macros from hell.
-#define EXT_METHOD(cl, m) Extension<cl>::m()
-#define EXT_METHOD_ARGS(cl, m, ...) Extension<cl>::m(__VA_ARGS__)
-#define EXT_CONST_METHOD(cl, m) Extension<cl>::m() const
-#define EXT_CONST_METHOD_ARGS(cl, m, ...) Extension<cl>::m(__VA_ARGS__) const
-#define EXT_NESTED_METHOD(cl1, cl2, m) Extension<cl1::cl2>::m()
-#define EXT_NESTED_METHOD_ARGS(cl1, cl2, m, ...) Extension<cl1::cl2>::m(__VA_ARGS__)
-#define EXT_NESTED_CONST_METHOD(cl1, cl2, m) Extension<cl1::cl2>::m() const
-#define EXT_NESTED_CONST_METHOD_ARGS(cl1, cl2, m, ...) Extension<cl1::cl2>::m(__VA_ARGS__) const
-#define CALL_EXT_METHOD(cl, m, obj, ...) invoke_extension(obj).m(__VA_ARGS__)
-
 #endif  //  __cplusplus
 #endif
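
A small usage sketch of the portability macros defined above; the class and its members are made up, and the include assumes dtoolbase.h pulls in dtoolbase_cc.h as usual:

#include "dtoolbase.h"

class IntBox {
public:
  CONSTEXPR IntBox() : _value(0) {}   // constexpr where the compiler supports it
  INLINE IntBox(const IntBox &copy) : _value(copy._value) {}
#ifdef USE_MOVE_SEMANTICS
  INLINE IntBox(IntBox &&other) NOEXCEPT : _value(other._value) {
    other._value = 0;                 // leave the moved-from object in a safe state
  }
#endif
private:
  int _value;
};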

+ 15 - 3
makepanda/makepanda.py

@@ -1108,8 +1108,6 @@ def CompileCxx(obj,src,opts):
                 cmd += ' -fno-inline-functions-called-once -fgcse-after-reload'
                 cmd += ' -frerun-cse-after-loop -frename-registers'
 
-            if not src.endswith(".c"):
-                cmd += " -fno-exceptions -fno-rtti"
             cmd += " -Wa,--noexecstack"
 
             # Now add specific release/debug flags.
@@ -1135,6 +1133,16 @@ def CompileCxx(obj,src,opts):
         else:
             cmd += " -pthread"
 
+        if not src.endswith(".c"):
+            # We don't use exceptions.
+            if 'EXCEPTIONS' not in opts:
+                cmd += " -fno-exceptions"
+
+            if 'RTTI' not in opts:
+                # We always disable RTTI on Android for memory usage reasons.
+                if optlevel >= 4 or GetTarget() == "android":
+                    cmd += " -fno-rtti"
+
         if PkgSkip("SSE2") == 0 and not arch.startswith("arm"):
             cmd += " -msse2"
 
@@ -2185,6 +2193,10 @@ def WriteConfigSettings():
     if (GetOptimize() >= 4):
         dtool_config["PRC_SAVE_DESCRIPTIONS"] = 'UNDEF'
 
+    if (GetOptimize() >= 4):
+        # Disable RTTI on release builds.
+        dtool_config["HAVE_RTTI"] = 'UNDEF'
+
     # Now that we have OS_SIMPLE_THREADS, we can support
     # SIMPLE_THREADS on exotic architectures like win64, so we no
     # longer need to disable it for this platform.
@@ -3665,7 +3677,7 @@ if (PkgSkip("ROCKET") == 0) and (not RUNTIME):
   TargetAdd('libp3rocket.dll', input=COMMON_PANDA_LIBS)
   TargetAdd('libp3rocket.dll', opts=OPTS)
 
-  OPTS=['DIR:panda/src/rocket', 'ROCKET']
+  OPTS=['DIR:panda/src/rocket', 'ROCKET', 'RTTI', 'EXCEPTIONS']
   IGATEFILES=GetDirectoryContents('panda/src/rocket', ["rocketInputHandler.h",
     "rocketInputHandler.cxx", "rocketRegion.h", "rocketRegion.cxx", "rocketRegion_ext.h"])
   TargetAdd('libp3rocket.in', opts=OPTS, input=IGATEFILES)
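
The compiler flags above now drop -fexceptions unless a target requests the 'EXCEPTIONS' opt, and drop -frtti at high optimization levels (or on Android) unless it requests 'RTTI'; the libRocket target asks for both.  A hypothetical fragment showing the kind of code that needs them, since dynamic_cast requires RTTI and try/catch requires exception support:

#include <stdexcept>

struct Element { virtual ~Element() {} };
struct Document : Element { void reload() {} };

void reload_if_document(Element *elem) {
  Document *doc = dynamic_cast<Document *>(elem);  // needs -frtti
  if (doc != 0) {
    try {
      doc->reload();
    } catch (const std::exception &) {             // needs -fexceptions
      // ignore failures raised by the third-party library
    }
  }
}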

+ 68 - 0
panda/src/express/pointerTo.I

@@ -35,6 +35,40 @@ PointerTo(const PointerTo<T> &copy) :
 {
 }
 
+#ifdef USE_MOVE_SEMANTICS
+////////////////////////////////////////////////////////////////////
+//     Function: PointerTo::Move Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE PointerTo<T>::
+PointerTo(PointerTo<T> &&move) NOEXCEPT :
+  PointerToBase<T>((PointerToBase<T> &&)move)
+{
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: PointerTo::Move Assignment Operator
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE PointerTo<T> &PointerTo<T>::
+operator = (PointerTo<T> &&move) NOEXCEPT {
+  To *old_ptr = (To *)this->_void_ptr;
+
+  this->_void_ptr = move._void_ptr;
+  move._void_ptr = NULL;
+
+  if (old_ptr != (To *)NULL) {
+    unref_delete(old_ptr);
+  }
+
+  return *this;
+}
+#endif
+
 ////////////////////////////////////////////////////////////////////
 //     Function: PointerTo::Destructor
 //       Access: Public
@@ -166,6 +200,40 @@ ConstPointerTo(const ConstPointerTo<T> &copy) :
 {
 }
 
+#ifdef USE_MOVE_SEMANTICS
+////////////////////////////////////////////////////////////////////
+//     Function: ConstPointerTo::Move Constructor
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE ConstPointerTo<T>::
+ConstPointerTo(ConstPointerTo<T> &&move) NOEXCEPT :
+  PointerToBase<T>((PointerToBase<T> &&)move)
+{
+}
+
+////////////////////////////////////////////////////////////////////
+//     Function: ConstPointerTo::Move Assignment Operator
+//       Access: Public
+//  Description:
+////////////////////////////////////////////////////////////////////
+template<class T>
+INLINE ConstPointerTo<T> &ConstPointerTo<T>::
+operator = (ConstPointerTo<T> &&move) NOEXCEPT {
+  To *old_ptr = (To *)this->_void_ptr;
+
+  this->_void_ptr = move._void_ptr;
+  move._void_ptr = NULL;
+
+  if (old_ptr != (To *)NULL) {
+    unref_delete(old_ptr);
+  }
+
+  return *this;
+}
+#endif
+
 ////////////////////////////////////////////////////////////////////
 //     Function: ConstPointerTo::Dereference operator
 //       Access: Public

+ 10 - 0
panda/src/express/pointerTo.h

@@ -85,6 +85,11 @@ PUBLISHED:
   INLINE ~PointerTo();
 
 public:
+#ifdef USE_MOVE_SEMANTICS
+  INLINE PointerTo(PointerTo<T> &&move) NOEXCEPT;
+  INLINE PointerTo<T> &operator = (PointerTo<T> &&move) NOEXCEPT;
+#endif
+
   INLINE To &operator *() const;
   INLINE To *operator -> () const;
   // MSVC.NET 2005 insists that we use T *, and not To *, here.
@@ -144,6 +149,11 @@ PUBLISHED:
   INLINE ~ConstPointerTo();
 
 public:
+#ifdef USE_MOVE_SEMANTICS
+  INLINE ConstPointerTo(ConstPointerTo<T> &&move) NOEXCEPT;
+  INLINE ConstPointerTo<T> &operator = (ConstPointerTo<T> &&move) NOEXCEPT;
+#endif
+
   INLINE const To &operator *() const;
   INLINE const To *operator -> () const;
   INLINE operator const T *() const;

+ 14 - 0
panda/src/express/pointerToBase.I

@@ -35,6 +35,20 @@ PointerToBase(const PointerToBase<T> &copy) {
   reassign(copy);
 }
 
+////////////////////////////////////////////////////////////////////
+//     Function: PointerToBase::Move Constructor
+//       Access: Protected
+//  Description:
+////////////////////////////////////////////////////////////////////
+#ifdef USE_MOVE_SEMANTICS
+template<class T>
+INLINE PointerToBase<T>::
+PointerToBase(PointerToBase<T> &&move) NOEXCEPT {
+  _void_ptr = move._void_ptr;
+  move._void_ptr = (void *)NULL;
+}
+#endif
+
 ////////////////////////////////////////////////////////////////////
 //     Function: PointerToBase::Destructor
 //       Access: Protected

+ 4 - 0
panda/src/express/pointerToBase.h

@@ -38,6 +38,10 @@ protected:
   INLINE PointerToBase(const PointerToBase<T> &copy);
   INLINE ~PointerToBase();
 
+#ifdef USE_MOVE_SEMANTICS
+  INLINE PointerToBase(PointerToBase<T> &&move) NOEXCEPT;
+#endif
+
   INLINE void reassign(To *ptr);
   INLINE void reassign(const PointerToBase<To> &copy);
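
With the move constructors and move assignment added to PointerTo, ConstPointerTo and PointerToBase above, moving a smart pointer transfers the raw pointer without a ref/unref pair.  A usage sketch, assuming a build where USE_MOVE_SEMANTICS is defined; MyData is a hypothetical ReferenceCount subclass and the includes assume the Panda3D source tree:

#include "pointerTo.h"
#include "referenceCount.h"
#include <utility>

class MyData : public ReferenceCount {
};

void example() {
  PT(MyData) a = new MyData;
  PT(MyData) b = std::move(a);  // steals the pointer: no reference-count churn,
                                // and a is left holding NULL afterwards
}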
 

+ 50 - 9
panda/src/glstuff/glGraphicsStateGuardian_src.cxx

@@ -316,6 +316,7 @@ CLP(GraphicsStateGuardian)(GraphicsEngine *engine, GraphicsPipe *pipe) :
   _force_flush = gl_force_flush;
 
   _scissor_enabled = false;
+  _scissor_attrib_active = false;
 
 #ifdef DO_PSTATS
   if (gl_finish) {
@@ -2468,6 +2469,8 @@ prepare_display_region(DisplayRegionPipelineReader *dr) {
   _draw_buffer_type |= _current_properties->get_aux_mask();
   set_draw_buffer(_draw_buffer_type);
 
+  int count = dr->get_num_regions();
+
   if (dr->get_scissor_enabled()) {
     if (GLCAT.is_spam()) {
       GLCAT.spam()
@@ -2475,6 +2478,7 @@ prepare_display_region(DisplayRegionPipelineReader *dr) {
     }
     glEnable(GL_SCISSOR_TEST);
     _scissor_enabled = true;
+    _scissor_array.resize(count);
   } else {
     if (GLCAT.is_spam()) {
       GLCAT.spam()
@@ -2482,27 +2486,35 @@ prepare_display_region(DisplayRegionPipelineReader *dr) {
     }
     glDisable(GL_SCISSOR_TEST);
     _scissor_enabled = false;
+    _scissor_array.clear();
   }
 
+  _scissor_attrib_active = false;
+
 #ifndef OPENGLES
   if (_supports_viewport_arrays) {
-    int count = dr->get_num_regions();
+
     GLfloat *viewports = (GLfloat *)alloca(sizeof(GLfloat) * 4 * count);
-    GLint *scissors = (GLint *)alloca(sizeof(GLint) * 4 * count);
 
+    // We store the scissor regions in a vector since we may need
+    // to switch back to it in do_issue_scissor.
     for (int i = 0; i < count; ++i) {
-      GLint *sr = scissors + i * 4;
+      LVecBase4i sr;
       dr->get_region_pixels(i, sr[0], sr[1], sr[2], sr[3]);
       GLfloat *vr = viewports + i * 4;
       vr[0] = (GLfloat) sr[0];
       vr[1] = (GLfloat) sr[1];
       vr[2] = (GLfloat) sr[2];
       vr[3] = (GLfloat) sr[3];
+      if (_scissor_enabled) {
+        _scissor_array[i] = sr;
+      }
     }
     _glViewportArrayv(0, count, viewports);
-    if (dr->get_scissor_enabled()) {
-      _glScissorArrayv(0, count, scissors);
+    if (_scissor_enabled) {
+      _glScissorArrayv(0, count, _scissor_array[0].get_data());
     }
+
     if (GLCAT.is_spam()) {
       GLCAT.spam()
         << "glViewportArrayv(0, " << count << ", [\n";
@@ -2511,12 +2523,12 @@ prepare_display_region(DisplayRegionPipelineReader *dr) {
         GLCAT.spam(false) << vr[0] << ", " << vr[1] << ", " << vr[2] << ", " << vr[3] << ",\n";
       }
       GLCAT.spam(false) << "])\n";
-      if (dr->get_scissor_enabled()) {
+      if (_scissor_enabled) {
         GLCAT.spam()
           << "glScissorArrayv(0, " << count << ", [\n";
         for (int i = 0; i < count; ++i) {
-          GLint *sr = scissors + i * 4;
-          GLCAT.spam(false) << sr[0] << ", " << sr[1] << ", " << sr[2] << ", " << sr[3] << ",\n";
+          const LVecBase4i &sr = _scissor_array[i];
+          GLCAT.spam(false) << sr << ",\n";
         }
       }
       GLCAT.spam(false) << "])\n";
@@ -2526,9 +2538,13 @@ prepare_display_region(DisplayRegionPipelineReader *dr) {
 #endif  // OPENGLES
   {
     glViewport(x, y, width, height);
-    if (dr->get_scissor_enabled()) {
+    if (_scissor_enabled) {
       glScissor(x, y, width, height);
+
+      _scissor_array.resize(1);
+      _scissor_array[0].set(x, y, width, height);
     }
+
     if (GLCAT.is_spam()) {
       GLCAT.spam()
         << "glViewport(" << x << ", " << y << ", " << width << ", " << height << ")\n";
@@ -11984,5 +12000,30 @@ do_issue_scissor() {
         << "glScissor(" << x << ", " << y << ", " << width << ", " << height << ")\n";
     }
     glScissor(x, y, width, height);
+
+    _scissor_attrib_active = true;
+
+  } else if (_scissor_attrib_active) {
+    _scissor_attrib_active = false;
+
+    if (_scissor_array.size() > 0) {
+      // Scissoring is enabled on the display region.
+      // Revert to the scissor state specified in the DisplayRegion.
+      if (_supports_viewport_arrays) {
+        _glScissorArrayv(0, _scissor_array.size(), _scissor_array[0].get_data());
+      } else {
+        const LVecBase4i sr = _scissor_array[0];
+        glScissor(sr[0], sr[1], sr[2], sr[3]);
+      }
+
+    } else if (_scissor_enabled) {
+      // The display region had no scissor enabled.  Disable scissoring.
+      if (GLCAT.is_spam()) {
+        GLCAT.spam()
+          << "glDisable(GL_SCISSOR_TEST)\n";
+      }
+      glDisable(GL_SCISSOR_TEST);
+      _scissor_enabled = false;
+    }
   }
 }

+ 2 - 0
panda/src/glstuff/glGraphicsStateGuardian_src.h

@@ -550,6 +550,8 @@ protected:
   bool _point_perspective;
   bool _vertex_blending_enabled;
   bool _scissor_enabled;
+  bool _scissor_attrib_active;
+  epvector<LVecBase4i> _scissor_array;
 
 #ifndef OPENGLES_1
   PT(Shader) _current_shader;
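
A condensed, standalone sketch of the bookkeeping the two new members support: the GSG remembers the DisplayRegion's scissor rectangles, and when a scene-graph scissor override is cleared it either restores those rectangles or disables the scissor test.  The names below are simplified stand-ins, not the actual GSG interface:

#include <vector>

struct Rect { int x, y, w, h; };

struct ScissorState {
  std::vector<Rect> region_rects;  // filled when the display region is prepared
  bool scissor_enabled;            // GL scissor test currently on
  bool attrib_active;              // a ScissorAttrib override is in effect

  ScissorState() : scissor_enabled(false), attrib_active(false) {}

  // Called when the scissor override goes away again.
  void clear_override() {
    if (!attrib_active) {
      return;
    }
    attrib_active = false;
    if (!region_rects.empty()) {
      // revert to the DisplayRegion's own scissor rectangle(s)
    } else if (scissor_enabled) {
      // the region had no scissor of its own: turn the scissor test off
      scissor_enabled = false;
    }
  }
};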

+ 4 - 4
panda/src/pgraph/config_pgraph.cxx

@@ -257,12 +257,12 @@ ConfigVariableBool retransform_sprites
           "necessary in order for fog to work correctly on the sprites."));
 
 ConfigVariableBool depth_offset_decals
-("depth-offset-decals", false,
+("depth-offset-decals", true,
  PRC_DESC("Set this true to allow decals to be implemented via the advanced "
           "depth offset feature, if supported, instead of via the traditional "
-          "(and slower) two-pass approach.  This is false by default "
-          "because it appears that many graphics drivers have issues with "
-          "their depth offset implementation."));
+          "(and slower) two-pass approach.  This is currently the only method "
+          "by which decals are implemented in Panda3D, and as such, this "
+          "setting is ignored."));
 
 ConfigVariableInt max_collect_vertices
 ("max-collect-vertices", 65534,

+ 6 - 51
panda/src/pgraph/cullBin.cxx

@@ -27,7 +27,7 @@ TypeHandle CullBin::_type_handle;
 ////////////////////////////////////////////////////////////////////
 //     Function: CullBin::Destructor
 //       Access: Public, Virtual
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 CullBin::
 ~CullBin() {
@@ -115,7 +115,7 @@ check_flash_color() {
   } else {
     _has_flash_color = false;
     pgraph_cat.warning()
-      << "Invalid value for flash-bin-" << _name << ": " 
+      << "Invalid value for flash-bin-" << _name << ": "
       << flash_bin.get_string_value() << "\n";
   }
 #endif  // NDEBUG
@@ -124,7 +124,7 @@ check_flash_color() {
 ////////////////////////////////////////////////////////////////////
 //     Function: CullBin::ResultGraphBuilder::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 CullBin::ResultGraphBuilder::
 ResultGraphBuilder(PandaNode *root_node) :
@@ -142,9 +142,8 @@ ResultGraphBuilder(PandaNode *root_node) :
 ////////////////////////////////////////////////////////////////////
 void CullBin::ResultGraphBuilder::
 add_object(CullableObject *object) {
-  if (_current_transform != object->_modelview_transform || 
-      _current_state != object->_state || 
-      object->is_fancy()) {
+  if (_current_transform != object->_modelview_transform ||
+      _current_state != object->_state) {
     // Create a new GeomNode to hold the net transform and state.  We
     // choose to create a new GeomNode for each new state, to make it
     // clearer to the observer when the state changes.
@@ -157,57 +156,13 @@ add_object(CullableObject *object) {
   }
 
   record_one_object(_current_node, object);
-
-  if (object->get_next() != (CullableObject *)NULL) {
-    // Collect the decal base pieces.
-    CullableObject *base = object->get_next();
-    while (base != (CullableObject *)NULL && base->_geom != (Geom *)NULL) {
-      record_one_object(_current_node, base);
-      base = base->get_next();
-    }
-
-    if (base != (CullableObject *)NULL) {
-      // Now, collect all the decals.
-      _current_node->set_effect(DecalEffect::make());
-      int decal_index = 0;
-
-      CPT(TransformState) transform;
-      CPT(RenderState) state;
-      PT(GeomNode) decal_node;
-      CullableObject *decal = base->get_next();
-      while (decal != (CullableObject *)NULL) {
-        if (transform != decal->_modelview_transform || 
-            state != decal->_state || 
-            decal->get_next() != (CullableObject *)NULL) {
-          // Create a new GeomNode to hold the net transform.
-          transform = decal->_modelview_transform;
-          state = decal->_state;
-          decal_node = new GeomNode("decal_" + format_string(decal_index));
-          _current_node->add_child(decal_node);
-          decal_node->set_transform(transform);
-          decal_node->set_state(state);
-        }
-        
-        record_one_object(decal_node, decal);
-        decal = decal->get_next();
-        ++decal_index;
-      }
-    }
-
-    // Reset the current node pointer for next time so the decal root
-    // will remain in its own node.
-    _current_node.clear();
-    _current_transform.clear();
-    _current_state.clear();
-  }
-
   ++_object_index;
 }
 
 ////////////////////////////////////////////////////////////////////
 //     Function: CullBin::ResultGraphBuilder::record_one_object
 //       Access: Private
-//  Description: Records a single object, without regard to decalling.
+//  Description: Records a single object.
 ////////////////////////////////////////////////////////////////////
 void CullBin::ResultGraphBuilder::
 record_one_object(GeomNode *node, CullableObject *object) {

+ 13 - 61
panda/src/pgraph/cullResult.cxx

@@ -57,7 +57,7 @@ static const double bin_color_flash_rate = 1.0;  // 1 state change per second
 ////////////////////////////////////////////////////////////////////
 //     Function: CullResult::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 CullResult::
 CullResult(GraphicsStateGuardianBase *gsg,
@@ -87,7 +87,7 @@ make_next() const {
 
   for (size_t i = 0; i < _bins.size(); ++i) {
     CullBin *old_bin = _bins[i];
-    if (old_bin == (CullBin *)NULL || 
+    if (old_bin == (CullBin *)NULL ||
         old_bin->get_bin_type() != bin_manager->get_bin_type(i)) {
       new_result->_bins.push_back((CullBin *)NULL);
     } else {
@@ -166,21 +166,19 @@ add_object(CullableObject *object, const CullTraverser *traverser) {
       // later.  This means we must copy the object and add it to
       // both bins.  We can only do this if we do not have an
       // explicit bin already applied; otherwise, M_dual falls back
-      // to M_alpha. 
+      // to M_alpha.
       {
         const CullBinAttrib *bin_attrib = DCAST(CullBinAttrib, state->get_attrib(CullBinAttrib::get_class_slot()));
-        if (bin_attrib == (CullBinAttrib *)NULL || 
+        if (bin_attrib == (CullBinAttrib *)NULL ||
             bin_attrib->get_bin_name().empty()) {
-          // We make a copy of the object to draw the transparent part
-          // without decals; this gets placed in the transparent bin.
+          // We make a copy of the object to draw the transparent part;
+          // this gets placed in the transparent bin.
 #ifndef NDEBUG
-          if (m_dual_transparent) 
+          if (m_dual_transparent)
 #endif
             {
               CullableObject *transparent_part = new CullableObject(*object);
-              CPT(RenderState) transparent_state = object->has_decals() ? 
-                get_dual_transparent_state_decals() : 
-                get_dual_transparent_state();
+              CPT(RenderState) transparent_state = get_dual_transparent_state();
               transparent_part->_state = state->compose(transparent_state);
               if (transparent_part->munge_geom
                   (_gsg, _gsg->get_geom_munger(transparent_part->_state, current_thread),
@@ -195,9 +193,9 @@ add_object(CullableObject *object, const CullTraverser *traverser) {
                 delete transparent_part;
               }
             }
-          
-          // Now we can draw the opaque part, with decals.  This will
-          // end up in the opaque bin.
+
+          // Now we can draw the opaque part.  This will end up in
+          // the opaque bin.
           object->_state = state->compose(get_dual_opaque_state());
 #ifndef NDEBUG
           if (!m_dual_opaque) {
@@ -210,7 +208,7 @@ add_object(CullableObject *object, const CullTraverser *traverser) {
         // M_alpha.
       }
       break;
-      
+
     default:
       // Other kinds of transparency need no special handling.
       break;
@@ -443,8 +441,6 @@ check_flash_transparency(CPT(RenderState) &state, const LColor &transparency) {
 //       Access: Private
 //  Description: Returns a RenderState that renders only the
 //               transparent parts of an object, in support of M_dual.
-//               This state is suitable only for objects that do not
-//               contain decals.
 ////////////////////////////////////////////////////////////////////
 CPT(RenderState) CullResult::
 get_dual_transparent_state() {
@@ -454,10 +450,7 @@ get_dual_transparent_state() {
     // and hence filling up the depth buffer with large empty spaces
     // that may obscure other things.  However, this does mean we draw
     // pixels twice where the alpha == 1.0 (since they were already
-    // drawn in the opaque pass).  This is not normally a problem,
-    // except when we are using decals; in the case of decals, we
-    // don't want to draw the 1.0 pixels again, since these are the
-    // ones that may have been decaled onto.
+    // drawn in the opaque pass).  This is not normally a problem.
     state = RenderState::make(AlphaTestAttrib::make(AlphaTestAttrib::M_greater, 0.0f),
                               TransparencyAttrib::make(TransparencyAttrib::M_alpha),
                               DepthWriteAttrib::make(DepthWriteAttrib::M_off),
@@ -487,47 +480,6 @@ get_dual_transparent_state() {
   return state;
 }
 
-////////////////////////////////////////////////////////////////////
-//     Function: CullResult::get_dual_transparent_state_decals
-//       Access: Private
-//  Description: Returns a RenderState that renders only the
-//               transparent parts of an object, but suitable for
-//               objects that contain decals.
-////////////////////////////////////////////////////////////////////
-CPT(RenderState) CullResult::
-get_dual_transparent_state_decals() {
-  static CPT(RenderState) state = NULL;
-  if (state == (const RenderState *)NULL) {
-    // This is exactly the same as above except here we make the alpha
-    // test of < 1.0 instead of > 0.0.  This makes us draw big empty
-    // pixels where the alpha values are 0.0, but we don't overwrite
-    // the decals where the pixels are 1.0.
-    state = RenderState::make(AlphaTestAttrib::make(AlphaTestAttrib::M_less, dual_opaque_level),
-                              TransparencyAttrib::make(TransparencyAttrib::M_alpha),
-                              DepthWriteAttrib::make(DepthWriteAttrib::M_off),
-                              RenderState::get_max_priority());
-  }
-
-#ifndef NDEBUG
-  if (m_dual_flash) {
-    int cycle = (int)(ClockObject::get_global_clock()->get_frame_time() * bin_color_flash_rate);
-    if ((cycle & 1) == 0) {
-      static CPT(RenderState) flash_state = NULL;
-      if (flash_state == (const RenderState *)NULL) {
-        flash_state = state->add_attrib(ColorAttrib::make_flat(LColor(0.8f, 0.2, 0.2, 1.0f)),
-                                        RenderState::get_max_priority());
-        flash_state = flash_state->add_attrib(ColorScaleAttrib::make(LVecBase4(1.0f, 1.0f, 1.0f, 1.0f)),
-                                              RenderState::get_max_priority());
-
-      }
-      return flash_state;
-    }
-  }
-#endif  // NDEBUG
-
-  return state;
-}
-
 ////////////////////////////////////////////////////////////////////
 //     Function: CullResult::get_dual_opaque_state
 //       Access: Private
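
The M_dual branch above is what a node opts into when dual transparency is requested on it; a minimal usage sketch, assuming the standard NodePath::set_transparency() and TransparencyAttrib API:

#include "nodePath.h"
#include "transparencyAttrib.h"

void use_dual_transparency(NodePath np) {
  // CullResult::add_object() will then draw the alpha == 1 pixels via the
  // opaque bin (depth write on) and the partially transparent pixels via the
  // transparent bin (depth write off), using the states composed above.
  np.set_transparency(TransparencyAttrib::M_dual);
}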

+ 2 - 5
panda/src/pgraph/cullResult.h

@@ -72,12 +72,11 @@ private:
   static CPT(RenderState) get_alpha_state();
   static CPT(RenderState) get_binary_state();
   static CPT(RenderState) get_dual_transparent_state();
-  static CPT(RenderState) get_dual_transparent_state_decals();
   static CPT(RenderState) get_dual_opaque_state();
 
   GraphicsStateGuardianBase *_gsg;
   PStatCollector _draw_region_pcollector;
-  
+
   typedef pvector< PT(CullBin) > Bins;
   Bins _bins;
 
@@ -90,7 +89,7 @@ public:
     register_type(_type_handle, "CullResult",
                   ReferenceCount::get_class_type());
   }
-  
+
 private:
   static TypeHandle _type_handle;
 };
@@ -99,5 +98,3 @@ private:
 
 #endif
 
-
-  

+ 4 - 4
panda/src/pgraph/cullTraverser.I

@@ -110,12 +110,12 @@ get_initial_state() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: CullTraverser::get_depth_offset_decals
 //       Access: Published
-//  Description: Returns the depth_offset_decals flag.  See
-//               set_depth_offset_decals().
+//  Description: Returns true, as depth offsets are the only way
+//               that we implement decals nowadays.
 ////////////////////////////////////////////////////////////////////
 INLINE bool CullTraverser::
 get_depth_offset_decals() const {
-  return _depth_offset_decals;
+  return true;
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -199,7 +199,7 @@ get_cull_handler() const {
 ////////////////////////////////////////////////////////////////////
 //     Function: CullTraverser::set_portal_clipper
 //       Access: Published
-//  Description: Specifies _portal_clipper object pointer that 
+//  Description: Specifies _portal_clipper object pointer that
 //               subsequent traverse() or traverse_below may use.
 ////////////////////////////////////////////////////////////////////
 INLINE void CullTraverser::

+ 67 - 303
panda/src/pgraph/cullTraverser.cxx

@@ -47,7 +47,7 @@ TypeHandle CullTraverser::_type_handle;
 ////////////////////////////////////////////////////////////////////
 //     Function: CullTraverser::Constructor
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 CullTraverser::
 CullTraverser() :
@@ -65,7 +65,7 @@ CullTraverser() :
 ////////////////////////////////////////////////////////////////////
 //     Function: CullTraverser::Copy Constructor
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 CullTraverser::
 CullTraverser(const CullTraverser &copy) :
@@ -76,7 +76,6 @@ CullTraverser(const CullTraverser &copy) :
   _has_tag_state_key(copy._has_tag_state_key),
   _tag_state_key(copy._tag_state_key),
   _initial_state(copy._initial_state),
-  _depth_offset_decals(copy._depth_offset_decals),
   _view_frustum(copy._view_frustum),
   _cull_handler(copy._cull_handler),
   _portal_clipper(copy._portal_clipper),
@@ -98,7 +97,6 @@ set_scene(SceneSetup *scene_setup, GraphicsStateGuardianBase *gsg,
   _gsg = gsg;
 
   _initial_state = scene_setup->get_initial_state();
-  _depth_offset_decals = _gsg->depth_offset_decals() && depth_offset_decals;
 
   _current_thread = Thread::get_current_thread();
 
@@ -129,43 +127,43 @@ traverse(const NodePath &root) {
     PT(BoundingVolume) bv = _scene_setup->get_lens()->make_bounds();
     if (bv != (BoundingVolume *)NULL &&
         bv->is_of_type(GeometricBoundingVolume::get_class_type())) {
-      
+
       local_frustum = DCAST(GeometricBoundingVolume, bv);
     }
-      
+
     // This local_frustum is in camera space
     PortalClipper portal_viewer(local_frustum, _scene_setup);
     if (debug_portal_cull) {
       portal_viewer.draw_camera_frustum();
     }
-    
+
     // Store this pointer in this
     set_portal_clipper(&portal_viewer);
 
     CullTraverserData data(root, TransformState::make_identity(),
-                           _initial_state, _view_frustum, 
+                           _initial_state, _view_frustum,
                            _current_thread);
-    
+
     traverse(data);
-    
+
     // Finally add the lines to be drawn
     if (debug_portal_cull) {
       portal_viewer.draw_lines();
     }
-    
+
     // Render the frustum relative to the cull center.
     NodePath cull_center = _scene_setup->get_cull_center();
     CPT(TransformState) transform = cull_center.get_transform(root);
-    
+
     CullTraverserData my_data(data, portal_viewer._previous);
     my_data._net_transform = my_data._net_transform->compose(transform);
     traverse(my_data);
 
   } else {
     CullTraverserData data(root, TransformState::make_identity(),
-                           _initial_state, _view_frustum, 
+                           _initial_state, _view_frustum,
                            _current_thread);
-    
+
     traverse(data);
   }
 }
@@ -181,7 +179,7 @@ void CullTraverser::
 traverse(CullTraverserData &data) {
   if (is_in_view(data)) {
     if (pgraph_cat.is_spam()) {
-      pgraph_cat.spam() 
+      pgraph_cat.spam()
         << "\n" << data._node_path
         << " " << data._draw_mask << "\n";
     }
@@ -207,9 +205,9 @@ traverse(CullTraverserData &data) {
         // up now.
         show_bounds(data, node_effects->has_show_tight_bounds());
       }
-      
+
       data.apply_transform_and_state(this);
-      
+
       const FogAttrib *fog = DCAST(FogAttrib, node_reader->get_state()->get_attrib(FogAttrib::get_class_slot()));
       if (fog != (const FogAttrib *)NULL && fog->get_fog() != (Fog *)NULL) {
         // If we just introduced a FogAttrib here, call adjust_to_camera()
@@ -218,7 +216,7 @@ traverse(CullTraverserData &data) {
         // need for it.
         fog->get_fog()->adjust_to_camera(get_camera_transform());
       }
-      
+
       if (fancy_bits & PandaNode::FB_cull_callback) {
         PandaNode *node = data.node();
         if (!node->cull_callback(this, data)) {
@@ -247,47 +245,41 @@ traverse_below(CullTraverserData &data) {
 
   const RenderEffects *node_effects = node_reader->get_effects();
   bool has_decal = !this_node_hidden && node_effects->has_decal();
-  if (has_decal && !_depth_offset_decals) {
-    // Start the three-pass decal rendering if we're not using
-    // DepthOffsetAttribs to implement decals.
-    start_decal(data);
-    
-  } else {
-    if (!this_node_hidden) {
-      node->add_for_draw(this, data);
-    }
 
-    if (has_decal) {
-      // If we *are* implementing decals with DepthOffsetAttribs,
-      // apply it now, so that each child of this node gets offset by
-      // a tiny amount.
-      data._state = data._state->compose(get_depth_offset_state());
+  if (!this_node_hidden) {
+    node->add_for_draw(this, data);
+  }
+
+  if (has_decal) {
+    // If we *are* implementing decals with DepthOffsetAttribs,
+    // apply it now, so that each child of this node gets offset by
+    // a tiny amount.
+    data._state = data._state->compose(get_depth_offset_state());
 #ifndef NDEBUG
-      // This is just a sanity check message.
-      if (!node->is_geom_node()) {
-        pgraph_cat.error()
-          << "DecalEffect applied to " << *node << ", not a GeomNode.\n";
-      }
+    // This is just a sanity check message.
+    if (!node->is_geom_node()) {
+      pgraph_cat.error()
+        << "DecalEffect applied to " << *node << ", not a GeomNode.\n";
+    }
 #endif
+  }
+
+  // Now visit all the node's children.
+  PandaNode::Children children = node_reader->get_children();
+  node_reader->release();
+  int num_children = children.get_num_children();
+  if (node->has_selective_visibility()) {
+    int i = node->get_first_visible_child();
+    while (i < num_children) {
+      CullTraverserData next_data(data, children.get_child(i));
+      traverse(next_data);
+      i = node->get_next_visible_child(i);
     }
 
-    // Now visit all the node's children.
-    PandaNode::Children children = node_reader->get_children();
-    node_reader->release();
-    int num_children = children.get_num_children();
-    if (node->has_selective_visibility()) {
-      int i = node->get_first_visible_child();
-      while (i < num_children) {
-        CullTraverserData next_data(data, children.get_child(i));
-        traverse(next_data);
-        i = node->get_next_visible_child(i);
-      }
-      
-    } else {
-      for (int i = 0; i < num_children; i++) {
-        CullTraverserData next_data(data, children.get_child(i));
-        traverse(next_data);
-      }
+  } else {
+    for (int i = 0; i < num_children; i++) {
+      CullTraverserData next_data(data, children.get_child(i));
+      traverse(next_data);
     }
   }
 }
@@ -311,20 +303,20 @@ end_traverse() {
 //               bounding volume.
 ////////////////////////////////////////////////////////////////////
 void CullTraverser::
-draw_bounding_volume(const BoundingVolume *vol, 
+draw_bounding_volume(const BoundingVolume *vol,
                      const TransformState *net_transform,
                      const TransformState *modelview_transform) const {
   PT(Geom) bounds_viz = make_bounds_viz(vol);
-  
+
   if (bounds_viz != (Geom *)NULL) {
     _geoms_pcollector.add_level(2);
-    CullableObject *outer_viz = 
-      new CullableObject(bounds_viz, get_bounds_outer_viz_state(), 
+    CullableObject *outer_viz =
+      new CullableObject(bounds_viz, get_bounds_outer_viz_state(),
                          net_transform, modelview_transform, get_scene());
     _cull_handler->record_object(outer_viz, this);
-    
-    CullableObject *inner_viz = 
-      new CullableObject(bounds_viz, get_bounds_inner_viz_state(), 
+
+    CullableObject *inner_viz =
+      new CullableObject(bounds_viz, get_bounds_inner_viz_state(),
                          net_transform, modelview_transform, get_scene());
     _cull_handler->record_object(inner_viz, this);
   }
@@ -360,13 +352,13 @@ show_bounds(CullTraverserData &data, bool tight) {
 
     if (bounds_viz != (Geom *)NULL) {
       _geoms_pcollector.add_level(1);
-      CullableObject *outer_viz = 
-        new CullableObject(bounds_viz, get_bounds_outer_viz_state(), 
+      CullableObject *outer_viz =
+        new CullableObject(bounds_viz, get_bounds_outer_viz_state(),
                            net_transform, modelview_transform,
                            get_scene());
       _cull_handler->record_object(outer_viz, this);
     }
-    
+
   } else {
     draw_bounding_volume(node->get_bounds(),
                          net_transform, modelview_transform);
@@ -378,7 +370,7 @@ show_bounds(CullTraverserData &data, bool tight) {
       GeomNode *gnode = DCAST(GeomNode, node);
       int num_geoms = gnode->get_num_geoms();
       for (int i = 0; i < num_geoms; ++i) {
-        draw_bounding_volume(gnode->get_geom(i)->get_bounds(), 
+        draw_bounding_volume(gnode->get_geom(i)->get_bounds(),
                              net_transform, modelview_transform);
       }
     }
@@ -407,7 +399,7 @@ make_bounds_viz(const BoundingVolume *vol) {
       ("bounds", GeomVertexFormat::get_v3(),
        Geom::UH_stream);
     GeomVertexWriter vertex(vdata, InternalName::get_vertex());
-    
+
     PT(GeomTristrips) strip = new GeomTristrips(Geom::UH_stream);
     for (int sl = 0; sl < num_slices; ++sl) {
       PN_stdfloat longitude0 = (PN_stdfloat)sl / (PN_stdfloat)num_slices;
@@ -419,11 +411,11 @@ make_bounds_viz(const BoundingVolume *vol) {
         vertex.add_data3(compute_point(sphere, latitude, longitude1));
       }
       vertex.add_data3(compute_point(sphere, 1.0, longitude0));
-      
+
       strip->add_next_vertices(num_stacks * 2);
       strip->close_primitive();
     }
-    
+
     geom = new Geom(vdata);
     geom->add_primitive(strip);
 
@@ -438,7 +430,7 @@ make_bounds_viz(const BoundingVolume *vol) {
     for (int i = 0; i < 8; ++i ) {
       vertex.add_data3(fvol->get_point(i));
     }
-    
+
     PT(GeomLines) lines = new GeomLines(Geom::UH_stream);
     lines->add_vertices(0, 1); lines->close_primitive();
     lines->add_vertices(1, 2); lines->close_primitive();
@@ -472,7 +464,7 @@ make_bounds_viz(const BoundingVolume *vol) {
     for (int i = 0; i < 8; ++i ) {
       vertex.add_data3(box.get_point(i));
     }
-    
+
     PT(GeomTriangles) tris = new GeomTriangles(Geom::UH_stream);
     tris->add_vertices(0, 4, 5);
     tris->close_primitive();
@@ -533,7 +525,7 @@ make_tight_bounds_viz(PandaNode *node) const {
       Geom::UH_stream);
     GeomVertexWriter vertex(vdata, InternalName::get_vertex(),
                             _current_thread);
-    
+
     vertex.add_data3(n[0], n[1], n[2]);
     vertex.add_data3(n[0], n[1], x[2]);
     vertex.add_data3(n[0], x[1], n[2]);
@@ -542,7 +534,7 @@ make_tight_bounds_viz(PandaNode *node) const {
     vertex.add_data3(x[0], n[1], x[2]);
     vertex.add_data3(x[0], x[1], n[2]);
     vertex.add_data3(x[0], x[1], x[2]);
-  
+
     PT(GeomLinestrips) strip = new GeomLinestrips(Geom::UH_stream);
 
     // We wind one long linestrip around the wireframe cube.  This
@@ -564,7 +556,7 @@ make_tight_bounds_viz(PandaNode *node) const {
     strip->add_vertex(5);
     strip->add_vertex(1);
     strip->close_primitive();
-      
+
     geom = new Geom(vdata);
     geom->add_primitive(strip);
   }
@@ -576,10 +568,10 @@ make_tight_bounds_viz(PandaNode *node) const {
 //     Function: CullTraverser::compute_point
 //       Access: Private, Static
 //  Description: Returns a point on the surface of the sphere.
-//               latitude and longitude range from 0.0 to 1.0.  
+//               latitude and longitude range from 0.0 to 1.0.
 ////////////////////////////////////////////////////////////////////
 LVertex CullTraverser::
-compute_point(const BoundingSphere *sphere, 
+compute_point(const BoundingSphere *sphere,
               PN_stdfloat latitude, PN_stdfloat longitude) {
   PN_stdfloat s1, c1;
   csincos(latitude * MathNumbers::pi, &s1, &c1);
@@ -648,231 +640,3 @@ get_depth_offset_state() {
   }
   return state;
 }
-
-
-////////////////////////////////////////////////////////////////////
-//     Function: CullTraverser::start_decal
-//       Access: Private
-//  Description: Collects a base node and all of the decals applied to
-//               it.  This involves recursing below the base GeomNode
-//               to find all the decal geoms.
-////////////////////////////////////////////////////////////////////
-void CullTraverser::
-start_decal(const CullTraverserData &data) {
-  PandaNode *node = data.node();
-  if (!node->is_geom_node()) {
-    pgraph_cat.error()
-      << "DecalEffect applied to " << *node << ", not a GeomNode.\n";
-    return;
-  }
-
-  const PandaNodePipelineReader *node_reader = data.node_reader();
-
-  // Build a chain of CullableObjects.  The head of the chain will be
-  // all of the base Geoms in order, followed by an empty
-  // CullableObject node, followed by all of the decal Geoms, in
-  // order.
-
-  // Since the CullableObject is a linked list which gets built in
-  // LIFO order, we start with the decals.
-  CullableObject *decals = (CullableObject *)NULL;
-  PandaNode::Children cr = node_reader->get_children();
-  int num_children = cr.get_num_children();
-  if (node->has_selective_visibility()) {
-    int i = node->get_first_visible_child();
-    while (i < num_children) {
-      CullTraverserData next_data(data, cr.get_child(i));
-      decals = r_get_decals(next_data, decals);
-      i = node->get_next_visible_child(i);
-    }
-    
-  } else {
-    for (int i = num_children - 1; i >= 0; i--) {
-      CullTraverserData next_data(data, cr.get_child(i));
-      decals = r_get_decals(next_data, decals);
-    }
-  }
-
-  // Now create a new, empty CullableObject to separate the decals
-  // from the non-decals.
-  CullableObject *separator = new CullableObject;
-  separator->set_next(decals);
-
-  // And now get the base Geoms, again in reverse order.
-  CullableObject *object = separator;
-  GeomNode *geom_node = DCAST(GeomNode, node);
-  GeomNode::Geoms geoms = geom_node->get_geoms();
-  int num_geoms = geoms.get_num_geoms();
-  _geoms_pcollector.add_level(num_geoms);
-  CPT(TransformState) net_transform = data.get_net_transform(this);
-  CPT(TransformState) modelview_transform = data.get_modelview_transform(this);
-  CPT(TransformState) internal_transform = _scene_setup->get_cs_transform()->compose(modelview_transform);
-  
-  for (int i = num_geoms - 1; i >= 0; i--) {
-    const Geom *geom = geoms.get_geom(i);
-    if (geom->is_empty()) {
-      continue;
-    }
-
-    CPT(RenderState) state = data._state->compose(geoms.get_geom_state(i));
-    if (state->has_cull_callback() && !state->cull_callback(this, data)) {
-      // Cull.
-      continue;
-    }
-    
-    // Cull the Geom bounding volume against the view frustum
-    // and/or the cull planes.  Don't bother unless we've got more
-    // than one Geom, since otherwise the bounding volume of the
-    // GeomNode is (probably) the same as that of the one Geom,
-    // and we've already culled against that.
-    if (num_geoms > 1) {
-      if (data._view_frustum != (GeometricBoundingVolume *)NULL) {
-        // Cull the individual Geom against the view frustum.
-        CPT(BoundingVolume) geom_volume = geom->get_bounds();
-        const GeometricBoundingVolume *geom_gbv =
-          DCAST(GeometricBoundingVolume, geom_volume);
-        
-        int result = data._view_frustum->contains(geom_gbv);
-        if (result == BoundingVolume::IF_no_intersection) {
-          // Cull this Geom.
-          continue;
-        }
-      }
-      if (!data._cull_planes->is_empty()) {
-        // Also cull the Geom against the cull planes.
-        CPT(BoundingVolume) geom_volume = geom->get_bounds();
-        const GeometricBoundingVolume *geom_gbv =
-          DCAST(GeometricBoundingVolume, geom_volume);
-        int result;
-        data._cull_planes->do_cull(result, state, geom_gbv);
-        if (result == BoundingVolume::IF_no_intersection) {
-          // Cull.
-          continue;
-        }
-      }
-    }
-
-    CullableObject *next = object;
-    object =
-      new CullableObject(geom, state, net_transform, 
-                         modelview_transform, internal_transform);
-    object->set_next(next);
-  }
-
-  if (object != separator) {
-    // Finally, send the whole list down to the CullHandler for
-    // processing.  The first Geom in the node now represents the
-    // overall state.
-    _cull_handler->record_object(object, this);
-  } else {
-    // Never mind; there's nothing to render.
-    delete object;
-  }
-}
-
-////////////////////////////////////////////////////////////////////
-//     Function: CullTraverser::r_get_decals
-//       Access: Private
-//  Description: Recursively gets all the decals applied to a
-//               particular GeomNode.  These are built into a
-//               CullableObject list in LIFO order (so that the
-//               traversing the list will extract them in the order
-//               they were encountered in the scene graph).
-////////////////////////////////////////////////////////////////////
-CullableObject *CullTraverser::
-r_get_decals(CullTraverserData &data, CullableObject *decals) {
-  if (is_in_view(data)) {
-    PandaNodePipelineReader *node_reader = data.node_reader();
-    PandaNode *node = data.node();
-
-    const RenderEffects *node_effects = node_reader->get_effects();
-    if (node_effects->has_show_bounds()) {
-      // If we should show the bounding volume for this node, make it
-      // up now.
-      show_bounds(data, node_effects->has_show_tight_bounds());
-    }
-
-    data.apply_transform_and_state(this);
-
-    // First, visit all of the node's children.
-    int num_children = node_reader->get_num_children();
-    if (node->has_selective_visibility()) {
-      int i = node->get_first_visible_child();
-      while (i < num_children) {
-        CullTraverserData next_data(data, node_reader->get_child(i));
-        decals = r_get_decals(next_data, decals);
-        i = node->get_next_visible_child(i);
-      }
-      
-    } else {
-      for (int i = num_children - 1; i >= 0; i--) {
-        CullTraverserData next_data(data, node_reader->get_child(i));
-        decals = r_get_decals(next_data, decals);
-      }
-    }
-
-    // Now, tack on any geoms within the node.
-    if (node->is_geom_node()) {
-      GeomNode *geom_node = DCAST(GeomNode, node);
-      GeomNode::Geoms geoms = geom_node->get_geoms();
-      int num_geoms = geoms.get_num_geoms();
-      _geoms_pcollector.add_level(num_geoms);
-      CPT(TransformState) net_transform = data.get_net_transform(this);
-      CPT(TransformState) modelview_transform = data.get_modelview_transform(this);
-      CPT(TransformState) internal_transform = _scene_setup->get_cs_transform()->compose(modelview_transform);
-
-      for (int i = num_geoms - 1; i >= 0; i--) {
-        const Geom *geom = geoms.get_geom(i);
-        if (geom->is_empty()) {
-          continue;
-        }
-
-        CPT(RenderState) state = data._state->compose(geoms.get_geom_state(i));
-        if (state->has_cull_callback() && !state->cull_callback(this, data)) {
-          // Cull.
-          continue;
-        }
-
-        // Cull the Geom bounding volume against the view frustum
-        // and/or the cull planes.  Don't bother unless we've got more
-        // than one Geom, since otherwise the bounding volume of the
-        // GeomNode is (probably) the same as that of the one Geom,
-        // and we've already culled against that.
-        if (num_geoms > 1) {
-          if (data._view_frustum != (GeometricBoundingVolume *)NULL) {
-            // Cull the individual Geom against the view frustum.
-            CPT(BoundingVolume) geom_volume = geom->get_bounds();
-            const GeometricBoundingVolume *geom_gbv =
-              DCAST(GeometricBoundingVolume, geom_volume);
-
-            int result = data._view_frustum->contains(geom_gbv);
-            if (result == BoundingVolume::IF_no_intersection) {
-              // Cull this Geom.
-              continue;
-            }
-          }
-          if (!data._cull_planes->is_empty()) {
-            // Also cull the Geom against the cull planes.
-            CPT(BoundingVolume) geom_volume = geom->get_bounds();
-            const GeometricBoundingVolume *geom_gbv =
-              DCAST(GeometricBoundingVolume, geom_volume);
-            int result;
-            data._cull_planes->do_cull(result, state, geom_gbv);
-            if (result == BoundingVolume::IF_no_intersection) {
-              // Cull.
-              continue;
-            }
-          }
-        }
-
-        CullableObject *next = decals;
-        decals =
-          new CullableObject(geom, state, net_transform, 
-                             modelview_transform, internal_transform);
-        decals->set_next(next);
-      }
-    }
-  }
-
-  return decals;
-}
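
traverse_below() now unconditionally composes get_depth_offset_state() onto the children of a decaled node. That helper is not shown in this hunk; the sketch below only illustrates the kind of state it plausibly builds, assuming the DepthOffsetAttrib and RenderState APIs (the function name here is hypothetical):

#include "renderState.h"
#include "depthOffsetAttrib.h"

static CPT(RenderState) make_decal_offset_state() {
  // Push decal children slightly toward the camera so they win the depth
  // test against the base geometry without a second geometry pass.
  static CPT(RenderState) state = NULL;
  if (state == (const RenderState *)NULL) {
    state = RenderState::make(DepthOffsetAttrib::make(1));
  }
  return state;
}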

+ 4 - 5
panda/src/pgraph/cullTraverser.h

@@ -87,7 +87,7 @@ PUBLISHED:
 
   INLINE static void flush_level();
 
-  void draw_bounding_volume(const BoundingVolume *vol, 
+  void draw_bounding_volume(const BoundingVolume *vol,
                             const TransformState *net_transform,
                             const TransformState *modelview_transform) const;
 
@@ -105,7 +105,7 @@ private:
   void show_bounds(CullTraverserData &data, bool tight);
   static PT(Geom) make_bounds_viz(const BoundingVolume *vol);
   PT(Geom) make_tight_bounds_viz(PandaNode *node) const;
-  static LVertex compute_point(const BoundingSphere *sphere, 
+  static LVertex compute_point(const BoundingSphere *sphere,
                                PN_stdfloat latitude, PN_stdfloat longitude);
   static CPT(RenderState) get_bounds_outer_viz_state();
   static CPT(RenderState) get_bounds_inner_viz_state();
@@ -121,12 +121,11 @@ private:
   bool _has_tag_state_key;
   string _tag_state_key;
   CPT(RenderState) _initial_state;
-  bool _depth_offset_decals;
   PT(GeometricBoundingVolume) _view_frustum;
   CullHandler *_cull_handler;
   PortalClipper *_portal_clipper;
   bool _effective_incomplete_render;
-  
+
 public:
   static TypeHandle get_class_type() {
     return _type_handle;
@@ -150,4 +149,4 @@ private:
 #endif
 
 
-  
+

+ 22 - 96
panda/src/pgraph/cullableObject.I

@@ -12,6 +12,7 @@
 //
 ////////////////////////////////////////////////////////////////////
 
+
 ////////////////////////////////////////////////////////////////////
 //     Function: CullableObject::Constructor
 //       Access: Public
@@ -19,9 +20,7 @@
 //               filled in later.
 ////////////////////////////////////////////////////////////////////
 INLINE CullableObject::
-CullableObject() :
-  _fancy(false)
-{
+CullableObject() {
 #ifdef DO_MEMORY_USAGE
   MemoryUsage::update_type(this, get_class_type());
 #endif
@@ -42,8 +41,7 @@ CullableObject(const Geom *geom, const RenderState *state,
   _state(state),
   _net_transform(net_transform),
   _modelview_transform(modelview_transform),
-  _internal_transform(scene_setup->get_cs_transform()->compose(modelview_transform)),
-  _fancy(false)
+  _internal_transform(scene_setup->get_cs_transform()->compose(modelview_transform))
 {
 #ifdef DO_MEMORY_USAGE
   MemoryUsage::update_type(this, get_class_type());
@@ -65,20 +63,17 @@ CullableObject(const Geom *geom, const RenderState *state,
   _state(state),
   _net_transform(net_transform),
   _modelview_transform(modelview_transform),
-  _internal_transform(internal_transform),
-  _fancy(false)
+  _internal_transform(internal_transform)
 {
 #ifdef DO_MEMORY_USAGE
   MemoryUsage::update_type(this, get_class_type());
 #endif
 }
-  
 
 ////////////////////////////////////////////////////////////////////
 //     Function: CullableObject::Copy Constructor
 //       Access: Public
-//  Description: Copies the CullableObject, but does not copy its
-//               children (decals).
+//  Description: Copies the CullableObject.
 ////////////////////////////////////////////////////////////////////
 INLINE CullableObject::
 CullableObject(const CullableObject &copy) :
@@ -88,8 +83,7 @@ CullableObject(const CullableObject &copy) :
   _state(copy._state),
   _net_transform(copy._net_transform),
   _modelview_transform(copy._modelview_transform),
-  _internal_transform(copy._internal_transform),
-  _fancy(false)
+  _internal_transform(copy._internal_transform)
 {
 #ifdef DO_MEMORY_USAGE
   MemoryUsage::update_type(this, get_class_type());
@@ -99,12 +93,10 @@ CullableObject(const CullableObject &copy) :
 ////////////////////////////////////////////////////////////////////
 //     Function: CullableObject::Copy Assignment Operator
 //       Access: Public
-//  Description: Copies the CullableObject, but does not copy its
-//               children (decals).
+//  Description: Copies the CullableObject.
 ////////////////////////////////////////////////////////////////////
 INLINE void CullableObject::
 operator = (const CullableObject &copy) {
-  nassertv(!_fancy);
   _geom = copy._geom;
   _munger = copy._munger;
   _munged_data = copy._munged_data;
@@ -112,29 +104,7 @@ operator = (const CullableObject &copy) {
   _net_transform = copy._net_transform;
   _modelview_transform = copy._modelview_transform;
   _internal_transform = copy._internal_transform;
-}
-
-////////////////////////////////////////////////////////////////////
-//     Function: CullableObject::is_fancy
-//       Access: Public
-//  Description: Returns true if the object has something fancy to it:
-//               decals, maybe, or a draw_callback, that prevents it
-//               from being rendered inline.
-////////////////////////////////////////////////////////////////////
-INLINE bool CullableObject::
-is_fancy() const {
-  return _fancy;
-}
-
-////////////////////////////////////////////////////////////////////
-//     Function: CullableObject::has_decals
-//       Access: Public
-//  Description: Returns true if the object has decals associated with
-//               it.
-////////////////////////////////////////////////////////////////////
-INLINE bool CullableObject::
-has_decals() const {
-  return _fancy && (_next != (CullableObject *)NULL);
+  _draw_callback = copy._draw_callback;
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -146,8 +116,17 @@ has_decals() const {
 ////////////////////////////////////////////////////////////////////
 INLINE void CullableObject::
 draw(GraphicsStateGuardianBase *gsg, bool force, Thread *current_thread) {
-  if (_fancy) {
-    draw_fancy(gsg, force, current_thread);
+  if (_draw_callback != (CallbackObject *)NULL) {
+    // It has a callback associated.
+    gsg->clear_before_callback();
+    gsg->set_state_and_transform(_state, _internal_transform);
+    GeomDrawCallbackData cbdata(this, gsg, force);
+    _draw_callback->do_callback(&cbdata);
+    if (cbdata.get_lost_state()) {
+      // Tell the GSG to forget its state.
+      gsg->clear_state_and_transform();
+    }
+    // Now the callback has taken care of drawing.
   } else {
     nassertv(_geom != (Geom *)NULL);
     gsg->set_state_and_transform(_state, _internal_transform);
@@ -184,43 +163,7 @@ request_resident() const {
 ////////////////////////////////////////////////////////////////////
 INLINE void CullableObject::
 set_draw_callback(CallbackObject *draw_callback) {
-  make_fancy();
-  if (draw_callback != _draw_callback) {
-    if (_draw_callback != (CallbackObject *)NULL) {
-      unref_delete(_draw_callback);
-    }
-    _draw_callback = draw_callback;
-    if (_draw_callback != (CallbackObject *)NULL) {
-      _draw_callback->ref();
-    }
-  }
-}
-
-////////////////////////////////////////////////////////////////////
-//     Function: CullableObject::set_next
-//       Access: Public
-//  Description: Sets the next object in the decal chain.  This next
-//               object will be destructed when this object destructs.
-////////////////////////////////////////////////////////////////////
-INLINE void CullableObject::
-set_next(CullableObject *next) {
-  make_fancy();
-  nassertv(_next == (CullableObject *)NULL);
-  _next = next;
-}
-
-////////////////////////////////////////////////////////////////////
-//     Function: CullableObject::get_next
-//       Access: Public
-//  Description: Returns the next object in the decal chain, or NULL
-//               for the end of the chain.
-////////////////////////////////////////////////////////////////////
-INLINE CullableObject *CullableObject::
-get_next() const {
-  if (_fancy) {
-    return _next;
-  }
-  return NULL;
+  _draw_callback = draw_callback;
 }
 
 ////////////////////////////////////////////////////////////////////
@@ -233,23 +176,6 @@ flush_level() {
   _sw_sprites_pcollector.flush_level();
 }
 
-////////////////////////////////////////////////////////////////////
-//     Function: CullableObject::make_fancy
-//       Access: Private
-//  Description: Elevates this object to "fancy" status.  This means
-//               that the additional pointers, like _next and
-//               _draw_callback, have meaningful values and should be
-//               examined.
-////////////////////////////////////////////////////////////////////
-INLINE void CullableObject::
-make_fancy() {
-  if (!_fancy) {
-    _fancy = true;
-    _draw_callback = NULL;
-    _next = NULL;
-  }
-}
-
 ////////////////////////////////////////////////////////////////////
 //     Function: CullableObject::draw_inline
 //       Access: Private
@@ -266,7 +192,7 @@ draw_inline(GraphicsStateGuardianBase *gsg, bool force, Thread *current_thread)
 ////////////////////////////////////////////////////////////////////
 //     Function: CullableObject::SortPoints::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE CullableObject::SortPoints::
 SortPoints(const CullableObject::PointData *array) :
@@ -288,7 +214,7 @@ operator () (unsigned short a, unsigned short b) const {
 ////////////////////////////////////////////////////////////////////
 //     Function: CullableObject::SourceFormat::operator <
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 INLINE bool CullableObject::SourceFormat::
 operator < (const CullableObject::SourceFormat &other) const {
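
With the fancy/decal chain removed, a draw callback is simply a CallbackObject held directly by the CullableObject and invoked from draw() above. A hedged sketch of such a callback, assuming the CallbackObject and GeomDrawCallbackData interfaces referenced in the diff (the class name is illustrative):

#include "callbackObject.h"
#include "geomDrawCallbackData.h"

class RawGLDrawCallback : public CallbackObject {
public:
  virtual void do_callback(CallbackData *cbdata) {
    GeomDrawCallbackData *data = DCAST(GeomDrawCallbackData, cbdata);
    // Issue any custom rendering here, then let Panda draw the Geom normally.
    data->upcall();
    // If the callback touched GL state behind the GSG's back, report it so
    // draw() calls clear_state_and_transform() afterwards.
    data->set_lost_state(true);
  }
};

An instance would then be handed to set_draw_callback(), which after this change simply stores the pointer in _draw_callback.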

+ 4 - 116
panda/src/pgraph/cullableObject.cxx

@@ -30,7 +30,6 @@
 #include "geomTriangles.h"
 #include "light.h"
 #include "lightMutexHolder.h"
-#include "geomDrawCallbackData.h"
 
 CullableObject::FormatMap CullableObject::_format_map;
 LightMutex CullableObject::_format_lock;
@@ -180,41 +179,13 @@ munge_geom(GraphicsStateGuardianBase *gsg,
 #endif
   }
 
-  if (_fancy) {
-    // Only check the _next pointer if the _fancy flag is set.
-    if (_next != (CullableObject *)NULL) {
-      if (_next->_state != (RenderState *)NULL) {
-        _next->munge_geom(gsg, gsg->get_geom_munger(_next->_state, current_thread),
-                          traverser, force);
-      } else {
-        _next->munge_geom(gsg, munger, traverser, force);
-      }
-    }
-  }
-
   return true;
 }
 
-////////////////////////////////////////////////////////////////////
-//     Function: CullableObject::Destructor
-//       Access: Public
-//  Description: Automatically deletes the whole chain of these things.
-////////////////////////////////////////////////////////////////////
-CullableObject::
-~CullableObject() {
-  if (_fancy) {
-    // Only check the _next pointer if the _fancy flag is set.
-    if (_next != (CullableObject *)NULL) {
-      delete _next;
-    }
-    set_draw_callback(NULL);
-  }
-}
-
 ////////////////////////////////////////////////////////////////////
 //     Function: CullableObject::output
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 void CullableObject::
 output(ostream &out) const {
@@ -225,7 +196,6 @@ output(ostream &out) const {
   }
 }
 
-
 ////////////////////////////////////////////////////////////////////
 //     Function: CullableObject::munge_points_to_quads
 //       Access: Private
@@ -642,7 +612,7 @@ munge_texcoord_light_vector(const CullTraverser *traverser, bool force) {
     return true;
   }
 
-  if (!_munged_data->has_column(InternalName::get_vertex()) || 
+  if (!_munged_data->has_column(InternalName::get_vertex()) ||
       !_munged_data->has_column(InternalName::get_normal())) {
     // No vertex or normal; can't compute light vector.
     return true;
@@ -775,97 +745,15 @@ get_flash_hardware_state() {
   return flash_hardware_state;
 }
 
-////////////////////////////////////////////////////////////////////
-//     Function: CullableObject::draw_fancy
-//       Access: Private
-//  Description: Something fancy about this object.  Draw it properly.
-////////////////////////////////////////////////////////////////////
-void CullableObject::
-draw_fancy(GraphicsStateGuardianBase *gsg, bool force, 
-           Thread *current_thread) {
-  nassertv(_fancy);
-  if (_draw_callback != (CallbackObject *)NULL) {
-    // It has a callback associated.
-    gsg->clear_before_callback();
-    gsg->set_state_and_transform(_state, _internal_transform);
-    GeomDrawCallbackData cbdata(this, gsg, force);
-    _draw_callback->do_callback(&cbdata);
-    if (cbdata.get_lost_state()) {
-      // Tell the GSG to forget its state.
-      gsg->clear_state_and_transform();
-    }
-    // Now the callback has taken care of drawing.
-
-  } else if (_next != (CullableObject *)NULL) {
-    // It has decals.
-    draw_with_decals(gsg, force, current_thread);
-
-  } else {
-    // Huh, nothing fancy after all.  Somehow the _fancy flag got set
-    // incorrectly; that's a bug.
-    gsg->set_state_and_transform(_state, _internal_transform);
-    draw_inline(gsg, force, current_thread);
-    nassertv(false);
-  }
-}
-
-////////////////////////////////////////////////////////////////////
-//     Function: CullableObject::draw_with_decals
-//       Access: Private
-//  Description: Draws the current CullableObject, assuming it has
-//               attached decals.
-////////////////////////////////////////////////////////////////////
-void CullableObject::
-draw_with_decals(GraphicsStateGuardianBase *gsg, bool force, 
-                 Thread *current_thread) {
-  nassertv(_fancy && _next != (CullableObject *)NULL);  
-  // We draw with a three-step process.
-
-  // First, render all of the base geometry for the first pass.
-  CPT(RenderState) state = gsg->begin_decal_base_first();
-
-  CullableObject *base = this;
-  while (base != (CullableObject *)NULL && base->_geom != (Geom *)NULL) {
-    gsg->set_state_and_transform(base->_state->compose(state), base->_internal_transform);
-    base->draw_inline(gsg, force, current_thread);
-    
-    base = base->_next;
-  }
-
-  if (base != (CullableObject *)NULL) {
-    // Now, draw all the decals.
-    state = gsg->begin_decal_nested();
-
-    CullableObject *decal = base->_next;
-    while (decal != (CullableObject *)NULL) {
-      gsg->set_state_and_transform(decal->_state->compose(state), decal->_internal_transform);
-      decal->draw_inline(gsg, force, current_thread);
-      decal = decal->_next;
-    }
-  }
-
-  // And now, re-draw the base geometry, if required.
-  state = gsg->begin_decal_base_second();
-  if (state != (const RenderState *)NULL) {
-    base = this;
-    while (base != (CullableObject *)NULL && base->_geom != (Geom *)NULL) {
-      gsg->set_state_and_transform(base->_state->compose(state), base->_internal_transform);
-      base->draw_inline(gsg, force, current_thread);
-      
-      base = base->_next;
-    }
-  }
-}
-
 ////////////////////////////////////////////////////////////////////
 //     Function: CullableObject::SourceFormat::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 CullableObject::SourceFormat::
 SourceFormat(const GeomVertexFormat *format, bool sprite_texcoord) :
   _format(format),
-  _sprite_texcoord(sprite_texcoord) 
+  _sprite_texcoord(sprite_texcoord)
 {
   _retransform_sprites = retransform_sprites;
 }

+ 6 - 24
panda/src/pgraph/cullableObject.h

@@ -32,17 +32,17 @@
 #include "sceneSetup.h"
 #include "lightMutex.h"
 #include "callbackObject.h"
+#include "geomDrawCallbackData.h"
 
 class CullTraverser;
 
 ////////////////////////////////////////////////////////////////////
 //       Class : CullableObject
 // Description : The smallest atom of cull.  This is normally just a
-//               Geom and its associated state, but it also represent
-//               a number of Geoms to be drawn together, with a number
-//               of Geoms decalled onto them.
+//               Geom and its associated state, but it may also contain
+//               a draw callback.
 ////////////////////////////////////////////////////////////////////
-class EXPCL_PANDA_PGRAPH CullableObject 
+class EXPCL_PANDA_PGRAPH CullableObject
 #ifdef DO_MEMORY_USAGE
   : public ReferenceCount   // We inherit from ReferenceCount just to get the memory type tracking that MemoryUsage provides.
 #endif  // DO_MEMORY_USAGE
@@ -57,13 +57,10 @@ public:
                         const TransformState *net_transform,
                         const TransformState *modelview_transform,
                         const TransformState *internal_transform);
-    
+
   INLINE CullableObject(const CullableObject &copy);
   INLINE void operator = (const CullableObject &copy);
 
-  INLINE bool is_fancy() const;
-  INLINE bool has_decals() const;
-
   bool munge_geom(GraphicsStateGuardianBase *gsg,
                   GeomMunger *munger, const CullTraverser *traverser,
                   bool force);
@@ -74,11 +71,8 @@ public:
   INLINE static void flush_level();
 
   INLINE void set_draw_callback(CallbackObject *draw_callback);
-  INLINE void set_next(CullableObject *next);
-  INLINE CullableObject *get_next() const;
 
 public:
-  ~CullableObject();
   ALLOC_DELETED_CHAIN(CullableObject);
 
   void output(ostream &out) const;
@@ -91,17 +85,9 @@ public:
   CPT(TransformState) _net_transform;
   CPT(TransformState) _modelview_transform;
   CPT(TransformState) _internal_transform;
+  PT(CallbackObject) _draw_callback;
 
 private:
-  bool _fancy;
-
-  // Fancy things below.  These pointers are only meaningful if
-  // _fancy, above, is true.
-  CallbackObject *_draw_callback;
-  CullableObject *_next;  // for decals
-
-private:
-  INLINE void make_fancy();
   bool munge_points_to_quads(const CullTraverser *traverser, bool force);
   bool munge_texcoord_light_vector(const CullTraverser *traverser, bool force);
 
@@ -110,10 +96,6 @@ private:
 
   INLINE void draw_inline(GraphicsStateGuardianBase *gsg,
                           bool force, Thread *current_thread);
-  void draw_fancy(GraphicsStateGuardianBase *gsg, bool force, 
-                  Thread *current_thread);
-  void draw_with_decals(GraphicsStateGuardianBase *gsg, bool force, 
-                        Thread *current_thread);
 
 private:
   // This class is used internally by munge_points_to_quads().