Merge branch 'master' into cmake

Conflicts:
	panda/src/display/pythonGraphicsWindowProc.cxx
	panda/src/putil/config_util.cxx
Sam Edwards, 11 years ago
commit 6ee1170453
100 changed files with 1211 additions and 8625 deletions
  1. +4 -0  .gitignore
  2. +15 -0  .travis.yml
  3. +30 -0  LICENSE
  4. +83 -0  README.md
  5. +4 -1  direct/.gitignore
  6. +0 -8  direct/src/autorestart/Sources.pp
  7. +0 -901  direct/src/autorestart/autorestart.c
  8. +4 -0  direct/src/dcparser/dcPacker.cxx
  9. +1 -1  direct/src/dcparser/dcPacker.h
  10. +4 -9  direct/src/directnotify/Notifier.py
  11. +1 -2  direct/src/directscripts/Doxyfile.python
  12. +3 -12  direct/src/directscripts/packpanda.nsi
  13. +22 -182  direct/src/directscripts/profilepaths-osx.command
  14. +3 -17  direct/src/distributed/cConnectionRepository.h
  15. +5 -5  direct/src/ffi/DoGenPyCode.py
  16. +3 -4  direct/src/filter/CommonFilters.py
  17. +1 -1  direct/src/filter/Sources.pp
  18. +0 -29  direct/src/heapq/Sources.pp
  19. +0 -240  direct/src/heapq/heapq.cxx
  20. +1 -1  direct/src/http/HTMLTree.py
  21. +1 -1  direct/src/http/LandingPage.py
  22. +1 -1  direct/src/http/LandingPageHTML.py
  23. +1 -1  direct/src/http/WebRequest.py
  24. +69 -60  direct/src/p3d/AppRunner.py
  25. +8 -8  direct/src/p3d/DeploymentTools.py
  26. +10 -10  direct/src/p3d/FileSpec.py
  27. +18 -19  direct/src/p3d/HostInfo.py
  28. +2 -2  direct/src/p3d/InstalledHostData.py
  29. +55 -56  direct/src/p3d/PackageInfo.py
  30. +19 -19  direct/src/p3d/PackageInstaller.py
  31. +11 -11  direct/src/p3d/PackageMerger.py
  32. +144 -41  direct/src/p3d/Packager.py
  33. +30 -30  direct/src/p3d/PatchMaker.py
  34. +6 -6  direct/src/p3d/ScanDirectoryNode.py
  35. +4 -4  direct/src/p3d/coreapi.pdef
  36. +6 -10  direct/src/p3d/packp3d.py
  37. +27 -4  direct/src/p3d/panda3d.pdef
  38. +2 -2  direct/src/p3d/pdeploy.py
  39. +3 -3  direct/src/p3d/pmerge.py
  40. +1 -1  direct/src/p3d/ppackage.py
  41. +2 -2  direct/src/p3d/ppatcher.py
  42. +1 -1  direct/src/p3d/runp3d.py
  43. +2 -2  direct/src/p3d/thirdparty.pdef
  44. +2 -2  direct/src/particles/ForceGroup.py
  45. +1 -1  direct/src/particles/ParticleManagerGlobal.py
  46. +29 -30  direct/src/particles/Particles.py
  47. +1 -2  direct/src/particles/SpriteParticleRendererExt.py
  48. +36 -77  direct/src/plugin/p3dPythonRun.cxx
  49. +46 -43  direct/src/plugin/p3dX11SplashWindow.cxx
  50. +0 -504  direct/src/pyinst/Builder.py
  51. +0 -0  direct/src/pyinst/Sources.pp
  52. +0 -0  direct/src/pyinst/__init__.py
  53. +0 -246  direct/src/pyinst/archive.py
  54. +0 -226  direct/src/pyinst/archive_rt.py
  55. +0 -81  direct/src/pyinst/archivebuilder.py
  56. +0 -169  direct/src/pyinst/bindepend.py
  57. +0 -204  direct/src/pyinst/carchive.py
  58. +0 -157  direct/src/pyinst/carchive_rt.py
  59. +0 -178  direct/src/pyinst/finder.py
  60. +0 -138  direct/src/pyinst/icon.py
  61. +0 -487  direct/src/pyinst/imputil.py
  62. +0 -91  direct/src/pyinst/installutils.py
  63. +0 -85  direct/src/pyinst/ltoc.py
  64. +0 -42  direct/src/pyinst/mkarchive.py
  65. +0 -436  direct/src/pyinst/modulefinder.py
  66. +0 -317  direct/src/pyinst/resource.py
  67. +0 -131  direct/src/pyinst/tocfilter.py
  68. +0 -557  direct/src/showbase/BpDb.py
  69. +1 -1  direct/src/showbase/BufferViewer.py
  70. +0 -4  direct/src/showbase/DirectObject.py
  71. +0 -1251  direct/src/showbase/ElementTree.py
  72. +8 -36  direct/src/showbase/EventManager.py
  73. +3 -3  direct/src/showbase/ExceptionVarDump.py
  74. +1 -1  direct/src/showbase/Job.py
  75. +5 -75  direct/src/showbase/Messenger.py
  76. +0 -12  direct/src/showbase/PandaObject.py
  77. +1 -1  direct/src/showbase/PhysicsManagerGlobal.py
  78. +18 -967  direct/src/showbase/PythonUtil.py
  79. +71 -64  direct/src/showbase/ShowBase.py
  80. +24 -23  direct/src/showbase/VFSImporter.py
  81. +11 -22  direct/src/showutil/FreezeTool.py
  82. +28 -28  direct/src/stdpy/file.py
  83. +3 -3  direct/src/stdpy/pickle.py
  84. +30 -31  direct/src/stdpy/thread.py
  85. +48 -51  direct/src/stdpy/threading.py
  86. +2 -2  direct/src/stdpy/threading2.py
  87. +15 -15  direct/src/task/Task.py
  88. +0 -71  direct/src/test/ModelScreenShot.py
  89. +0 -6  direct/src/test/ModelScreenShotGlobals.py
  90. +0 -0  direct/src/test/Sources.pp
  91. +0 -0  direct/src/test/__init__.py
  92. +2 -5  doc/Config.pp.sample
  93. +0 -1  doc/INSTALL-MK
  94. +3 -8  doc/INSTALL-PP
  95. +0 -33  doc/README
  96. +21 -0  doc/man/bam-info.1
  97. +31 -0  doc/man/bam2egg.1
  98. +75 -0  doc/man/dae2egg.1
  99. +16 -0  doc/man/dxf-points.1
  100. +72 -0  doc/man/dxf2egg.1

+ 4 - 0
.gitignore

@@ -0,0 +1,4 @@
+/built_x64
+/built
+/samples
+/thirdparty

+ 15 - 0
.travis.yml

@@ -0,0 +1,15 @@
+language: cpp
+compiler:
+  - gcc
+  - clang
+before_script:
+  - sudo apt-get install python-dev libpng-dev zlib1g-dev libssl-dev libx11-dev libgl1-mesa-dev libxrandr-dev libxxf86dga-dev libxcursor-dev bison flex libfreetype6-dev libvorbis-dev libjpeg-dev libopenal-dev libode-dev nvidia-cg-toolkit
+script: python makepanda/makepanda.py --everything --git-commit $TRAVIS_COMMIT --installer
+notifications:
+  irc:
+    channels:
+      - "chat.freenode.net#panda3d"
+    on_success: change
+    on_failure: always
+    use_notice: true
+    skip_join: true

+ 30 - 0
LICENSE

@@ -0,0 +1,30 @@
+Copyright (c) 2008, Carnegie Mellon University.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+3. Neither the name of Carnegie Mellon University nor the names of
+   other contributors may be used to endorse or promote products
+   derived from this software without specific prior written
+   permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHORS "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+(This is the Modified BSD License.  See also
+http://www.opensource.org/licenses/bsd-license.php )

+ 83 - 0
README.md

@@ -0,0 +1,83 @@
+[![Build Status](https://travis-ci.org/panda3d/panda3d.svg?branch=master)](https://travis-ci.org/panda3d/panda3d)
+
+<img src="https://avatars2.githubusercontent.com/u/590956?v=3&s=200" align="right" />
+
+Panda3D
+=======
+
+Panda3D is a game engine, a framework for 3D rendering and game development for
+Python and C++ programs.  Panda3D is open-source and free for any purpose,
+including commercial ventures, thanks to its
+[liberal license](https://www.panda3d.org/license.php).  To learn more about
+Panda3D's capabilities, visit the [gallery](https://www.panda3d.org/gallery.php)
+and the [feature list](https://www.panda3d.org/features.php).  To learn how to
+use Panda3D, check the [documentation](https://www.panda3d.org/documentation.php)
+resources. If you get stuck, ask for help from our active
+[community](https://www.panda3d.org/community.php).
+
+Panda3D is licensed under the Modified BSD License.  See the LICENSE file for
+more details.
+
+Building Panda3D
+================
+
+Windows
+-------
+
+We currently build using the Microsoft Visual C++ 2010 compiler.  You do not
+need Microsoft Visual Studio to build Panda3D, though - the relevant compilers
+are included as part of the Windows 7.1 SDK.
+
+You will also need to have the third-party dependency libraries available for
+the build scripts to use.  These are available from here:
+https://www.panda3d.org/forums/viewtopic.php?f=9&t=16346
+
+After acquiring these dependencies, you may simply build Panda3D from the
+command prompt using the following command:
+
+```bash
+makepanda\makepanda.bat --everything --installer
+```
+
+When the build succeeds, it will produce an .exe file that you can use to
+install Panda3D on your system.
+
+Linux
+-----
+
+Building Panda3D on Linux is easy.  All you need is to invoke the makepanda
+script using the version of Python that you want Panda3D to be built against.
+
+Run makepanda.py with the --help option to see which options are available.
+Usually, you will want to specify the --everything option (which builds with
+support for all features for which it detects the prerequisite dependencies)
+and the --installer option (which produces an installable .deb or .rpm file
+for you to install, depending on your distribution).
+
+The following command illustrates how to build Panda3D with some common
+options:
+```bash
+python2.7 makepanda/makepanda.py --everything --installer --no-egl --no-gles --no-gles2
+```
+
+You will probably see some warnings saying that it's unable to find several
+dependency packages.  You should determine which ones you want to include in
+your build and install the respective development packages.  You may visit
+[this manual page](https://www.panda3d.org/manual/index.php/Dependencies)
+for an overview of the various dependencies.
+
+If you are on Ubuntu, this command should cover the most frequently
+used third-party packages:
+
+```bash
+sudo apt-get install python-dev libpng-dev zlib1g-dev libssl-dev libx11-dev libgl1-mesa-dev libxrandr-dev libxxf86dga-dev libxcursor-dev bison flex libfreetype6-dev libvorbis-dev libjpeg-dev libeigen3-dev libopenal-dev libode-dev libbullet-dev nvidia-cg-toolkit
+```
+
+Once Panda3D has built, you can either install the .deb or .rpm package that
+it produced (if relevant to your platform, and you added --installer).  On
+other systems, you will need to use the installpanda script to install it onto
+your system.  Careful: it is not easy to uninstall Panda3D in this way!
+
+```bash
+python2.7 makepanda/installpanda.py --prefix=/usr/local
+```

+ 4 - 1
direct/.gitignore

@@ -1,5 +1,8 @@
 *.pyc
 /__init__.py
-/built/
+# These are files that are generated within the source tree by the
+# ppremake system.
 Makefile
 pp.dep
+/built/
+Opt?-*

+ 0 - 8
direct/src/autorestart/Sources.pp

@@ -1,8 +0,0 @@
-#begin bin_target
-  // This program only compiles on Unix.
-  #define BUILD_TARGET $[UNIX_PLATFORM]
-  #define C++FLAGS -DWITHIN_PANDA
-
-  #define TARGET autorestart
-  #define SOURCES autorestart.c
-#end bin_target

+ 0 - 901
direct/src/autorestart/autorestart.c

@@ -1,901 +0,0 @@
-/* Filename: autorestart.c
- * Created by:  drose (05Sep02)
- *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- *
- * PANDA 3D SOFTWARE
- * Copyright (c) Carnegie Mellon University.  All rights reserved.
- *
- * All use of this software is subject to the terms of the revised BSD
- * license.  You should have received a copy of this license along
- * with this source code in a file named "LICENSE."
- *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-#ifdef WITHIN_PANDA
-#include "dtoolbase.h"
-#endif
-
-#include <getopt.h>
-#include <stdio.h>
-#include <errno.h>
-#include <string.h>  /* for strerror */
-#include <unistd.h>
-#include <sys/types.h>
-#include <sys/wait.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <time.h>
-#include <signal.h>
-#include <stdlib.h>
-#include <assert.h>
-#include <pwd.h>
-#include <grp.h>
-
-#ifdef HAVE_LIBCURL
-#include <curl/curl.h>
-#endif
-
-/* The maximum number of seconds to wait for a process to go away
-   after issuing SIGTERM.  This is only used in watchdog mode, when -W
-   is provided on the command line. */
-#define MAX_WAITTERM_SEC 10
-
-char **params = NULL;
-char *logfile_name = NULL;
-char *pidfile_name = NULL;
-int dont_fork = 0;
-char *watchdog_url = NULL;
-int watchdog_start_sec = 0;
-int watchdog_cycle_sec = 0;
-int watchdog_timeout_sec = 0;
-char *startup_username = NULL;
-char *startup_groupname = NULL;
-char *startup_chdir = NULL;
-int logfile_fd = -1;
-int stop_on_terminate = 0;
-int stop_always = 0;
-char *respawn_script = NULL;
-int respawn_count_time = 0;
-
-/* If requested, delay these many seconds between restart attempts */
-int respawn_delay_time = 5;
-
-
-/* We shouldn't respawn more than (spam_respawn_count - 1) times over
-   spam_respawn_time seconds. */
-int spam_respawn_count = 5;
-int spam_respawn_time = 60;
-int spam_restart_delay_time = 600;  /* Optionally, do not exit if we spam too much; simply sleep for this many seconds*/
-
-
-
-pid_t child_pid = 0;
-pid_t watchdog_pid = 0;
-
-#define TIME_BUFFER_SIZE 128
-
-/* Keep track of the frequency with which we respawn, so we can report
-   this to our respawn script. */
-typedef struct respawn_record_struct {
-  time_t _time;
-  struct respawn_record_struct *_next;
-} respawn_record;
-
-respawn_record *respawns = NULL;
-
-int
-record_respawn(time_t now) {
-  /* Records the respawning event in the respawn_record, and returns
-     the number of respawns in the last respawn_count_time
-     interval. */
-  respawn_record *rec;
-  respawn_record *next;
-  int count;
-
-  if (respawn_count_time <= 0) {
-    /* We're not tracking respawns if respawn_count_time is 0. */
-    return 0;
-  }
-
-  rec = (respawn_record *)malloc(sizeof(respawn_record));
-  rec->_time = now;
-  rec->_next = respawns;
-  respawns = rec;
-
-  /* Now walk through the rest of the list and count up the number of
-     respawn events until we reach a record more than
-     respawn_count_time seconds old. */
-  count = 0;
-  while (rec->_next != NULL &&
-         (now - rec->_time) <= respawn_count_time) {
-    rec = rec->_next;
-    count++;
-  }
-
-  /* The remaining respawn records get removed. */
-  next = rec->_next;
-  rec->_next = NULL;
-  while (next != NULL) {
-    rec = next;
-    next = rec->_next;
-    free(rec);
-  }
-
-  return count;
-}
-
-void
-invoke_respawn_script(time_t now) {
-  char buffer[32];
-  char *new_command;
-  int new_command_length;
-
-  /* The process is about to be respawned; run the script that we were
-     given on the command line. */
-  if (respawn_count_time <= 0) {
-    /* We're not counting respawn times, so just run the script
-       directly. */
-    system(respawn_script);
-
-  } else {
-    /* We are counting respawn times, so append that information as a
-       parameter to the command. */
-    sprintf(buffer, " %d", record_respawn(now));
-    new_command_length = strlen(respawn_script) + strlen(buffer);
-    new_command = (char *)malloc(new_command_length + 1);
-    strcpy(new_command, respawn_script);
-    strcat(new_command, buffer);
-    assert(strlen(new_command) == new_command_length);
-
-    system(new_command);
-
-    free(new_command);
-  }
-}
-
-/* A callback function passed to libcurl that simply discards the data
-   retrieved from the server.  We only care about the HTTP status. */
-size_t 
-watchdog_bitbucket(void *ptr, size_t size, size_t nmemb, void *userdata) {
-  return size * nmemb;
-}
-
-/* Waits up to timeout_ms for a particular child to terminate.
-   Returns 0 if the timeout expires. */
-pid_t 
-waitpid_timeout(pid_t child_pid, int *status_ptr, int timeout_ms) {
-  pid_t result;
-  struct timeval now, tv;
-  int now_ms, start_ms, elapsed_ms;
-  
-  gettimeofday(&now, NULL);
-  start_ms = now.tv_sec * 1000 + now.tv_usec / 1000;
-    
-  result = waitpid(child_pid, status_ptr, WNOHANG);
-  while (result == 0) {
-    gettimeofday(&now, NULL);
-    now_ms = now.tv_sec * 1000 + now.tv_usec / 1000;
-    elapsed_ms = now_ms - start_ms;
-    
-    if (elapsed_ms > timeout_ms) {
-      /* Tired of waiting. */
-      return 0;
-    }
-    
-    /* Yield the timeslice and wait some more. */
-    tv.tv_sec = 0;
-    tv.tv_usec = 1;
-    select(0, NULL, NULL, NULL, &tv);
-    result = waitpid(child_pid, status_ptr, WNOHANG);
-  }
-  if (result == -1) {
-    perror("waitpid");
-  }
-
-  return result;
-}
-
-
-/* Poll the requested URL until a failure or timeout occurs, or until
-   the child terminates on its own.  Returns 1 on HTTP failure or
-   timeout, 0 on self-termination.  In either case, *status_ptr is
-   filled in with the status value returned by waitpid().*/
-int 
-do_watchdog(int *status_ptr) {
-#ifndef HAVE_LIBCURL
-  fprintf(stderr, "Cannot watchdog; no libcurl available.\n");
-  return 0;
-#else  /* HAVE_LIBCURL */
-
-  CURL *curl;
-  CURLcode res;
-  char error_buffer[CURL_ERROR_SIZE];
-  pid_t wresult;
-
-  // Before we start polling the URL, wait at least start milliseconds.
-  wresult = waitpid_timeout(child_pid, status_ptr, watchdog_start_sec * 1000);
-  if (wresult == child_pid) {
-    // The child terminated on its own before we got started.
-    return 0;
-  }
-
-  curl = curl_easy_init();
-  if (!curl) {
-    fprintf(stderr, "Cannot watchdog; curl failed to init.\n");
-    return 0;
-  }
-
-  curl_easy_setopt(curl, CURLOPT_URL, watchdog_url);
-  /*curl_easy_setopt(curl, CURLOPT_VERBOSE, 1);*/
-  curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, watchdog_timeout_sec * 1000);
-  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, watchdog_bitbucket);
-  curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, error_buffer);
-  curl_easy_setopt(curl, CURLOPT_USERAGENT, "autorestart");
-  curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
-  curl_easy_setopt(curl, CURLOPT_FRESH_CONNECT, 1);
-  curl_easy_setopt(curl, CURLOPT_FORBID_REUSE, 1);
-
-  res = curl_easy_perform(curl);
-  while (res == 0) {
-    /* 0: The HTTP request finished successfully (but might or might
-       not have returned an error code like a 404). */
-    long http_response = 0;
-    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &http_response);
-    if ((http_response / 100) != 2) {
-      /* Anything in the 200 range is deemed success.  Anything else
-         is deemed failure. */
-      fprintf(stderr, "%s returned %ld\n", watchdog_url, http_response);
-      break;
-    }
-
-    wresult = waitpid_timeout(child_pid, status_ptr, watchdog_cycle_sec * 1000);
-    if (wresult == child_pid) {
-      /* The process terminated on its own.  Return 0 to indicate this. */
-      return 0;
-    }
-
-    res = curl_easy_perform(curl);
-  }
-
-  curl_easy_cleanup(curl);
-
-  /* Failed to retrieve the watchdog URL. */
-  if (res != 0) {
-    fprintf(stderr, "Failed to contact %s: %s\n", watchdog_url, error_buffer);
-  }
-  
-  /* Kill the child process and wait for it to go away. */
-  kill(child_pid, SIGTERM);
-
-  pid_t result = waitpid_timeout(child_pid, status_ptr, MAX_WAITTERM_SEC * 1000);
-  if (result != child_pid) {
-    if (result == -1) {
-      perror("waitpid");
-    } else {
-      /* SIGTERM didn't make the process die.  Try SIGKILL. */
-      fprintf(stderr, "Force-killing child process\n");
-      kill(child_pid, SIGKILL);
-      result = waitpid_timeout(child_pid, status_ptr, MAX_WAITTERM_SEC * 1000);
-      if (result == -1) {
-        perror("waitpid");
-      }
-    }
-  }
-
-  /* Return 1 to indicate we killed the child due to an HTTP error. */
-  return 1;
-#endif  /* HAVE_LIBCURL */
-}
-
-void
-exec_process() {
-  /* First, output the command line to the log file. */
-  char **p;
-  for (p = params; *p != NULL; ++p) {
-    fprintf(stderr, "%s ", *p);
-  }
-  fprintf(stderr, "\n");
-  execvp(params[0], params);
-  fprintf(stderr, "Cannot exec %s: %s\n", params[0], strerror(errno));
-
-  /* Exit with a status of 0, to indicate to the parent process that
-     we should stop. */
-  exit(0); 
-}
-
-int
-spawn_process() {
-  /* Spawns the child process.  Returns true if the process terminated
-     by itself and should be respawned, false if it was explicitly
-     killed (or some other error condition exists), and it should not
-     respawn any more. */
-  pid_t wresult;
-  int status;
-  int error_exit;
-
-  child_pid = fork();
-  if (child_pid < 0) {
-    /* Fork error. */
-    perror("fork");
-    return 0;
-  }
-
-  if (child_pid == 0) {
-    /* Child.  Exec the process. */
-    fprintf(stderr, "Child pid is %d.\n", getpid());
-    exec_process();
-    /* Shouldn't get here. */
-    exit(1);
-  }
-
-  /* Parent. */
-
-  error_exit = 0;
-
-  if (watchdog_url != NULL) {
-    /* If we're watchdogging, then go check the URL.  This function
-       won't return until the URL fails or the child exits. */
-    error_exit = do_watchdog(&status);
-
-  } else {
-    /* If we're not watchdogging, then just wait for the child to
-       terminate, and diagnose the reason. */
-    wresult = waitpid(child_pid, &status, 0);
-    if (wresult < 0) {
-      perror("waitpid");
-      return 0;
-    }
-  }
-
-  /* Now that we've returned from waitpid, clear the child pid number
-     so our signal handler doesn't get too confused. */
-  child_pid = 0;
-
-  if (error_exit) {
-    /* An HTTP error exit is a reason to respawn. */
-    return 1;
-
-  } else if (WIFSIGNALED(status)) {
-    int signal = WTERMSIG(status);
-    fprintf(stderr, "\nprocess caught signal %d.\n\n", signal);
-    /* A signal exit is a reason to respawn unless the signal is TERM
-       or KILL. */
-    return !stop_on_terminate || (signal != SIGTERM && signal != SIGKILL);
-
-  } else {
-    int exit_status = WEXITSTATUS(status);
-    fprintf(stderr, "\nprocess exited with status %d.\n\n", WEXITSTATUS(status));
-    /* Normal exit is a reason to respawn if the status indicates failure. */
-    return !stop_on_terminate || (exit_status != 0);
-  }
-}
-
-void
-sigterm_handler() {
-  pid_t wresult;
-  int status;
-  time_t now;
-  char time_buffer[TIME_BUFFER_SIZE];
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-
-  fprintf(stderr, "\nsigterm caught at %s; shutting down.\n", time_buffer);
-  if (child_pid == 0) {
-    fprintf(stderr, "no child process.\n\n");
-
-  } else {
-    kill(child_pid, SIGTERM);
-
-    wresult = waitpid(child_pid, &status, 0);
-    if (wresult < 0) {
-      perror("waitpid");
-    } else {
-      fprintf(stderr, "child process terminated.\n\n");
-    }
-  }
-  exit(1);
-}
-
-void
-sighup_handler() {
-  time_t now;
-  char time_buffer[TIME_BUFFER_SIZE];
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-
-  fprintf(stderr, "\nsighup caught at %s.\n", time_buffer);
-  if (child_pid == 0) {
-    fprintf(stderr, "no child process.\n\n");
-
-  } else {
-    kill(child_pid, SIGHUP);
-  }
-}
-
-void 
-sigalarm_handler() {
-  fprintf(stderr, "sleep epoch was complete.\n");
-}
-
-void
-do_autorestart() {
-  char time_buffer[TIME_BUFFER_SIZE];
-  time_t now;
-  time_t *spam_respawn = NULL;
-  int sri, num_sri;
-  struct sigaction sa;
-
-  if (spam_respawn_count > 1) {
-    spam_respawn = (time_t *)malloc(sizeof(time_t) * spam_respawn_count);
-  }
-
-  /* Make our process its own process group. */
-  setpgid(0, 0);
-
-  /* Set up a signal handler to trap SIGTERM. */
-  sa.sa_handler = sigterm_handler;
-  sigemptyset(&sa.sa_mask);
-  sa.sa_flags = 0;
-  if (sigaction(SIGTERM, &sa, NULL) < 0) {
-    perror("sigaction");
-  }
-
-  /* Set up a signal handler to trap SIGHUP.  We pass this into the
-     child. */
-  sa.sa_handler = sighup_handler;
-  sigemptyset(&sa.sa_mask);
-  sa.sa_flags = 0;
-  if (sigaction(SIGHUP, &sa, NULL) < 0) {
-    perror("sigaction");
-  }
-
-  if (logfile_fd >= 0) {
-    /* If we have a logfile, dup it onto stdout and stderr. */
-    dup2(logfile_fd, STDOUT_FILENO);
-    dup2(logfile_fd, STDERR_FILENO);
-    close(logfile_fd);
-  }
-
-  /* Make sure stdin is closed. */
-  close(STDIN_FILENO);
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-  fprintf(stderr, "autorestart begun at %s.\n", time_buffer);
-
-  if (pidfile_name != NULL) {
-    unlink(pidfile_name);
-    FILE *pidfile = fopen(pidfile_name, "w");
-    if (pidfile == NULL) {
-      fprintf(stderr, "Could not write pidfile %s\n", pidfile_name);
-    } else {
-      fprintf(pidfile, "%d\n", getpid());
-      fclose(pidfile);
-    }
-  }
-
-  sri = 1;
-  num_sri = 1;
-  if (spam_respawn_count > 1) {
-    spam_respawn[1] = now;
-  }
-  
-  while (spawn_process()) {
-    now = time(NULL);
-
-    if (respawn_script != NULL) {
-      invoke_respawn_script(now);
-    }
-    
-    if (respawn_delay_time) {
-      sleep(respawn_delay_time);
-    }
-
-    /* Make sure we're not respawning too fast. */
-    if (spam_respawn_count > 1) {
-      sri = (sri + 1) % spam_respawn_count;
-      spam_respawn[sri] = now;
-      if (num_sri < spam_respawn_count) {
-        num_sri++;
-      } else {
-        time_t last = spam_respawn[(sri + 1) % spam_respawn_count];
-        if (now - last < spam_respawn_time) 
-        {
-          if(!spam_restart_delay_time) 
-          {
-            fprintf(stderr, "respawning too fast, giving up.\n");
-            break;
-          } 
-          else 
-          {
-            num_sri = 1; /* reset num_sri */
-            fprintf(stderr, "respawning too fast, will sleep for %d seconds.\n", spam_restart_delay_time);
-            signal (SIGALRM, sigalarm_handler);
-            alarm(spam_restart_delay_time);
-            pause();
-            signal (SIGALRM, SIG_IGN);
-          }
-        }
-      }
-    }
-    
-    if (stop_always) {
-      fprintf(stderr, "instructed to not autorestart, exiting.\n");
-      break;
-    }
-      
-    strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-    fprintf(stderr, "respawning at %s.\n", time_buffer);
-  }
-
-  now = time(NULL);
-  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
-  fprintf(stderr, "autorestart terminated at %s.\n", time_buffer);
-  exit(0);
-}
-
-void
-double_fork() {
-  pid_t child, grandchild, wresult;
-  int status;
-
-  /* Fork once, then again, to disassociate the child from the command
-     shell process group. */
-  child = fork();
-  if (child < 0) {
-    /* Failure to fork. */
-    perror("fork");
-    exit(1);
-  }
-
-  if (child == 0) {
-    /* Child.  Fork again. */
-    grandchild = fork();
-    if (grandchild < 0) {
-      perror("fork");
-      exit(1);
-    }
-
-    if (grandchild == 0) {
-      /* Grandchild.  Begin useful work. */
-      do_autorestart();
-      /* Shouldn't get here. */
-      exit(1);
-    }
-
-    /* Child.  Report the new pid, then terminate gracefully. */
-    fprintf(stderr, "Spawned, monitoring pid is %d.\n", grandchild);
-    exit(0);
-  }
-
-  /* Parent.  Wait for the child to terminate, then return. */
-  wresult = waitpid(child, &status, 0);
-  if (wresult < 0) {
-    perror("waitpid");
-    exit(1);
-  }
-
-  if (!WIFEXITED(status)) {
-    if (WIFSIGNALED(status)) {
-      fprintf(stderr, "child caught signal %d unexpectedly.\n", WTERMSIG(status));
-    } else {
-      fprintf(stderr, "child exited with status %d.\n", WEXITSTATUS(status));
-    }
-    exit(1);
-  }
-}
-
-void
-usage() {
-  fprintf(stderr,
-          "\n"
-          "autorestart [opts] program [args . . . ]\n"
-          "autorestart -h\n\n");
-}
-
-void
-help() {
-  usage();
-  fprintf(stderr,
-          "This program is used to run a program as a background task and\n"
-          "automatically restart it should it terminate for any reason other\n"
-          "than normal exit or explicit user kill.\n\n"
-
-          "If the program exits with a status of 0, indicating successful\n"
-          "completion, it is not restarted.\n\n"
-
-          "If the program is terminated via a TERM or KILL signal (e.g. via\n"
-          "kill [pid] or kill -9 [pid]), it is assumed the user meant for the\n"
-          "process to stop, and it is not restarted.\n\n"
-
-          "Options:\n\n"
-
-          "  -l logfilename\n"
-          "     Route stdout and stderr from the child process into the indicated\n"
-          "     log file.\n\n"
-
-          "  -p pidfilename\n"
-          "     Write the pid of the monitoring process to the indicated pidfile.\n\n"
-          "  -f\n"
-          "     Don't fork autorestart itself; run it as a foreground process. \n"
-          "     (Normally, autorestart forks itself to run as a background process.)\n"
-          "     In this case, the file named by -p is not used.\n\n"
-          
-          "  -n\n"
-          "     Do not attempt to restart the process under any circumstance.\n"
-          "     The program can still be used to execute a script on abnormal\n"
-          "     process termination.\n\n"
-
-          "  -t\n"
-          "     Stop on terminate: don't restart if the child process exits\n"
-          "     normally or is killed with a SIGTERM.  With this flag, the\n"
-          "     child process will be restarted only if it exits with a\n"
-          "     non-zero exit status, or if it is killed with a signal other\n"
-          "     than SIGTERM.  Without this flag, the default behavior is to\n"
-          "     restart the child process if it exits for any reason.\n\n"
-
-          "  -r count,secs,sleep\n"
-          "     Sleep 'sleep' seconds if the process respawns 'count' times\n"
-          "     within 'secs' seconds.  This is designed to prevent respawning\n"
-          "     from using too many system resources if something is wrong with\n"
-          "     the child process.  The default value is %d,%d,%d. Use -r 0,0,0\n"
-          "     to disable this feature.\n\n"
-
-          "  -s \"command\"\n"
-          "     Run the indicated command or script each time the process is\n"
-          "     respawned, using the system() call.  This may be useful, for\n"
-          "     instance, to notify an operator via email each time a respawn\n"
-          "     occurs.  If -c is also specified, an additional parameter will\n"
-          "     be appended to the command, indicating the number of times the\n"
-          "     respawn has occurred in the given time interval.\n\n"
-
-          "  -c secs\n"
-          "     Specifies the number of seconds over which to count respawn events\n"
-          "     for the purposes of passing an argument to the script named with\n"
-          "     -s.\n\n"
-
-          "  -d secs\n"
-          "     Specifies the number of seconds to delay for between restarts.\n"
-          "     The default is %d.\n\n"
-
-#ifdef HAVE_LIBCURL
-          "  -W watchdog_url,start,cycle,timeout\n"
-          "     Specifies an optional URL to watch while waiting for the process\n"
-          "     to terminate.  If this is specified, autorestart will start the process,\n"
-          "     wait start seconds, and then repeatedly poll the indicated URL\n"
-          "     every cycle seconds.  If a HTTP failure code is detected,\n"
-          "     or no response is received within timeout seconds, then the\n"
-          "     child is terminated and restarted.  The start, cycle, and timeout\n"
-          "     parameters are all required.\n\n"
-#endif  /* HAVE_LIBCURL */
-
-          "  -U username\n"
-          "     Change to the indicated user upon startup.  The logfile is still\n"
-          "     created as the initial user.\n\n"
-
-          "  -G groupname\n"
-          "     Change to the indicated group upon startup.\n\n"
-
-          "  -D dirname\n"
-          "     Change to the indicated working directory upon startup.  The logfile\n"
-          "     is still created relative to the initial startup directory.\n\n"
-
-          "  -h\n"
-          "     Output this help information.\n\n",
-          spam_respawn_count, spam_respawn_time, spam_restart_delay_time, respawn_delay_time);
-}
-
-void
-parse_int_triplet(char *param, int *a, int *b, int *c) {
-  char *comma;
-  char *comma2;
-  
-  comma = strchr(param, ',');
-  if (comma == NULL) {
-    fprintf(stderr, "Comma required: %s\n", param);
-    exit(1);
-  }
-
-  comma2 = strchr(comma+1, ',');
-  if (comma2 == NULL) {
-    fprintf(stderr, "Second comma required: %s\n", param);
-    exit(1);
-  }
-
-  *comma = '\0';
-  *comma2 = '\0';
-  
-  *a = atoi(param);
-  *b = atoi(comma + 1);
-  *c = atoi(comma2 + 1);
-}
-
-void 
-parse_watchdog(char *param) {
-  char *comma;
-  char *comma2;
-  char *comma3;
-
-#ifndef HAVE_LIBCURL
-  fprintf(stderr, "-W requires autorestart to have been compiled with libcurl support.\n");
-  exit(1);
-#endif  /* HAVE_LIBCURL */
-
-  comma = strrchr(param, ',');
-  if (comma == NULL) {
-    fprintf(stderr, "Comma required: %s\n", param);
-    exit(1);
-  }
-  *comma = '\0';
-
-  comma2 = strrchr(param, ',');
-  if (comma2 == NULL) {
-    *comma = ',';
-    fprintf(stderr, "Second comma required: %s\n", param);
-    exit(1);
-  }
-  *comma2 = '\0';
-
-  comma3 = strrchr(param, ',');
-  if (comma3 == NULL) {
-    *comma = ',';
-    *comma2 = ',';
-    fprintf(stderr, "Third comma required: %s\n", param);
-    exit(1);
-  }
-  *comma3 = '\0';
-
-  watchdog_url = param;
-  watchdog_start_sec = atoi(comma3 + 1);
-  watchdog_cycle_sec = atoi(comma2 + 1);
-  watchdog_timeout_sec = atoi(comma + 1);
-}
-
-
-int 
-main(int argc, char *argv[]) {
-  extern char *optarg;
-  extern int optind;
-  /* The initial '+' instructs GNU getopt not to reorder switches. */
-  static const char *optflags = "+l:p:fntr:s:c:d:W:U:G:D:h";
-  int flag;
-
-  flag = getopt(argc, argv, optflags);
-  while (flag != EOF) {
-    switch (flag) {
-    case 'l':
-      logfile_name = optarg;
-      break;
-
-    case 'p':
-      pidfile_name = optarg;
-      break;
-
-    case 'f':
-      dont_fork = 1;
-      break;
-
-    case 'n':
-      stop_always = 1;
-      break;
-
-    case 't':
-      stop_on_terminate = 1;
-      break;
-
-    case 'r':
-      parse_int_triplet(optarg, &spam_respawn_count, &spam_respawn_time, &spam_restart_delay_time);
-      break;
-
-    case 's':
-      respawn_script = optarg;
-      break;
-
-    case 'c':
-      respawn_count_time = atoi(optarg);
-      break;
-
-    case 'd':
-      respawn_delay_time = atoi(optarg);
-      break;
-
-    case 'W':
-      parse_watchdog(optarg);
-      break;
-
-    case 'U':
-      startup_username = optarg;
-      break;
-
-    case 'G':
-      startup_groupname = optarg;
-      break;
-
-    case 'D':
-      startup_chdir = optarg;
-      break;
-      
-    case 'h':
-      help();
-      return 1;
-
-    case '?':
-    case '+':
-      usage();
-      return 1;
-
-    default:
-      fprintf(stderr, "Unhandled switch: -%c\n", flag);
-      return 1;
-    }
-    flag = getopt(argc, argv, optflags);
-  }
-
-  argc -= (optind - 1);
-  argv += (optind - 1);
-
-  if (argc < 2) {
-    fprintf(stderr, "No program to execute given.\n");
-    usage();
-    return 1;
-  }
-
-  params = &argv[1];
-
-  if (logfile_name != NULL) {
-    logfile_fd = open(logfile_name, O_WRONLY | O_CREAT | O_TRUNC, 0666);
-    if (logfile_fd < 0) {
-      fprintf(stderr, "Cannot write to logfile %s: %s\n", 
-              logfile_name, strerror(errno));
-      return 1;
-    }
-    fprintf(stderr, "Generating output to %s.\n", logfile_name);
-  }
-
-  if (startup_chdir != NULL) {
-    if (chdir(startup_chdir) != 0) {
-      perror(startup_chdir);
-      return 1;
-    }
-  }
-
-  if (startup_groupname != NULL) {
-    struct group *grp;
-    grp = getgrnam(startup_groupname);
-    if (grp == NULL) {
-      perror(startup_groupname);
-      return 1;
-    }
-
-    if (setgid(grp->gr_gid) != 0) {
-      perror(startup_groupname);
-      return 1;
-    }
-  }
-
-  if (startup_username != NULL) {
-    struct passwd *pwd;
-    pwd = getpwnam(startup_username);
-    if (pwd == NULL) {
-      perror(startup_username);
-      return 1;
-    }
-
-    if (setuid(pwd->pw_uid) != 0) {
-      perror(startup_username);
-      return 1;
-    }
-  }
-
-  if (dont_fork) {
-    do_autorestart();
-  } else {
-    double_fork();
-  }
-
-  return 0;
-}
-

+ 4 - 0
direct/src/dcparser/dcPacker.cxx

@@ -20,6 +20,10 @@
 #include "dcSwitchParameter.h"
 #include "dcClass.h"
 
+#ifdef HAVE_PYTHON
+#include "py_panda.h"
+#endif
+
 DCPacker::StackElement *DCPacker::StackElement::_deleted_chain = NULL;
 int DCPacker::StackElement::_num_ever_allocated = 0;
 

+ 1 - 1
direct/src/dcparser/dcPacker.h

@@ -220,7 +220,7 @@ private:
   const DCPackerCatalog *_catalog;
   const DCPackerCatalog::LiveCatalog *_live_catalog;
 
-  class StackElement {
+  class EXPCL_DIRECT StackElement {
   public:
     // As an optimization, we implement operator new and delete here
     // to minimize allocation overhead during push() and pop().

+ 4 - 9
direct/src/directnotify/Notifier.py

@@ -4,7 +4,7 @@ for the programmer/user
 """
 from LoggerGlobal import defaultLogger
 from direct.showbase import PythonUtil
-from panda3d.core import ConfigVariableBool
+from panda3d.core import ConfigVariableBool, NotifyCategory, StreamWriter, Notify
 import time
 import types
 import sys
@@ -18,9 +18,8 @@ class Notifier:
     # with the C++ notify system.
     streamWriter = None
     if ConfigVariableBool('notify-integrate', True):
-        from panda3d.core import StreamWriter, Notify
         streamWriter = StreamWriter(Notify.out(), False)
-        
+
     showTime = ConfigVariableBool('notify-timestamp', False)
 
     def __init__(self, name, logger=None):
@@ -44,9 +43,6 @@ class Notifier:
         self.__warning = 1
         self.__debug = 0
         self.__logging = 0
-        
-        
-
 
     def setServerDelta(self, delta, timezone):
         """
@@ -62,7 +58,6 @@ class Notifier:
         # The following call is necessary to make the output from C++
         # notify messages show the same timestamp as those generated
         # from Python-level notify messages.
-        from pandac.PandaModules import NotifyCategory
         NotifyCategory.setServerDelta(self.serverDelta)
 
         self.info("Notify clock adjusted by %s (and timezone adjusted by %s hours) to synchronize with server." % (PythonUtil.formatElapsedSeconds(delta), (time.timezone - timezone) / 3600))
@@ -92,7 +87,7 @@ class Notifier:
 
     # Severity funcs
     def setSeverity(self, severity):
-        from pandac.PandaModules import NSDebug, NSInfo, NSWarning, NSError
+        from panda3d.core import NSDebug, NSInfo, NSWarning, NSError
         if severity >= NSError:
             self.setWarning(0)
             self.setInfo(0)
@@ -111,7 +106,7 @@ class Notifier:
             self.setDebug(1)
 
     def getSeverity(self):
-        from pandac.PandaModules import NSDebug, NSInfo, NSWarning, NSError
+        from panda3d.core import NSDebug, NSInfo, NSWarning, NSError
         if self.getDebug():
             return NSDebug
         elif self.getInfo():

+ 1 - 2
direct/src/directscripts/Doxyfile.python

@@ -643,8 +643,7 @@ RECURSIVE              = YES
 # excluded from the INPUT source files. This way you can easily exclude a 
 # subdirectory from a directory tree whose root is specified with the INPUT tag.
 
-EXCLUDE                = built/direct/test \
-                         built/direct/plugin \
+EXCLUDE                = built/direct/plugin \
                          built/direct/plugin_npapi \
                          built/direct/plugin_activex \
                          built/direct/plugin_installer \

+ 3 - 12
direct/src/directscripts/packpanda.nsi

@@ -97,20 +97,11 @@ Section "${SMDIRECTORY}" SecCore
         SetOutPath $INSTDIR\etc
         File /r "${PANDACONF}\*"
         SetOutPath $INSTDIR\direct\directscripts
-        !ifdef PPGAME
-        File /r /x CVS /x Opt?-Win32 "${PSOURCE}\direct\directscripts\*"
+        File /r /x CVS /x Opt?-Win32 "${PANDA}\direct\directscripts\*"
         SetOutPath $INSTDIR\direct\filter
-        File /r /x CVS /x Opt?-Win32 "${PSOURCE}\direct\filter\*.sha"
+        File /r /x CVS /x Opt?-Win32 "${PANDA}\direct\filter\*.sha"
         SetOutPath $INSTDIR\direct
-        File /r /x CVS /x Opt?-Win32 "${PSOURCE}\direct\*.py"
-        !else
-        File /r /x CVS /x Opt?-Win32 "${PSOURCE}\direct\src\directscripts\*"
-        SetOutPath $INSTDIR\direct\filter
-        File /r /x CVS /x Opt?-Win32 "${PSOURCE}\direct\src\filter\*.sha"
-        SetOutPath $INSTDIR\direct
-        File /r /x CVS /x Opt?-Win32 "${PSOURCE}\direct\src\*.py"
-        File "${PANDA}\tmp\__init__.py"
-        !endif
+        File /r /x CVS /x Opt?-Win32 "${PANDA}\direct\*.py"
         Delete "$INSTDIR\panda3d.py"
         Delete "$INSTDIR\panda3d.pyc"
         Delete "$INSTDIR\panda3d.pyo"

direct/src/distributed/cConnectionRepository.cxx

@@ -26,11 +26,7 @@
 #include "pStatTimer.h"
 
 #ifdef HAVE_PYTHON
-#ifndef CPPPARSER
-#include "py_panda.h"  
-IMPORT_THIS struct   Dtool_PyTypedObject Dtool_DatagramIterator;
-IMPORT_THIS struct   Dtool_PyTypedObject Dtool_DCClass;
-#endif
+#include "py_panda.h"
 #endif
 
 const string CConnectionRepository::_overflow_event_name = "CRDatagramOverflow";
@@ -42,14 +38,13 @@ PStatCollector CConnectionRepository::_update_pcollector("App:Show code:readerPo
 ////////////////////////////////////////////////////////////////////
 //     Function: CConnectionRepository::Constructor
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 CConnectionRepository::
 CConnectionRepository(bool has_owner_view, bool threaded_net) :
   _lock("CConnectionRepository::_lock"),
 #ifdef HAVE_PYTHON
   _python_repository(NULL),
-  _python_ai_datagramiterator(NULL),
 #endif
 #ifdef HAVE_OPENSSL
   _http_conn(NULL),
@@ -82,19 +77,12 @@ CConnectionRepository(bool has_owner_view, bool threaded_net) :
   }
 #endif
   _tcp_header_size = tcp_header_size;
-
-#ifdef HAVE_PYTHON
-  PyObject *  PyDitterator = DTool_CreatePyInstance(&_di,Dtool_DatagramIterator,false,false);
-  if(PyDitterator != NULL)
-      _python_ai_datagramiterator = Py_BuildValue("(O)",PyDitterator);
-#endif
-
 }
 
 ////////////////////////////////////////////////////////////////////
 //     Function: CConnectionRepository::Destructor
 //       Access: Published
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 CConnectionRepository::
 ~CConnectionRepository() {
@@ -182,7 +170,7 @@ try_connect_net(const URLSpec &url) {
 
   disconnect();
 
-  _net_conn = 
+  _net_conn =
     _qcm.open_TCP_client_connection(url.get_server(), url.get_port(),
                                     game_server_timeout_ms);
 
@@ -315,7 +303,7 @@ check_datagram() {
         _msg_channels.push_back(schan);
       }
       _msg_sender = _di.get_uint64();
-      
+
 #ifdef HAVE_PYTHON
       // For now, we need to stuff this field onto the Python
       // structure, to support legacy code that expects to find it
@@ -361,7 +349,7 @@ check_datagram() {
       }
       break;
 #endif  // HAVE_PYTHON
-      
+
     default:
       // Some unknown message; let the caller deal with it.
       return true;
@@ -372,9 +360,6 @@ check_datagram() {
   return false;
 }
 
-
-
-
 ////////////////////////////////////////////////////////////////////
 //     Function: CConnectionRepository::is_connected
 //       Access: Published
@@ -745,7 +730,7 @@ handle_update_field() {
 
   PStatTimer timer(_update_pcollector);
   unsigned int do_id = _di.get_uint32();
-  if (_python_repository != (PyObject *)NULL) 
+  if (_python_repository != (PyObject *)NULL)
   {
     PyObject *doId2do =
       PyObject_GetAttrString(_python_repository, "doId2do");
@@ -794,9 +779,9 @@ handle_update_field() {
       // method might get into trouble if it tried to delete the
       // object from the doId2do map.
       Py_INCREF(distobj);
-      dclass->receive_update(distobj, _di); 
+      dclass->receive_update(distobj, _di);
       Py_DECREF(distobj);
-      
+
       if (PyErr_Occurred()) {
 #if defined(HAVE_THREADS) && !defined(SIMPLE_THREADS)
         PyGILState_Release(gstate);
@@ -810,7 +795,7 @@ handle_update_field() {
 #if defined(HAVE_THREADS) && !defined(SIMPLE_THREADS)
   PyGILState_Release(gstate);
 #endif
-  #endif  // HAVE_PYTHON  
+  #endif  // HAVE_PYTHON
   return true;
 }
 
@@ -880,9 +865,9 @@ handle_update_field_owner() {
         // make a copy of the datagram iterator so that we can use the main
         // iterator for the non-owner update
         DatagramIterator _odi(_di);
-        dclass->receive_update(distobjOV, _odi); 
+        dclass->receive_update(distobjOV, _odi);
         Py_DECREF(distobjOV);
-      
+
         if (PyErr_Occurred()) {
 #if defined(HAVE_THREADS) && !defined(SIMPLE_THREADS)
           PyGILState_Release(gstate);
@@ -919,9 +904,9 @@ handle_update_field_owner() {
         // method might get into trouble if it tried to delete the
         // object from the doId2do map.
         Py_INCREF(distobj);
-        dclass->receive_update(distobj, _di); 
+        dclass->receive_update(distobj, _di);
         Py_DECREF(distobj);
-      
+
         if (PyErr_Occurred()) {
 #if defined(HAVE_THREADS) && !defined(SIMPLE_THREADS)
           PyGILState_Release(gstate);
@@ -935,7 +920,7 @@ handle_update_field_owner() {
 #if defined(HAVE_THREADS) && !defined(SIMPLE_THREADS)
   PyGILState_Release(gstate);
 #endif
-#endif  // HAVE_PYTHON  
+#endif  // HAVE_PYTHON
 
   return true;
 }
@@ -948,17 +933,17 @@ handle_update_field_owner() {
 //               output stream.
 ////////////////////////////////////////////////////////////////////
 void CConnectionRepository::
-describe_message(ostream &out, const string &prefix, 
+describe_message(ostream &out, const string &prefix,
                  const Datagram &dg) const {
   DCPacker packer;
-  
+
   packer.set_unpack_data(dg.get_message());
   CHANNEL_TYPE do_id;
   int msg_type;
   bool is_update = false;
   string full_prefix = "CR::" + prefix;
 
-  if (!_client_datagram) 
+  if (!_client_datagram)
   {
     unsigned char mcnt = packer.raw_unpack_uint8();
     for( ;mcnt > 0; mcnt--)
@@ -967,7 +952,7 @@ describe_message(ostream &out, const string &prefix,
     packer.RAW_UNPACK_CHANNEL();  // msg_sender
     msg_type = packer.raw_unpack_uint16();
     is_update = (msg_type == STATESERVER_OBJECT_UPDATE_FIELD);
-    
+
   } else {
     msg_type = packer.raw_unpack_uint16();
     is_update = (msg_type == CLIENT_OBJECT_UPDATE_FIELD);
@@ -1045,12 +1030,12 @@ describe_message(ostream &out, const string &prefix,
         Py_DECREF(dclass_this);
       }
     }
-    #endif  // HAVE_PYTHON  
+    #endif  // HAVE_PYTHON
 
     int field_id = packer.raw_unpack_uint16();
 
     if (dclass == (DCClass *)NULL) {
-      out << full_prefix << "update for unknown object " << do_id 
+      out << full_prefix << "update for unknown object " << do_id
           << ", field " << field_id << "\n";
 
     } else {
@@ -1059,7 +1044,7 @@ describe_message(ostream &out, const string &prefix,
       DCField *field = dclass->get_field_by_index(field_id);
       if (field == (DCField *)NULL) {
         out << "unknown field " << field_id << "\n";
-        
+
       } else {
         out << field->get_name();
         packer.begin_unpack(field);
@@ -1070,148 +1055,3 @@ describe_message(ostream &out, const string &prefix,
     }
   }
 }
-
-
-
-
-#ifdef HAVE_PYTHON
-#ifdef WANT_NATIVE_NET
-
-bool CConnectionRepository::network_based_reader_and_yielder(PyObject *PycallBackFunction,ClockObject &clock, float returnBy)
-{
-  ReMutexHolder holder(_lock);
-    while(is_connected())
-    {        
-        check_datagram_ai(PycallBackFunction);
-        if(is_connected())
-            _bdc.Flush();
-        float currentTime = clock.get_real_time();
-        float dif_time = returnBy - currentTime;
-        if(dif_time <= 0.001) // to avoi over runs..
-            break;
-        if(is_connected())
-            _bdc.WaitForNetworkReadEvent(dif_time);
-    }
-    return false;
-}
-
-bool CConnectionRepository::check_datagram_ai(PyObject *PycallBackFunction)
-{
-  ReMutexHolder holder(_lock);
-    // these could be static .. not 
-  PyObject *doId2do = NULL; 
-  float startTime =0;
-  float endTime = 0;
-  // this seems weird...here
-  _bdc.Flush();
-  while (_bdc.GetMessage(_dg))
-  { 
-      if (get_verbose()) 
-          describe_message(nout, "RECV", _dg);
-
-      if (_time_warning > 0) 
-        startTime = ClockObject::get_global_clock()->get_real_time();
-
-      // Start breaking apart the datagram.
-      _di.assign(_dg);
-      unsigned char  wc_cnt = _di.get_uint8();
-      _msg_channels.clear();
-      for(unsigned char lp1 = 0; lp1 < wc_cnt; lp1++)
-          _msg_channels.push_back(_di.get_uint64());
-
-      _msg_sender = _di.get_uint64();
-      _msg_type = _di.get_uint16();
-
-      if( _msg_type == STATESERVER_OBJECT_UPDATE_FIELD)
-      {
-          if(doId2do == NULL)
-          {
-              // this is my attemp to take it out of the inner loop  RHH
-              doId2do =PyObject_GetAttrString(_python_repository, "doId2do");
-              nassertr(doId2do != NULL, false);
-          }
-
-          if (!handle_update_field_ai(doId2do)) 
-          {
-              Py_XDECREF(doId2do);
-              if (_time_warning > 0) {
-                endTime = ClockObject::get_global_clock()->get_real_time(); 
-                if ( _time_warning < (endTime - startTime)) {
-                  nout << "msg " << _msg_type <<" from " << _msg_sender << " took "<<  (endTime-startTime) << "secs to process\n";
-                  _dg.dump_hex(nout,2);
-                }
-              }
-              return false; 
-          }
-      }
-      else
-      {
-          PyObject * result = PyEval_CallObject(PycallBackFunction, _python_ai_datagramiterator);
-          if (PyErr_Occurred()) 
-          {        
-              Py_XDECREF(doId2do);
-              if (_time_warning > 0) {
-                endTime = ClockObject::get_global_clock()->get_real_time(); 
-                if ( _time_warning < (endTime - startTime)) {
-                  nout << "msg " << _msg_type <<" from " << _msg_sender << " took "<<  (endTime-startTime) << "secs to process\n";
-                  _dg.dump_hex(nout,2);                
-                }
-              }
-              return true;
-          }
-      }
-
-      if (_time_warning > 0) {
-        endTime = ClockObject::get_global_clock()->get_real_time(); 
-        if ( _time_warning < (endTime - startTime)) {
-          nout << "msg " << _msg_type <<" from " << _msg_sender << " took "<<  (endTime-startTime) << "secs to process\n";
-          _dg.dump_hex(nout,2);   
-        }
-      }
-             
-  }
-
-
-  Py_XDECREF(doId2do);
-  return false;
-}
-
-#endif  // #ifdef WANT_NATIVE_NET
-#endif  // #ifdef HAVE_PYTHON
-
-
-#ifdef HAVE_PYTHON
-#ifdef WANT_NATIVE_NET
-
-
-bool CConnectionRepository::handle_update_field_ai(PyObject *doId2do) 
-{
-  PStatTimer timer(_update_pcollector);
-  unsigned int do_id = _di.get_uint32();
- 
-  PyObject *doId = PyLong_FromUnsignedLong(do_id);
-  PyObject *distobj = PyDict_GetItem(doId2do, doId);
-  Py_DECREF(doId);
-
-  if (distobj != NULL)
-  {
-      PyObject *dclass_obj = PyObject_GetAttrString(distobj, "dclass");
-      nassertr(dclass_obj != NULL, false);
-
-      DCClass *dclass = NULL;
-      DTOOL_Call_ExtractThisPointerForType(dclass_obj, &Dtool_DCClass, (void **) &dclass);
-      if(dclass == NULL)
-          return false;
-
-      Py_INCREF(distobj);
-      dclass->receive_update(distobj, _di); 
-      Py_DECREF(distobj);
-
-      if (PyErr_Occurred()) 
-          return false;
-  }
-  return true;
-}
-
-#endif  // #ifdef WANT_NATIVE_NET
-#endif  // #ifdef HAVE_PYTHON

+ 3 - 17
direct/src/distributed/cConnectionRepository.h

@@ -100,7 +100,7 @@ PUBLISHED:
 #endif
 #ifdef HAVE_NET
   BLOCKING bool try_connect_net(const URLSpec &url);
-  
+
   INLINE QueuedConnectionManager &get_qcm();
   INLINE ConnectionWriter &get_cw();
   INLINE QueuedConnectionReader &get_qcr();
@@ -117,13 +117,7 @@ PUBLISHED:
 #endif
 
   BLOCKING bool check_datagram();
-#ifdef HAVE_PYTHON
-#ifdef WANT_NATIVE_NET
-  BLOCKING bool check_datagram_ai(PyObject *PycallBackFunction);
-  BLOCKING bool network_based_reader_and_yielder(PyObject *PycallBackFunction,ClockObject &clock, float returnBy);
-#endif
-#endif
-    
+
   BLOCKING INLINE void get_datagram(Datagram &dg);
   BLOCKING INLINE void get_datagram_iterator(DatagramIterator &di);
   BLOCKING INLINE CHANNEL_TYPE get_msg_channel(int offset = 0) const;
@@ -167,18 +161,11 @@ PUBLISHED:
   INLINE float get_time_warning() const;
 
 private:
-#ifdef HAVE_PYTHON
-#ifdef WANT_NATIVE_NET
-    bool handle_update_field_ai(PyObject *doId2do);
-#endif
-#endif
-
-
   bool do_check_datagram();
   bool handle_update_field();
   bool handle_update_field_owner();
 
-  void describe_message(ostream &out, const string &prefix, 
+  void describe_message(ostream &out, const string &prefix,
                         const Datagram &dg) const;
 
 private:
@@ -186,7 +173,6 @@ private:
 
 #ifdef HAVE_PYTHON
   PyObject *_python_repository;
-  PyObject *_python_ai_datagramiterator;
 #endif
 
 #ifdef HAVE_OPENSSL

+ 5 - 5
direct/src/ffi/DoGenPyCode.py

@@ -143,7 +143,7 @@ def doGetopts():
             doSqueeze = False
         elif (flag == '-s'):
             deleteSourceAfterSqueeze = False
-            
+
         else:
             FFIConstants.notify.error('illegal option: ' + flag)
 
@@ -168,7 +168,7 @@ def doGetopts():
         if codeLib not in newLibs:
             newLibs.append(codeLib)
     codeLibs = newLibs
-        
+
 
 def doErrorCheck():
     global outputCodeDir
@@ -278,9 +278,9 @@ def generateNativeWrappers():
         # in the runtime (plugin) environment, where all libraries are
         # not necessarily downloaded.
         if sys.version_info >= (3, 0):
-            pandaModules.write('try:\n  from .%s import *\nexcept ImportError as err:\n  if "DLL loader cannot find" not in str(err):\n    raise\n' % (metaModuleName))
+            pandaModules.write('try:\n    from .%s import *\nexcept ImportError as err:\n    if "DLL loader cannot find" not in str(err):\n        raise\n' % (metaModuleName))
         else:
-            pandaModules.write('try:\n  from %s import *\nexcept ImportError, err:\n  if "DLL loader cannot find" not in str(err):\n    raise\n' % (metaModuleName))
+            pandaModules.write('try:\n    from %s import *\nexcept ImportError, err:\n    if "DLL loader cannot find" not in str(err):\n        raise\n' % (metaModuleName))
 
         # Not sure if this message is helpful or annoying.
         #pandaModules.write('  print("Failed to import %s")\n' % (moduleName))
@@ -306,7 +306,7 @@ def generateNativeWrappers():
                     extension = open(extensionFilename, 'r')
                     moduleModules.write(extension.read())
                     moduleModules.write('\n')
-        
+
 
 def run():
     global outputCodeDir

+ 3 - 4
direct/src/filter/CommonFilters.py

@@ -167,7 +167,7 @@ class CommonFilters:
                 auxbits |= AuxBitplaneAttrib.ABOGlow
 
             if ("VolumetricLighting" in configuration):
-                needtex[configuration["VolumetricLighting"].source] = True
+                needtex.add(configuration["VolumetricLighting"].source)
 
             for tex in needtex:
                 self.textures[tex] = Texture("scene-" + tex)
@@ -199,7 +199,7 @@ class CommonFilters:
                 self.ssao[0].setShaderInput("depth", self.textures["depth"])
                 self.ssao[0].setShaderInput("normal", self.textures["aux"])
                 self.ssao[0].setShaderInput("random", loader.loadTexture("maps/random.rgb"))
-                self.ssao[0].setShader(Shader.make(SSAO_BODY % configuration["AmbientOcclusion"].numsamples))
+                self.ssao[0].setShader(Shader.make(SSAO_BODY % configuration["AmbientOcclusion"].numsamples, Shader.SL_Cg))
                 self.ssao[1].setShaderInput("src", ssao0)
                 self.ssao[1].setShader(self.loadShader("filter-blurx.sha"))
                 self.ssao[2].setShaderInput("src", ssao1)
@@ -338,9 +338,8 @@ class CommonFilters:
             if ("Inverted" in configuration):
                 text += "  o_color = float4(1, 1, 1, 1) - o_color;\n"
             text += "}\n"
-            print text
             
-            self.finalQuad.setShader(Shader.make(text))
+            self.finalQuad.setShader(Shader.make(text, Shader.SL_Cg))
             for tex in self.textures:
                 self.finalQuad.setShaderInput("tx"+tex, self.textures[tex])
             

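The two Shader.make() calls above now state the shader language explicitly (Shader.SL_Cg) instead of relying on auto-detection. A minimal sketch of the same pattern, assuming a single-source Cg shader body; the shader text and node name are illustrative only:

    from panda3d.core import Shader, NodePath

    CG_BODY = """
    //Cg
    void vshader(float4 vtx_position : POSITION,
                 out float4 l_position : POSITION,
                 uniform float4x4 mat_modelproj)
    {
      l_position = mul(mat_modelproj, vtx_position);
    }
    void fshader(out float4 o_color : COLOR)
    {
      o_color = float4(1, 0, 0, 1);
    }
    """

    shader = Shader.make(CG_BODY, Shader.SL_Cg)   # language passed explicitly
    quad = NodePath("filter-quad")                # stand-in for finalQuad
    quad.setShader(shader)
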
+ 1 - 1
direct/src/filter/Sources.pp

@@ -1,4 +1,4 @@
 
 #defer install_data_dir $[install_lib_dir]/$[PACKAGE]/$[DIRNAME]
-#define INSTALL_DATA filter-bloomi.sha filter-bloomx.sha filter-bloomy.sha filter-blurx.sha filter-blury.sha filter-copy.sha filter-down4.sha filter-ssao.sha
+#define INSTALL_DATA filter-bloomi.sha filter-bloomx.sha filter-bloomy.sha filter-blurx.sha filter-blury.sha filter-copy.sha filter-down4.sha
 

+ 0 - 29
direct/src/heapq/Sources.pp

@@ -1,29 +0,0 @@
-// DIR_TYPE "metalib" indicates we are building a shared library that
-// consists mostly of references to other shared libraries.  Under
-// Windows, this directly produces a DLL (as opposed to the regular
-// src libraries, which don't produce anything but a pile of OBJ files
-// under Windows).
-
-#define DIR_TYPE metalib
-
-// This directory strictly contains a Python utility; therefore, only
-// build it if we actually have Python.
-#define BUILD_DIRECTORY $[HAVE_PYTHON]
-
-
-#define OTHER_LIBS \
-  pandaexpress:m \
-  p3dconfig:c p3dtoolconfig:m \
-  p3dtoolutil:c p3dtoolbase:c p3prc:c p3dtool:m
-
-#begin metalib_target
-  #define TARGET p3heapq
-
-  // Tell ppremake to treat this file as if it had been generated via
-  // interrogate.  On OSX, this will move it into the .so, instead of
-  // the .dylib, so that it can be imported into Python.
-  #define PYTHON_MODULE_ONLY 1
-
-  #define SOURCES heapq.cxx
-#end metalib_target
-

+ 0 - 240
direct/src/heapq/heapq.cxx

@@ -1,240 +0,0 @@
-
-/* Note: This module can probably go away when we upgrade to Python 2.4.
-   Python 2.3 has a heapq implementation, but it is in Python. This is
-   reported to be about 20x faster. In 2.4 they reimplemented heapq in C so
-   it should be comparable to this. At this time though, Python 2.4 is
-   still in alpha.
-   
-   Note: This code has been bastardized to only work on Tasks temporarily.
-
-*/
-
-#include <Python.h>
-
-/* Prototypes */
-static PyObject * heappush(PyObject *self, PyObject *args);
-static PyObject * heappop(PyObject *self, PyObject *args);
-static PyObject * heapreplace(PyObject *self, PyObject *args);
-static PyObject * heapify(PyObject *self, PyObject *args);
-static int _siftdown(PyObject *list, int startpos, int pos);
-static int _siftup(PyObject *list, int pos);
-
-#ifdef _WIN32
-extern "C" __declspec(dllexport) void initlibheapq(void);
-extern "C" __declspec(dllexport) void initlibp3heapq(void);
-#else
-extern "C" void initlibheapq();
-extern "C" void initlibp3heapq();
-#endif
-
-static PyObject *
-heappush(PyObject *self, PyObject *args) {
-    int len;
-    PyObject *list = NULL;
-    PyObject *node = NULL;
-    
-    if (!PyArg_ParseTuple(args,"O!O",&PyList_Type,&list,&node))
-        return NULL;
-
-    len = PyList_Size(list);
-    if (PyList_Append(list,node))
-        return NULL;
-    
-    if (_siftdown(list,0,len))
-        return NULL;
-    
-    Py_INCREF(Py_None);
-    return Py_None;
-}
-
-static PyObject *
-heappop(PyObject *self, PyObject *args) {
-    PyObject *list = NULL;
-    PyObject *node = NULL;
-    PyObject *returnNode = NULL;
-    int len;
-    
-    if (!PyArg_ParseTuple(args,"O!",&PyList_Type,&list))
-        return NULL;
-
-    len = PyList_Size(list);
-    if (len == 0) {
-        /* Special-case most common failure cause */
-        PyErr_SetString(PyExc_IndexError, "pop from empty list");
-        return NULL;
-    }
-
-    node = PySequence_GetItem(list,-1);
-    PySequence_DelItem(list,-1);
-
-    len -= 1;
-    if (len > 0) {
-        returnNode = PySequence_GetItem(list,0);
-        PyList_SetItem(list,0,node);
-        if (_siftup(list,0))
-            return NULL;
-    } else {
-        returnNode = node;
-    }
-    
-    return returnNode;
-}
-
-static PyObject * 
-heapreplace(PyObject *self, PyObject *args) {
-    PyObject *list = NULL;
-    PyObject *node = NULL;
-    PyObject *returnNode = NULL;
-    int len;
-    
-    if (!PyArg_ParseTuple(args,"O!O",&PyList_Type,&list,&node))
-        return NULL;
-
-    len = PyList_Size(list);
-    if (len == 0) {
-        /* Special-case most common failure cause */
-        PyErr_SetString(PyExc_IndexError, "replace on an empty list");
-        return NULL;
-    }
-
-    returnNode = PySequence_GetItem(list,0);
-    PySequence_SetItem(list,0,node);
-    if (_siftup(list,0))
-        return NULL;
-
-    return returnNode;
-}
-
-static PyObject *
-heapify(PyObject *self, PyObject *args) {
-    int n, i;
-    PyObject *list;
-
-    if (!PyArg_ParseTuple(args,"O!",&PyList_Type,&list))
-        return NULL;
-    n = (PyList_Size(list)/2)-1;
-    
-    for (i=n;i>=0;i--) {
-        if (_siftup(list,i))
-            return NULL;
-    }
-
-    Py_INCREF(Py_None);
-    return Py_None;
-}
-
-static int
-_siftdown(PyObject *list, int startpos, int pos) {
-    PyObject *newitem, *parent;
-    int parentpos;
-
-    newitem = PySequence_GetItem(list,pos);
-
-    PyObject *newitem_wakeTime_obj = PyObject_GetAttrString(newitem, "wakeTime");
-    double newitem_wakeTime = 0.0;
-    if (newitem_wakeTime_obj != NULL) {
-      newitem_wakeTime = PyFloat_AS_DOUBLE(newitem_wakeTime_obj);
-      Py_DECREF(newitem_wakeTime_obj);
-    }
-
-    while (pos > startpos) {
-        parentpos = (pos - 1) >> 1;
-        parent = PyList_GetItem(list,parentpos);
-
-        /*
-        cmp = PyObject_RichCompareBool(parent,newitem,Py_LE);
-        if (cmp > 0)
-            break;
-        else if (cmp < 0)
-            return -1;
-        */
-
-        PyObject *parent_wakeTime_obj = PyObject_GetAttrString(parent, "wakeTime");
-        double parent_wakeTime = 0.0;
-        if (parent_wakeTime_obj != NULL) {
-          parent_wakeTime = PyFloat_AS_DOUBLE(parent_wakeTime_obj);
-          Py_DECREF(parent_wakeTime_obj);
-        }
-
-        if (parent_wakeTime <= newitem_wakeTime) {
-          break;
-        }
-
-        Py_INCREF(parent);
-        PyList_SetItem(list,pos,parent);
-        pos = parentpos;
-    }
-    PyList_SetItem(list,pos,newitem);
-    return 0;
-}
-
-static int
-_siftup(PyObject *list, int pos) {
-    PyObject *newitem, *right, *child;
-    int endpos, rightpos, childpos;
-    int startpos = pos;
-    
-    endpos = PyList_Size(list);
-    newitem = PySequence_GetItem(list,pos);
-    
-    childpos = (2*pos)+1;
-    while (childpos < endpos) {
-        rightpos = childpos + 1;
-        child = PySequence_Fast_GET_ITEM(list,childpos);
-
-        PyObject *child_wakeTime_obj = PyObject_GetAttrString(child, "wakeTime");
-        double child_wakeTime = 0.0;
-        if (child_wakeTime_obj != NULL) {
-          child_wakeTime = PyFloat_AS_DOUBLE(child_wakeTime_obj);
-          Py_DECREF(child_wakeTime_obj);
-        }
-
-
-        if (rightpos < endpos) {
-            right = PySequence_Fast_GET_ITEM(list,rightpos);
-
-            PyObject *right_wakeTime_obj = PyObject_GetAttrString(right, "wakeTime");
-            double right_wakeTime = 0.0;
-            if (right_wakeTime_obj != NULL) {
-              right_wakeTime = PyFloat_AS_DOUBLE(right_wakeTime_obj);
-              Py_DECREF(right_wakeTime_obj);
-            }
-
-            /*
-            cmp = PyObject_RichCompareBool(right,child,Py_LE);
-            if (cmp > 0)
-              childpos = rightpos;
-            else if (cmp < 0)
-              return -1;
-            */
-
-            if (right_wakeTime <= child_wakeTime) {
-              childpos = rightpos;
-            }
-        }
-        child = PySequence_GetItem(list,childpos);
-        PyList_SetItem(list,pos,child);
-        pos = childpos;
-        childpos = (2*pos)+1;
-    }
-    PyList_SetItem(list,pos,newitem);
-
-    return _siftdown(list,startpos,pos);
-}
-
-static PyMethodDef heapqcMethods[] = {
-    {"heappush",heappush,METH_VARARGS},
-    {"heappop",heappop,METH_VARARGS},
-    {"heapreplace",heapreplace,METH_VARARGS},
-    {"heapify",heapify,METH_VARARGS},
-    {NULL, NULL} /* Sentinel */
-};
-
-void initlibheapq(void) {
-    (void) Py_InitModule("libheapq", heapqcMethods);
-};
-
-void initlibp3heapq(void) {
-    (void) Py_InitModule("libp3heapq", heapqcMethods);
-};
-

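The removed libp3heapq module existed only because Python 2.3 shipped heapq as pure Python; since Python 2.4 the standard library provides a C implementation, so task scheduling can use it directly. A minimal sketch of the equivalent usage, with (wakeTime, name) tuples standing in for the Task objects the deleted C code special-cased:

    import heapq

    pending = []                                 # heap ordered by wakeTime
    heapq.heappush(pending, (12.5, "taskA"))
    heapq.heappush(pending, (3.0, "taskB"))

    wake_time, name = heapq.heappop(pending)     # -> (3.0, "taskB")
    heapq.heapreplace(pending, (7.0, "taskC"))   # pop smallest, push new item
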
+ 1 - 1
direct/src/showbase/HTMLTree.py → direct/src/http/HTMLTree.py

@@ -1,4 +1,4 @@
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 
 class HTMLTree(ET.ElementTree):
     def __init__(self, title):

+ 1 - 1
direct/src/http/LandingPage.py

@@ -3,7 +3,7 @@ from direct.directnotify.DirectNotifyGlobal import directNotify
 from pandac.PandaModules import VirtualFileSystem
 from pandac.PandaModules import Filename
 from pandac.PandaModules import DSearchPath
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 import LandingPageHTML
 from StringIO import StringIO
 

+ 1 - 1
direct/src/http/LandingPageHTML.py

@@ -1,6 +1,6 @@
 # -- Text content for the landing page.  You should change these for yours! --
 
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 
 title = "Landing Page"
 defaultTitle = title

+ 1 - 1
direct/src/http/WebRequest.py

@@ -4,7 +4,7 @@ from direct.directnotify.DirectNotifyGlobal import directNotify
 from direct.task.TaskManagerGlobal import taskMgr
 from direct.task import Task
 from LandingPage import LandingPage
-from direct.showbase import ElementTree as ET
+import xml.etree.ElementTree as ET
 
 notify = directNotify.newCategory('WebRequestDispatcher')
 

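HTMLTree.py, LandingPage.py, LandingPageHTML.py and WebRequest.py now import the standard library's xml.etree.ElementTree instead of the copy formerly bundled under direct.showbase. A minimal sketch of the API these modules rely on (element names are illustrative only):

    import xml.etree.ElementTree as ET

    html = ET.Element("html")
    body = ET.SubElement(html, "body")
    ET.SubElement(body, "h1").text = "Landing Page"

    tree = ET.ElementTree(html)                  # HTMLTree subclasses this
    print(ET.tostring(html))
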
+ 69 - 60
direct/src/p3d/AppRunner.py

@@ -25,16 +25,13 @@ if 'VFSImporter' in sys.modules:
     direct.showbase.VFSImporter = VFSImporter
     sys.modules['direct.showbase.VFSImporter'] = VFSImporter
 else:
-    # Otherwise, we can import the VFSImporter normally.  We have to
-    # import PandaModules first, to get the funny renaming with
-    # pandaexpress.
-    import direct
-    from pandac import PandaModules
+    # Otherwise, we can import the VFSImporter normally.
     from direct.showbase import VFSImporter
 
 from direct.showbase.DirectObject import DirectObject
-from pandac.PandaModules import VirtualFileSystem, Filename, Multifile, loadPrcFileData, unloadPrcFile, getModelPath, Thread, WindowProperties, ExecutionEnvironment, PandaSystem, Notify, StreamWriter, ConfigVariableString, ConfigPageManager, initAppForGui
-from pandac import PandaModules
+from panda3d.core import VirtualFileSystem, Filename, Multifile, loadPrcFileData, unloadPrcFile, getModelPath, Thread, WindowProperties, ExecutionEnvironment, PandaSystem, Notify, StreamWriter, ConfigVariableString, ConfigPageManager
+from panda3d.direct import init_app_for_gui
+from panda3d import core
 from direct.stdpy import file, glob
 from direct.task.TaskManagerGlobal import taskMgr
 from direct.showbase.MessengerGlobal import messenger
@@ -83,7 +80,7 @@ class AppRunner(DirectObject):
 
     # Also from p3d_plugin.h
     P3D_CONTENTS_DEFAULT_MAX_AGE = 5
-    
+
     def __init__(self):
         DirectObject.__init__(self)
 
@@ -119,8 +116,8 @@ class AppRunner(DirectObject):
         self.windowPrc = None
 
         self.http = None
-        if hasattr(PandaModules, 'HTTPClient'):
-            self.http = PandaModules.HTTPClient.getGlobalPtr()
+        if hasattr(core, 'HTTPClient'):
+            self.http = core.HTTPClient.getGlobalPtr()
 
         self.Undefined = Undefined
         self.ConcreteStruct = ConcreteStruct
@@ -213,7 +210,7 @@ class AppRunner(DirectObject):
         # this instance, e.g. the WindowProperties necessary to
         # re-embed a window in the browser frame.
         self.windowProperties = None
-        
+
         # Store our pointer so DirectStart-based apps can find us.
         if AppRunnerGlobal.appRunner is None:
             AppRunnerGlobal.appRunner = self
@@ -263,7 +260,7 @@ class AppRunner(DirectObject):
             value = bool(value)
         return value
 
-        
+
 
     def installPackage(self, packageName, version = None, hostUrl = None):
 
@@ -353,7 +350,7 @@ class AppRunner(DirectObject):
 
         # No shenanigans, just return the requested host.
         return host
-        
+
     def getHost(self, hostUrl, hostDir = None):
         """ Returns a new HostInfo object corresponding to the
         indicated host URL.  If we have already seen this URL
@@ -419,7 +416,7 @@ class AppRunner(DirectObject):
                 # the host directory too.
                 del self.hosts[hostUrl]
                 self.__deleteHostFiles(host)
-                
+
         return packages
 
     def __deleteHostFiles(self, host):
@@ -431,7 +428,7 @@ class AppRunner(DirectObject):
         self.rmtree(host.hostDir)
 
         self.sendRequest('forget_package', host.hostUrl, '', '')
-        
+
 
     def freshenFile(self, host, fileSpec, localPathname):
         """ Ensures that the localPathname is the most current version
@@ -449,18 +446,18 @@ class AppRunner(DirectObject):
         doc = None
         if self.superMirrorUrl:
             # Use the "super mirror" first.
-            url = PandaModules.URLSpec(self.superMirrorUrl + fileSpec.filename)
+            url = core.URLSpec(self.superMirrorUrl + fileSpec.filename)
             self.notify.info("Freshening %s" % (url))
             doc = self.http.getDocument(url)
-            
+
         if not doc or not doc.isValid():
             # Failing the super mirror, contact the actual host.
-            url = PandaModules.URLSpec(host.hostUrlPrefix + fileSpec.filename)
+            url = core.URLSpec(host.hostUrlPrefix + fileSpec.filename)
             self.notify.info("Freshening %s" % (url))
             doc = self.http.getDocument(url)
             if not doc.isValid():
                 return False
-        
+
         file = Filename.temporary('', 'p3d_')
         if not doc.downloadToFile(file):
             # Failed to download.
@@ -522,12 +519,11 @@ class AppRunner(DirectObject):
         """ Reads the config.xml file that may be present in the root
         directory. """
 
-        if not hasattr(PandaModules, 'TiXmlDocument'):
+        if not hasattr(core, 'TiXmlDocument'):
             return
-        from pandac.PandaModules import TiXmlDocument
 
         filename = Filename(self.rootDir, self.ConfigBasename)
-        doc = TiXmlDocument(filename.toOsSpecific())
+        doc = core.TiXmlDocument(filename.toOsSpecific())
         if not doc.LoadFile():
             return
 
@@ -544,7 +540,7 @@ class AppRunner(DirectObject):
         called automatically; an application may call this after
         adjusting some parameters (such as self.maxDiskUsage). """
 
-        from pandac.PandaModules import TiXmlDocument, TiXmlDeclaration, TiXmlElement
+        from panda3d.core import TiXmlDocument, TiXmlDeclaration, TiXmlElement
 
         filename = Filename(self.rootDir, self.ConfigBasename)
         doc = TiXmlDocument(filename.toOsSpecific())
@@ -561,7 +557,7 @@ class AppRunner(DirectObject):
         tfile = Filename.temporary(self.rootDir.cStr(), '.xml')
         if doc.SaveFile(tfile.toOsSpecific()):
             tfile.renameTo(filename)
-        
+
 
     def checkDiskUsage(self):
         """ Checks the total disk space used by all packages, and
@@ -574,11 +570,11 @@ class AppRunner(DirectObject):
                 totalSize += packageData.totalSize
         self.notify.info("Total Panda3D disk space used: %s MB" % (
             (totalSize + 524288) / 1048576))
-        
+
         if self.verifyContents == self.P3DVCNever:
             # We're not allowed to delete anything anyway.
             return
-        
+
         self.notify.info("Configured max usage is: %s MB" % (
             (self.maxDiskUsage + 524288) / 1048576))
         if totalSize <= self.maxDiskUsage:
@@ -592,7 +588,7 @@ class AppRunner(DirectObject):
                 if packageData.package and packageData.package.installed:
                     # Don't uninstall any packages we're currently using.
                     continue
-                
+
                 usedPackages.append((packageData.lastUse, packageData))
 
         # Sort the packages into oldest-first order.
@@ -609,13 +605,13 @@ class AppRunner(DirectObject):
                 packages.append(packageData.package)
             else:
                 # If it's an unknown package, just delete it directly.
-                print "Deleting unknown package %s" % (packageData.pathname)
+                print("Deleting unknown package %s" % (packageData.pathname))
                 self.rmtree(packageData.pathname)
 
         packages = self.deletePackages(packages)
         if packages:
-            print "Unable to delete %s packages" % (len(packages))
-        
+            print("Unable to delete %s packages" % (len(packages)))
+
         return
 
     def stop(self):
@@ -663,10 +659,10 @@ class AppRunner(DirectObject):
             for child in filename.scanDirectory():
                 self.rmtree(Filename(filename, child))
             if not filename.rmdir():
-                print "could not remove directory %s" % (filename)
+                print("could not remove directory %s" % (filename))
         else:
             if not filename.unlink():
-                print "could not delete %s" % (filename)
+                print("could not delete %s" % (filename))
 
     def setSessionId(self, sessionId):
         """ This message should come in at startup. """
@@ -747,10 +743,23 @@ class AppRunner(DirectObject):
             # will properly get ignored by ShowBase.
             self.initialAppImport = True
 
-            __import__(moduleName)
-            main = sys.modules[moduleName]
-            if hasattr(main, 'main') and hasattr(main.main, '__call__'):
-                main.main(self)
+            # Python won't let us import a module named __main__.  So,
+            # we have to do that manually, via the VFSImporter.
+            if moduleName == '__main__':
+                dirName = Filename(self.multifileRoot).toOsSpecific()
+                importer = VFSImporter.VFSImporter(dirName)
+                loader = importer.find_module('__main__')
+                if loader is None:
+                    raise ImportError('No module named __main__')
+
+                mainModule = loader.load_module('__main__')
+            else:
+                __import__(moduleName)
+                mainModule = sys.modules[moduleName]
+
+            # Check if it has a main() function.  If so, call it.
+            if hasattr(mainModule, 'main') and hasattr(mainModule.main, '__call__'):
+                mainModule.main(self)
 
             # Now clear this flag.
             self.initialAppImport = False
@@ -814,7 +823,7 @@ class AppRunner(DirectObject):
 
         # Now that we have rootDir, we can read the config file.
         self.readConfigXml()
-        
+
 
     def addPackageInfo(self, name, platform, version, hostUrl, hostDir = None,
                        recurse = False):
@@ -858,7 +867,7 @@ class AppRunner(DirectObject):
                 # Maybe the contents.xml file isn't current.  Re-fetch it.
                 if host.redownloadContentsFile(self.http):
                     return self.addPackageInfo(name, platform, version, hostUrl, hostDir = hostDir, recurse = True)
-            
+
             message = "Couldn't find %s %s on %s" % (name, version, hostUrl)
             raise OSError, message
 
@@ -866,18 +875,18 @@ class AppRunner(DirectObject):
         if not package.downloadDescFile(self.http):
             message = "Couldn't get desc file for %s" % (name)
             raise OSError, message
-        
+
         if not package.downloadPackage(self.http):
             message = "Couldn't download %s" % (name)
             raise OSError, message
-        
+
         if not package.installPackage(self):
             message = "Couldn't install %s" % (name)
             raise OSError, message
 
         if package.guiApp:
             self.guiApp = True
-            initAppForGui()
+            init_app_for_gui()
 
     def setP3DFilename(self, p3dFilename, tokens, argv, instanceId,
                        interactiveConsole, p3dOffset = 0, p3dUrl = None):
@@ -885,7 +894,7 @@ class AppRunner(DirectObject):
         contains the application itself, along with the web tokens
         and/or command-line arguments.  Once this method has been
         called, the application is effectively started. """
-        
+
         # One day we will have support for multiple instances within a
         # Python session.  Against that day, we save the instance ID
         # for this instance.
@@ -937,9 +946,9 @@ class AppRunner(DirectObject):
         self.allowPythonDev = False
 
         i = mf.findSubfile('p3d_info.xml')
-        if i >= 0 and hasattr(PandaModules, 'readXmlStream'):
+        if i >= 0 and hasattr(core, 'readXmlStream'):
             stream = mf.openReadSubfile(i)
-            self.p3dInfo = PandaModules.readXmlStream(stream)
+            self.p3dInfo = core.readXmlStream(stream)
             mf.closeReadSubfile(stream)
         if self.p3dInfo:
             self.p3dPackage = self.p3dInfo.FirstChildElement('package')
@@ -975,7 +984,7 @@ class AppRunner(DirectObject):
             ConfigVariableString('frame-rate-meter-text-pattern').setValue('allow_python_dev %0.1f fps')
 
         if self.guiApp:
-            initAppForGui()
+            init_app_for_gui()
 
         self.initPackedAppEnvironment()
 
@@ -992,7 +1001,7 @@ class AppRunner(DirectObject):
             # provided if available.  It is only for documentation
             # purposes; the actual p3d file has already been
             # downloaded to p3dFilename.
-            self.p3dUrl = PandaModules.URLSpec(p3dUrl)
+            self.p3dUrl = core.URLSpec(p3dUrl)
 
         # Send this call to the main thread; don't call it directly.
         messenger.send('AppRunner_startIfReady', taskChain = 'default')
@@ -1015,14 +1024,14 @@ class AppRunner(DirectObject):
                     newUrl = xalthost.Attribute('url')
                     self.altHostMap[origUrl] = newUrl
                     break
-                
+
                 xalthost = xalthost.NextSiblingElement('alt_host')
-    
+
     def loadMultifilePrcFiles(self, mf, root):
         """ Loads any prc files in the root of the indicated
         Multifile, which is presumed to have been mounted already
         under root. """
-        
+
         # We have to load these prc files explicitly, since the
         # ConfigPageManager can't directly look inside the vfs.  Use
         # the Multifile interface to find the prc files, rather than
@@ -1046,12 +1055,12 @@ class AppRunner(DirectObject):
                     cp = loadPrcFileData(pathname, data)
                     # Set it to sort value 20, behind the implicit pages.
                     cp.setSort(20)
-        
-    
+
+
     def __clearWindowProperties(self):
         """ Clears the windowPrc file that was created in a previous
         call to setupWindow(), if any. """
-        
+
         if self.windowPrc:
             unloadPrcFile(self.windowPrc)
             self.windowPrc = None
@@ -1127,12 +1136,12 @@ class AppRunner(DirectObject):
         function that can be used to deliver requests upstream, to the
         core API, and thereby to the browser. """
         self.requestFunc = func
-        
+
     def sendRequest(self, request, *args):
         """ Delivers a request to the browser via self.requestFunc.
         This low-level function is not intended to be called directly
         by user code. """
-        
+
         assert self.requestFunc
         return self.requestFunc(self.instanceId, request, args)
 
@@ -1176,7 +1185,7 @@ class AppRunner(DirectObject):
             # Evaluate it now.
             return self.scriptRequest('eval', self.dom, value = expression,
                                       needsResponse = needsResponse)
-        
+
     def scriptRequest(self, operation, object, propertyName = '',
                       value = None, needsResponse = True):
         """ Issues a new script request to the browser.  This queries
@@ -1184,7 +1193,7 @@ class AppRunner(DirectObject):
         low-level method that user code should not call directly;
         instead, just operate on the Python wrapper objects that
         shadow the DOM objects, beginning with appRunner.dom.
-        
+
         operation may be one of [ 'get_property', 'set_property',
         'call', 'evaluate' ].
 
@@ -1233,7 +1242,7 @@ def dummyAppRunner(tokens = [], argv = None):
     first-look sanity check. """
 
     if AppRunnerGlobal.appRunner:
-        print "Already have AppRunner, not creating a new one."
+        print("Already have AppRunner, not creating a new one.")
         return AppRunnerGlobal.appRunner
 
     appRunner = AppRunner()
@@ -1243,7 +1252,7 @@ def dummyAppRunner(tokens = [], argv = None):
     platform = PandaSystem.getPlatform()
     version = PandaSystem.getPackageVersionString()
     hostUrl = PandaSystem.getPackageHostUrl()
-    
+
     if platform.startswith('win'):
         rootDir = Filename(Filename.getUserAppdataDirectory(), 'Panda3D')
     elif platform.startswith('osx'):
@@ -1256,7 +1265,7 @@ def dummyAppRunner(tokens = [], argv = None):
 
     # Of course we will have the panda3d application loaded.
     appRunner.addPackageInfo('panda3d', platform, version, hostUrl)
-        
+
     appRunner.tokens = tokens
     appRunner.tokenDict = dict(tokens)
     if argv is None:
@@ -1274,6 +1283,6 @@ def dummyAppRunner(tokens = [], argv = None):
     vfs.mount(cwd, appRunner.multifileRoot, vfs.MFReadOnly)
 
     appRunner.initPackedAppEnvironment()
-    
+
     return appRunner
 

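Most of the AppRunner changes follow one migration: the pandac.PandaModules shim is replaced by panda3d.core, and optional features are probed with hasattr on the new module. A minimal sketch of the pattern used throughout the file above:

    from panda3d import core

    # Previously: from pandac import PandaModules
    #             http = PandaModules.HTTPClient.getGlobalPtr()
    http = None
    if hasattr(core, 'HTTPClient'):              # HTTPClient is optional at build time
        http = core.HTTPClient.getGlobalPtr()
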
+ 8 - 8
direct/src/p3d/DeploymentTools.py

@@ -8,9 +8,9 @@ import os, sys, subprocess, tarfile, shutil, time, zipfile, glob, socket, getpas
 from cStringIO import StringIO
 from direct.directnotify.DirectNotifyGlobal import *
 from direct.showbase.AppRunnerGlobal import appRunner
-from pandac.PandaModules import PandaSystem, HTTPClient, Filename, VirtualFileSystem, Multifile
-from pandac.PandaModules import TiXmlDocument, TiXmlDeclaration, TiXmlElement, readXmlStream
-from pandac.PandaModules import PNMImage, PNMFileTypeRegistry
+from panda3d.core import PandaSystem, HTTPClient, Filename, VirtualFileSystem, Multifile
+from panda3d.core import TiXmlDocument, TiXmlDeclaration, TiXmlElement, readXmlStream
+from panda3d.core import PNMImage, PNMFileTypeRegistry
 from direct.stdpy.file import *
 from direct.p3d.HostInfo import HostInfo
 # This is important for some reason
@@ -296,7 +296,7 @@ class Icon:
 
     def __init__(self):
         self.images = {}
-    
+
     def addImage(self, image):
         """ Adds an image to the icon.  Returns False on failure, True on success.
         Only one image per size can be loaded, and the image size must be square. """
@@ -590,7 +590,7 @@ class Installer:
             if package.platform:
                 xpackage.SetAttribute('platform', package.platform)
                 assert package.platform == platform
-            xpackage.SetAttribute('per_platform', '1')
+            xpackage.SetAttribute('per_platform', '0')
             if package.packageVersion:
                 xpackage.SetAttribute('version', version)
                 xpackage.SetAttribute('filename', package.packageName + "/" + package.packageVersion + "/" + package.descFileBasename)
@@ -1097,7 +1097,7 @@ class Installer:
 
         # Tell Vista that we require admin rights
         print >>nsi, 'RequestExecutionLevel admin'
-        print >>nsi 
+        print >>nsi
         if self.offerRun:
             print >>nsi, 'Function launch'
             print >>nsi, '  ExecShell "open" "$INSTDIR\\%s.exe"' % self.shortname
@@ -1112,7 +1112,7 @@ class Installer:
                 print >>nsi, '  CreateShortcut "$DESKTOP\\%s.lnk" "$INSTDIR\\%s.exe" "" "$INSTDIR\\%s.ico"' % (self.fullname, self.shortname, self.shortname)
             print >>nsi, 'FunctionEnd'
             print >>nsi
-            
+
         print >>nsi, '!include "MUI2.nsh"'
         print >>nsi, '!define MUI_ABORTWARNING'
         if self.offerRun:
@@ -1213,7 +1213,7 @@ class Installer:
 
         if icofile is not None:
             icofile.unlink()
-        
+
         return output
 
     def os_walk(self, top):

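Installer writes its contents.xml through the TinyXML bindings now imported from panda3d.core. A rough sketch of that write path, assuming a build that actually exposes TiXmlDocument (other code in this commit guards this with hasattr(core, 'TiXmlDocument')); the attribute values are placeholders:

    from panda3d.core import TiXmlDocument, TiXmlDeclaration, TiXmlElement

    doc = TiXmlDocument("contents.xml")
    doc.InsertEndChild(TiXmlDeclaration("1.0", "utf-8", ""))

    xpackage = TiXmlElement("package")
    xpackage.SetAttribute("name", "myapp")
    xpackage.SetAttribute("per_platform", "0")
    doc.InsertEndChild(xpackage)

    doc.SaveFile("contents.xml")
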
+ 10 - 10
direct/src/p3d/FileSpec.py

@@ -1,6 +1,6 @@
 import os
 import time
-from pandac.PandaModules import Filename, HashVal, VirtualFileSystem
+from panda3d.core import Filename, HashVal, VirtualFileSystem
 
 class FileSpec:
     """ This class represents a disk file whose hash and size
@@ -18,13 +18,13 @@ class FileSpec:
     def fromFile(self, packageDir, filename, pathname = None, st = None):
         """ Reads the file information from the indicated file.  If st
         is supplied, it is the result of os.stat on the filename. """
-        
+
         vfs = VirtualFileSystem.getGlobalPtr()
 
         filename = Filename(filename)
         if pathname is None:
             pathname = Filename(packageDir, filename)
-        
+
         self.filename = filename.cStr()
         self.basename = filename.getBasename()
 
@@ -41,17 +41,17 @@ class FileSpec:
         hv = HashVal()
         hv.hashFile(pathname)
         self.hash = hv.asHex()
-                 
+
 
     def loadXml(self, xelement):
         """ Reads the file information from the indicated XML
         element. """
-        
+
         self.filename = xelement.Attribute('filename')
         self.basename = None
         if self.filename:
             self.basename = Filename(self.filename).getBasename()
-            
+
         size = xelement.Attribute('size')
         try:
             self.size = int(size)
@@ -87,7 +87,7 @@ class FileSpec:
             xelement.SetAttribute('size', str(self.size))
         if self.hash:
             xelement.SetAttribute('hash', self.hash)
-            
+
     def quickVerify(self, packageDir = None, pathname = None,
                     notify = None, correctSelf = False):
         """ Performs a quick test to ensure the file has not been
@@ -157,8 +157,8 @@ class FileSpec:
             self.__updateTimestamp(pathname, st)
 
         return True
-        
-            
+
+
     def fullVerify(self, packageDir = None, pathname = None, notify = None):
         """ Performs a more thorough test to ensure the file has not
         been modified.  This test is less vulnerable to malicious
@@ -235,7 +235,7 @@ class FileSpec:
         """ Corrects the internal hash to match the one on disk. """
         if not self.actualFile:
             self.checkHash(packageDir, pathname, st)
-            
+
         if notify:
             notify.info("Correcting hash %s to %s" % (
                 self.filename, self.actualFile.hash))

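FileSpec computes file hashes through panda3d.core's HashVal, as in the fromFile() body above. A standalone sketch with a placeholder path:

    from panda3d.core import Filename, HashVal

    pathname = Filename.fromOsSpecific("/tmp/example.bin")   # hypothetical file
    hv = HashVal()
    if hv.hashFile(pathname):                                 # MD5 of the contents
        print(hv.asHex())                                     # hex digest, as stored in the XML
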
+ 18 - 19
direct/src/p3d/HostInfo.py

@@ -1,7 +1,6 @@
-from pandac.PandaModules import HashVal, Filename, PandaSystem, DocumentSpec, Ramfile
-from pandac.PandaModules import HTTPChannel
-from pandac import PandaModules
-from libpandaexpress import ConfigVariableInt
+from panda3d.core import HashVal, Filename, PandaSystem, DocumentSpec, Ramfile
+from panda3d.core import HTTPChannel, ConfigVariableInt
+from panda3d import core
 from direct.p3d.PackageInfo import PackageInfo
 from direct.p3d.FileSpec import FileSpec
 from direct.directnotify.DirectNotifyGlobal import directNotify
@@ -49,7 +48,7 @@ class HostInfo:
 
         if hostDir and not isinstance(hostDir, Filename):
             hostDir = Filename.fromOsSpecific(hostDir)
-            
+
         self.hostDir = hostDir
         self.asMirror = asMirror
         self.perPlatform = perPlatform
@@ -175,7 +174,7 @@ class HostInfo:
                         statusString = channel.getStatusString()
                         self.notify.warning("Could not contact download server at %s" % (url,))
                         self.notify.warning("Status code = %s %s" % (statusCode, statusString))
-                                    
+
                 if not rf:
                     self.notify.warning("Unable to download %s" % (url,))
                     try:
@@ -206,7 +205,7 @@ class HostInfo:
                         self.notify.warning("%s" % (str(e),))
                         pass
                     return False
-                    
+
         tempFilename = Filename.temporary('', 'p3d_', '.xml')
         if rf:
             f = open(tempFilename.toOsSpecific(), 'wb')
@@ -278,7 +277,7 @@ class HostInfo:
 
         now = int(time.time())
         return now < self.contentsExpiration and self.hasContentsFile
-        
+
     def readContentsFile(self, tempFilename = None, freshDownload = False):
         """ Reads the contents.xml file for this particular host, once
         it has been downloaded into the indicated temporary file.
@@ -290,7 +289,7 @@ class HostInfo:
         there already.  If tempFilename is not specified, the standard
         filename is read if it is known. """
 
-        if not hasattr(PandaModules, 'TiXmlDocument'):
+        if not hasattr(core, 'TiXmlDocument'):
             return False
 
         if not tempFilename:
@@ -304,7 +303,7 @@ class HostInfo:
 
             tempFilename = Filename(hostDir, 'contents.xml')
 
-        doc = PandaModules.TiXmlDocument(tempFilename.toOsSpecific())
+        doc = core.TiXmlDocument(tempFilename.toOsSpecific())
         if not doc.LoadFile():
             return False
 
@@ -337,12 +336,12 @@ class HostInfo:
                 xcontents.RemoveChild(xorig)
                 xorig = xcontents.FirstChildElement('orig')
 
-            xorig = PandaModules.TiXmlElement('orig')
+            xorig = core.TiXmlElement('orig')
             self.contentsSpec.storeXml(xorig)
             xorig.SetAttribute('expiration', str(self.contentsExpiration))
 
             xcontents.InsertEndChild(xorig)
-            
+
         else:
             # Read the download hash and expiration time from the XML.
             expiration = None
@@ -385,7 +384,7 @@ class HostInfo:
                 perPlatform = int(xpackage.Attribute('per_platform') or '')
             except ValueError:
                 perPlatform = False
-                
+
             package = self.__makePackage(name, platform, version, solo, perPlatform)
             package.descFile = FileSpec()
             package.descFile.loadXml(xpackage)
@@ -418,7 +417,7 @@ class HostInfo:
         """ Looks for the <host> or <alt_host> entry in the
         contents.xml that corresponds to the URL that we actually
         downloaded from. """
-        
+
         xhost = xcontents.FirstChildElement('host')
         while xhost:
             url = xhost.Attribute('url')
@@ -433,7 +432,7 @@ class HostInfo:
                     self.readHostXml(xalthost)
                     return
                 xalthost = xalthost.NextSiblingElement('alt_host')
-            
+
             xhost = xhost.NextSiblingElement('host')
 
     def __findHostXmlForHostDir(self, xcontents):
@@ -442,7 +441,7 @@ class HostInfo:
         contents.xml from.  This is used when reading a contents.xml
         file found on disk, as opposed to downloading it from a
         site. """
-        
+
         xhost = xcontents.FirstChildElement('host')
         while xhost:
             url = xhost.Attribute('url')
@@ -463,7 +462,7 @@ class HostInfo:
                     self.readHostXml(xalthost)
                     return
                 xalthost = xalthost.NextSiblingElement('alt_host')
-            
+
             xhost = xhost.NextSiblingElement('host')
 
     def readHostXml(self, xhost):
@@ -487,7 +486,7 @@ class HostInfo:
                 self.downloadUrlPrefix += '/'
         else:
             self.downloadUrlPrefix = self.hostUrlPrefix
-            
+
         xmirror = xhost.FirstChildElement('mirror')
         while xmirror:
             url = xmirror.Attribute('url')
@@ -611,7 +610,7 @@ class HostInfo:
         the list of packages that were NOT found. """
 
         packages = packages[:]
-        
+
         for key, platforms in self.packages.items():
             for platform, package in platforms.items():
                 if package in packages:

+ 2 - 2
direct/src/p3d/InstalledHostData.py

@@ -1,11 +1,11 @@
-from pandac.PandaModules import URLSpec
+from panda3d.core import URLSpec
 
 class InstalledHostData:
     """ A list of instances of this class is returned by
     AppRunner.scanInstalledPackages().  Each of these corresponds to a
     particular host that has provided packages that have been
     installed on the local client. """
-    
+
     def __init__(self, host, dirnode):
         self.host = host
         self.pathname = dirnode.pathname

+ 55 - 56
direct/src/p3d/PackageInfo.py

@@ -1,6 +1,5 @@
-from pandac.PandaModules import Filename, URLSpec, DocumentSpec, Ramfile, Multifile, Decompressor, EUOk, EUSuccess, VirtualFileSystem, Thread, getModelPath, ExecutionEnvironment, PStatCollector, TiXmlDocument, TiXmlDeclaration, TiXmlElement
-from pandac import PandaModules
-from libpandaexpress import ConfigVariableInt
+from panda3d.core import Filename, URLSpec, DocumentSpec, Ramfile, Multifile, Decompressor, EUOk, EUSuccess, VirtualFileSystem, Thread, getModelPath, ExecutionEnvironment, PStatCollector, TiXmlDocument, TiXmlDeclaration, TiXmlElement
+import panda3d.core as core
 from direct.p3d.FileSpec import FileSpec
 from direct.p3d.ScanDirectoryNode import ScanDirectoryNode
 from direct.showbase import VFSImporter
@@ -55,7 +54,7 @@ class PackageInfo:
             tokens.  This function defines a new generator that yields
             each of those tokens, but wraps each call into the nested
             generator within a pair of start/stop collector calls. """
-            
+
             self.pStatCol.start()
             for token in self.__funcPtr(self):
                 self.pStatCol.stop()
@@ -76,7 +75,7 @@ class PackageInfo:
             if self.bytesNeeded == 0:
                 return 1
             return min(float(self.bytesDone) / float(self.bytesNeeded), 1)
-    
+
     def __init__(self, host, packageName, packageVersion, platform = None,
                  solo = False, asMirror = False, perPlatform = False):
         self.host = host
@@ -94,7 +93,7 @@ class PackageInfo:
         # This will be filled in when the host's contents.xml file is
         # read.
         self.packageDir = None
-            
+
         # These will be filled in by HostInfo when the package is read
         # from contents.xml.
         self.descFile = None
@@ -110,11 +109,11 @@ class PackageInfo:
         self.extracts = []
         self.requires = []
         self.installPlans = None
- 
+
         # This is updated during downloadPackage().  It is in the
         # range 0..1.
         self.downloadProgress = 0
-        
+
         # This is set true when the package file has been fully
         # downloaded and unpacked.
         self.hasPackage = False
@@ -133,12 +132,12 @@ class PackageInfo:
         This may not be known until the host's contents.xml file has
         been downloaded, which informs us of the host's own install
         directory. """
-        
+
         if not self.packageDir:
             if not self.host.hasContentsFile:
                 if not self.host.readContentsFile():
                     self.host.downloadContentsFile(self.http)
-            
+
             # Derive the packageDir from the hostDir.
             self.packageDir = Filename(self.host.hostDir, self.packageName)
             if self.packageVersion:
@@ -159,7 +158,7 @@ class PackageInfo:
                 # plugin--and we therefore shouldn't include the
                 # platform in the directory hierarchy.
                 includePlatform = False
-                
+
             if includePlatform and self.platform:
                 self.packageDir = Filename(self.packageDir, self.platform)
 
@@ -176,7 +175,7 @@ class PackageInfo:
         # Return the size of plan A, assuming it will work.
         plan = self.installPlans[0]
         size = sum([step.getEffort() for step in plan])
-        
+
         return size
 
     def getPrevDownloadedEffort(self):
@@ -209,13 +208,13 @@ class PackageInfo:
             name += ' rev %s' % (self.patchVersion)
 
         return name
-        
+
 
     def setupFilenames(self):
         """ This is called by the HostInfo when the package is read
         from contents.xml, to set up the internal filenames and such
         that rely on some of the information from contents.xml. """
-        
+
         dirname, basename = self.descFile.filename.rsplit('/', 1)
         self.descFileDirname = dirname
         self.descFileBasename = basename
@@ -264,7 +263,7 @@ class PackageInfo:
             Thread.considerYield()
 
         return (token == self.stepComplete)
-    
+
     def downloadDescFileGenerator(self, http):
         """ A generator function that implements downloadDescFile()
         one piece at a time.  It yields one of stepComplete,
@@ -343,9 +342,9 @@ class PackageInfo:
 
         filename = Filename(self.getPackageDir(), self.descFileBasename)
 
-        if not hasattr(PandaModules, 'TiXmlDocument'):
+        if not hasattr(core, 'TiXmlDocument'):
             return False
-        doc = PandaModules.TiXmlDocument(filename.toOsSpecific())
+        doc = core.TiXmlDocument(filename.toOsSpecific())
         if not doc.LoadFile():
             return False
 
@@ -432,7 +431,7 @@ class PackageInfo:
         pc.start()
 
         self.hasPackage = False
-        
+
         if self.host.appRunner and self.host.appRunner.verifyContents == self.host.appRunner.P3DVCNever:
             # We're not allowed to download anything.
             self.installPlans = []
@@ -447,12 +446,12 @@ class PackageInfo:
             # archive.
             downloadSize = self.compressedArchive.size
             func = lambda step, fileSpec = self.compressedArchive: self.__downloadFile(step, fileSpec, allowPartial = True)
-            
+
             step = self.InstallStep(func, downloadSize, self.downloadFactor, 'download')
             installPlan = [step]
             self.installPlans = [installPlan]
             pc.stop()
-            return 
+            return
 
         # The normal download process.  Determine what we will need to
         # download, and build a plan (or two) to download it all.
@@ -520,9 +519,9 @@ class PackageInfo:
             # plan B as the only plan.
             self.installPlans = [planB]
 
-        # In case of unexpected failures on the internet, we will retry 
+        # In case of unexpected failures on the internet, we will retry
         # the full download instead of just giving up.
-        for retry in range(ConfigVariableInt('package-full-dl-retries', 1)):
+        for retry in range(core.ConfigVariableInt('package-full-dl-retries', 1)):
             self.installPlans.append(planB[:])
 
         pc.stop()
@@ -530,7 +529,7 @@ class PackageInfo:
     def __scanDirectoryRecursively(self, dirname):
         """ Generates a list of Filename objects: all of the files
         (not directories) within and below the indicated dirname. """
-        
+
         contents = []
         for dirpath, dirnames, filenames in os.walk(dirname.toOsSpecific()):
             dirpath = Filename.fromOsSpecific(dirpath)
@@ -560,7 +559,7 @@ class PackageInfo:
 
         # Get a list of all of the files in the directory, so we can
         # remove files that don't belong.
-        contents = self.__scanDirectoryRecursively(self.getPackageDir()) 
+        contents = self.__scanDirectoryRecursively(self.getPackageDir())
         self.__removeFileFromList(contents, self.descFileBasename)
         self.__removeFileFromList(contents, self.compressedArchive.filename)
         self.__removeFileFromList(contents, self.UsageBasename)
@@ -582,7 +581,7 @@ class PackageInfo:
 
         if self.asMirror:
             return self.compressedArchive.quickVerify(self.getPackageDir(), notify = self.notify)
-            
+
         allExtractsOk = True
         if not self.uncompressedArchive.quickVerify(self.getPackageDir(), notify = self.notify):
             self.notify.debug("File is incorrect: %s" % (self.uncompressedArchive.filename))
@@ -593,7 +592,7 @@ class PackageInfo:
             # shouldn't be a compressed archive file here.
             pathname = Filename(self.getPackageDir(), self.compressedArchive.filename)
             pathname.unlink()
-            
+
             for file in self.extracts:
                 if not file.quickVerify(self.getPackageDir(), notify = self.notify):
                     self.notify.debug("File is incorrect: %s" % (file.filename))
@@ -614,7 +613,7 @@ class PackageInfo:
 
         size = self.totalPlanCompleted + self.currentStepEffort * step.getProgress()
         self.downloadProgress = min(float(size) / float(self.totalPlanSize), 1)
-    
+
     def downloadPackage(self, http):
         """ Downloads the package file, synchronously, then
         uncompresses and unpacks it.  Returns true on success, false
@@ -630,7 +629,7 @@ class PackageInfo:
             Thread.considerYield()
 
         return (token == self.stepComplete)
-    
+
     def downloadPackageGenerator(self, http):
         """ A generator function that implements downloadPackage() one
         piece at a time.  It yields one of stepComplete, stepFailed,
@@ -655,7 +654,7 @@ class PackageInfo:
                 yield token
             else:
                 break
-            
+
         while token == self.restartDownload:
             # Try again.
             for token in self.downloadDescFileGenerator(http):
@@ -675,7 +674,7 @@ class PackageInfo:
 
         assert token == self.stepComplete
         yield self.stepComplete; return
-            
+
 
     def __followInstallPlans(self):
         """ Performs all of the steps in self.installPlans.  Yields
@@ -701,16 +700,16 @@ class PackageInfo:
                         yield token
                     else:
                         break
-                    
+
                 if token == self.restartDownload:
                     yield token
                 if token == self.stepFailed:
                     planFailed = True
                     break
                 assert token == self.stepComplete
-                
+
                 self.totalPlanCompleted += self.currentStepEffort
-                
+
             if not planFailed:
                 # Successfully downloaded!
                 yield self.stepComplete; return
@@ -754,7 +753,7 @@ class PackageInfo:
     def __downloadFile(self, step, fileSpec, urlbase = None, filename = None,
                        allowPartial = False):
         """ Downloads the indicated file from the host into
-        packageDir.  Yields one of stepComplete, stepFailed, 
+        packageDir.  Yields one of stepComplete, stepFailed,
         restartDownload, or stepContinue. """
 
         if self.host.appRunner and self.host.appRunner.verifyContents == self.host.appRunner.P3DVCNever:
@@ -806,7 +805,7 @@ class PackageInfo:
                 url += '?' + str(int(time.time()))
                 request = DocumentSpec(url)
                 request.setCacheControl(DocumentSpec.CCNoCache)
-             
+
             self.notify.info("%s downloading %s" % (self.packageName, url))
 
             if not filename:
@@ -839,7 +838,7 @@ class PackageInfo:
                 targetPathname.makeDir()
                 targetPathname.unlink()
                 channel.beginGetDocument(request)
-                
+
             channel.downloadToFile(targetPathname)
             while channel.run():
                 if step:
@@ -849,7 +848,7 @@ class PackageInfo:
                         # it's the wrong file.
                         self.notify.warning("Got more data than expected for download %s" % (url))
                         break
-                    
+
                     self.__updateStepProgress(step)
 
                 if taskMgr.destroyed:
@@ -857,9 +856,9 @@ class PackageInfo:
                     # be shutting down.  Get out of here.
                     self.notify.warning("Task Manager destroyed, aborting %s" % (url))
                     yield self.stepFailed; return
-                    
+
                 yield self.stepContinue
-                
+
             if step:
                 step.bytesDone = channel.getBytesDownloaded() + channel.getFirstByteDelivered()
                 self.__updateStepProgress(step)
@@ -897,7 +896,7 @@ class PackageInfo:
     def __applyPatch(self, step, patchfile):
         """ Applies the indicated patching in-place to the current
         uncompressed archive.  The patchfile is removed after the
-        operation.  Yields one of stepComplete, stepFailed, 
+        operation.  Yields one of stepComplete, stepFailed,
         restartDownload, or stepContinue. """
 
         self.updated = True
@@ -907,7 +906,7 @@ class PackageInfo:
         result = Filename.temporary('', 'patch_')
         self.notify.info("Patching %s with %s" % (origPathname, patchPathname))
 
-        p = PandaModules.Patchfile()  # The C++ class
+        p = core.Patchfile()  # The C++ class
 
         ret = p.initiate(patchPathname, origPathname, result)
         if ret == EUSuccess:
@@ -925,7 +924,7 @@ class PackageInfo:
             ret = p.run()
         del p
         patchPathname.unlink()
-        
+
         if ret < 0:
             self.notify.warning("Patching of %s failed." % (origPathname))
             result.unlink()
@@ -934,12 +933,12 @@ class PackageInfo:
         if not result.renameTo(origPathname):
             self.notify.warning("Couldn't rename %s to %s" % (result, origPathname))
             yield self.stepFailed; return
-            
+
         yield self.stepComplete; return
 
     def __uncompressArchive(self, step):
         """ Turns the compressed archive into the uncompressed
-        archive.  Yields one of stepComplete, stepFailed, 
+        archive.  Yields one of stepComplete, stepFailed,
         restartDownload, or stepContinue. """
 
         if self.host.appRunner and self.host.appRunner.verifyContents == self.host.appRunner.P3DVCNever:
@@ -970,7 +969,7 @@ class PackageInfo:
 
         if result != EUSuccess:
             yield self.stepFailed; return
-            
+
         step.bytesDone = totalBytes
         self.__updateStepProgress(step)
 
@@ -985,10 +984,10 @@ class PackageInfo:
         # Now we can safely remove the compressed archive.
         sourcePathname.unlink()
         yield self.stepComplete; return
-    
+
     def __unpackArchive(self, step):
         """ Unpacks any files in the archive that want to be unpacked
-        to disk.  Yields one of stepComplete, stepFailed, 
+        to disk.  Yields one of stepComplete, stepFailed,
         restartDownload, or stepContinue. """
 
         if not self.extracts:
@@ -1008,7 +1007,7 @@ class PackageInfo:
         if not mf.openRead(mfPathname):
             self.notify.warning("Couldn't open %s" % (mfPathname))
             yield self.stepFailed; return
-        
+
         allExtractsOk = True
         step.bytesDone = 0
         for file in self.extracts:
@@ -1025,7 +1024,7 @@ class PackageInfo:
                 self.notify.warning("Couldn't extract: %s" % (file.filename))
                 allExtractsOk = False
                 continue
-            
+
             if not file.quickVerify(self.getPackageDir(), notify = self.notify):
                 self.notify.warning("After extracting, still incorrect: %s" % (file.filename))
                 allExtractsOk = False
@@ -1157,7 +1156,7 @@ class PackageInfo:
         """ Marks the package as having been used.  This is normally
         called automatically by installPackage(). """
 
-        if not hasattr(PandaModules, 'TiXmlDocument'):
+        if not hasattr(core, 'TiXmlDocument'):
             return
 
         if self.host.appRunner and self.host.appRunner.verifyContents == self.host.appRunner.P3DVCNever:
@@ -1174,14 +1173,14 @@ class PackageInfo:
         if not doc.LoadFile():
             decl = TiXmlDeclaration("1.0", "utf-8", "")
             doc.InsertEndChild(decl)
-            
+
         xusage = doc.FirstChildElement('usage')
         if not xusage:
             doc.InsertEndChild(TiXmlElement('usage'))
             xusage = doc.FirstChildElement('usage')
 
         now = int(time.time())
-        
+
         count = xusage.Attribute('count_app')
         try:
             count = int(count or '')
@@ -1214,22 +1213,22 @@ class PackageInfo:
         tfile = Filename.temporary(self.getPackageDir().cStr(), '.xml')
         if doc.SaveFile(tfile.toOsSpecific()):
             tfile.renameTo(filename)
-        
+
     def getUsage(self):
         """ Returns the xusage element that is read from the usage.xml
         file, or None if there is no usage.xml file. """
 
-        if not hasattr(PandaModules, 'TiXmlDocument'):
+        if not hasattr(core, 'TiXmlDocument'):
             return None
 
         filename = Filename(self.getPackageDir(), self.UsageBasename)
         doc = TiXmlDocument(filename.toOsSpecific())
         if not doc.LoadFile():
             return None
-            
+
         xusage = doc.FirstChildElement('usage')
         if not xusage:
             return None
 
         return copy.copy(xusage)
-    
+

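The in-place patching step in PackageInfo drives panda3d.core's Patchfile. A rough sketch of that loop in isolation, with placeholder filenames and without the task yielding and progress updates of the real code; the exact return-code handling of initiate()/run() is simplified here:

    from panda3d.core import Patchfile, Filename, EUOk

    patch = Filename("pkg.mf.patch")             # placeholder paths
    orig = Filename("pkg.mf")
    result = Filename.temporary('', 'patch_')

    p = Patchfile()
    ret = p.initiate(patch, orig, result)        # patch orig into result
    while ret == EUOk:
        ret = p.run()                            # advance one chunk per call

    if ret < 0:
        result.unlink()                          # failure: discard partial output
    elif not result.renameTo(orig):
        print("Couldn't rename %s to %s" % (result, orig))
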
+ 19 - 19
direct/src/p3d/PackageInstaller.py

@@ -3,7 +3,7 @@ from direct.stdpy.threading import Lock, RLock
 from direct.showbase.MessengerGlobal import messenger
 from direct.task.TaskManagerGlobal import taskMgr
 from direct.p3d.PackageInfo import PackageInfo
-from pandac.PandaModules import TPLow, PStatCollector
+from panda3d.core import TPLow, PStatCollector
 from direct.directnotify.DirectNotifyGlobal import directNotify
 
 class PackageInstaller(DirectObject):
@@ -33,7 +33,7 @@ class PackageInstaller(DirectObject):
     S_ready = 1      # donePackages() has been called
     S_started = 2    # download has started
     S_done = 3       # download is over
-    
+
     class PendingPackage:
         """ This class describes a package added to the installer for
         download. """
@@ -121,12 +121,12 @@ class PackageInstaller(DirectObject):
                 self.prevDownloadedEffort = self.package.getPrevDownloadedEffort()
 
             return True
-            
+
 
         def getDescFile(self, http):
             """ Synchronously downloads the desc files required for
             the package. """
-            
+
             if not self.host.downloadContentsFile(http):
                 return False
 
@@ -156,10 +156,10 @@ class PackageInstaller(DirectObject):
             PackageInstaller.nextUniqueId += 1
         finally:
             self.globalLock.release()
-        
+
         self.appRunner = appRunner
         self.taskChain = taskChain
-        
+
         # If we're to be running on an asynchronous task chain, and
         # the task chain hasn't yet been set up already, create the
         # default parameters now.
@@ -180,7 +180,7 @@ class PackageInstaller(DirectObject):
         # A list of packages that are waiting for their desc files.
         self.needsDescFile = []
         self.descFileTask = None
-        
+
         # A list of packages that are waiting to be downloaded and
         # installed.
         self.needsDownload = []
@@ -198,7 +198,7 @@ class PackageInstaller(DirectObject):
         # This task is spawned on the default task chain, to update
         # the status during the download.
         self.progressTask = None
-        
+
         self.accept('PackageInstaller-%s-allHaveDesc' % self.uniqueId,
                     self.__allHaveDesc)
         self.accept('PackageInstaller-%s-packageStarted' % self.uniqueId,
@@ -231,7 +231,7 @@ class PackageInstaller(DirectObject):
             self.progressTask = None
 
         self.ignoreAll()
-        
+
     def addPackage(self, packageName, version = None, hostUrl = None):
         """ Adds the named package to the list of packages to be
         downloaded.  Call donePackages() to finish the list. """
@@ -256,7 +256,7 @@ class PackageInstaller(DirectObject):
         if pp in self.packages:
             # Already added.
             return
-        
+
         self.packages.append(pp)
 
         # We always add the package to needsDescFile, even if we
@@ -323,7 +323,7 @@ class PackageInstaller(DirectObject):
         (beginning) to 1 (complete). """
 
         self.notify.debug("packageProgress: %s %s" % (package.packageName, progress))
-        
+
     def downloadProgress(self, overallProgress):
         """ This callback is made repeatedly between downloadStarted()
         and downloadFinished() to update the current progress through
@@ -384,7 +384,7 @@ class PackageInstaller(DirectObject):
         """ This method is called internally when all of the pending
         packages have their desc info. """
         working = True
-        
+
         self.packageLock.acquire()
         try:
             if self.state == self.S_ready:
@@ -489,7 +489,7 @@ class PackageInstaller(DirectObject):
         it extracts one package from self.needsDescFile and downloads
         its desc file.  On success, it adds the package to
         self.needsDownload. """
-        
+
         self.packageLock.acquire()
         try:
             # If we've finished all of the packages that need desc
@@ -515,7 +515,7 @@ class PackageInstaller(DirectObject):
         # This package is now ready to be downloaded.  We always add
         # it to needsDownload, even if it's already downloaded, to
         # guarantee ordering of packages.
-        
+
         self.packageLock.acquire()
         try:
             # Also add any packages required by this one.
@@ -527,7 +527,7 @@ class PackageInstaller(DirectObject):
             self.packageLock.release()
 
         return task.cont
-        
+
     def __downloadPackageTask(self, task):
 
         """ This task runs on the aysynchronous task chain; each pass,
@@ -543,7 +543,7 @@ class PackageInstaller(DirectObject):
                     self.packageLock.release()
                     yield task.done; return
 
-                assert self.state == self.S_started        
+                assert self.state == self.S_started
                 pp = self.needsDownload[0]
                 del self.needsDownload[0]
             except:
@@ -578,7 +578,7 @@ class PackageInstaller(DirectObject):
 
             # Continue the loop without yielding, so we pick up the
             # next package within this same frame.
-        
+
     def __donePackage(self, pp, success):
         """ Marks the indicated package as done, either successfully
         or otherwise. """
@@ -630,9 +630,9 @@ class PackageInstaller(DirectObject):
             else:
                 progress = float(currentDownloadSize) / float(downloadEffort)
             self.downloadProgress(progress)
-            
+
         finally:
             self.callbackLock.release()
 
         return task.cont
-    
+

+ 11 - 11
direct/src/p3d/PackageMerger.py

@@ -1,7 +1,7 @@
 from direct.p3d.FileSpec import FileSpec
 from direct.p3d.SeqValue import SeqValue
 from direct.directnotify.DirectNotifyGlobal import *
-from pandac.PandaModules import *
+from panda3d.core import *
 import copy
 import shutil
 import os
@@ -18,11 +18,11 @@ class PackageMerger:
     always the most current version of the file. """
 
     notify = directNotify.newCategory("PackageMerger")
- 
+
     class PackageEntry:
         """ This corresponds to a <package> entry in the contents.xml
         file. """
-        
+
         def __init__(self, xpackage, sourceDir):
             self.sourceDir = sourceDir
             self.loadXml(xpackage)
@@ -48,7 +48,7 @@ class PackageMerger:
             self.descFile.loadXml(xpackage)
 
             self.validatePackageContents()
-            
+
             self.descFile.quickVerify(packageDir = self.sourceDir, notify = PackageMerger.notify, correctSelf = True)
 
             self.packageSeq = SeqValue()
@@ -84,7 +84,7 @@ class PackageMerger:
                 ximport = TiXmlElement('import')
                 self.importDescFile.storeXml(ximport)
                 xpackage.InsertEndChild(ximport)
-            
+
             return xpackage
 
         def validatePackageContents(self):
@@ -170,7 +170,7 @@ class PackageMerger:
             xhost = xcontents.FirstChildElement('host')
             if xhost:
                 self.xhost = xhost.Clone()
-                
+
             xpackage = xcontents.FirstChildElement('package')
             while xpackage:
                 pe = self.PackageEntry(xpackage, sourceDir)
@@ -183,7 +183,7 @@ class PackageMerger:
                     if not other or pe.isNewer(other):
                         # Store this package in the resulting output.
                         self.contents[pe.getKey()] = pe
-                    
+
                 xpackage = xpackage.NextSiblingElement('package')
 
         self.contentsDoc = doc
@@ -247,7 +247,7 @@ class PackageMerger:
             else:
                 # Both the source file and target file are
                 # directories.
-                
+
                 # We have to clean out the target directory first.
                 # Instead of using shutil.rmtree(), remove the files in
                 # this directory one at a time, so we don't inadvertently
@@ -282,10 +282,10 @@ class PackageMerger:
         the current pool.  If packageNames is not None, it is a list
         of package names that we wish to include from the source;
         packages not named in this list will be unchanged. """
-        
+
         if not self.__readContentsFile(sourceDir, packageNames):
             message = "Couldn't read %s" % (sourceDir)
-            raise PackageMergerError, message            
+            raise PackageMergerError, message
 
     def close(self):
         """ Finalizes the results of all of the previous calls to
@@ -302,4 +302,4 @@ class PackageMerger:
 
         self.contentsSeq += 1
         self.__writeContentsFile()
-        
+

+ 144 - 41
direct/src/p3d/Packager.py

@@ -15,6 +15,7 @@ import types
 import getpass
 import platform
 import struct
+import subprocess
 from direct.p3d.FileSpec import FileSpec
 from direct.p3d.SeqValue import SeqValue
 from direct.showbase import Loader
@@ -58,7 +59,7 @@ class Packager:
             self.required = required
 
             if not self.newName:
-                self.newName = self.filename.cStr()
+                self.newName = str(self.filename)
 
             ext = Filename(self.newName).getExtension()
             if ext == 'pz':
@@ -66,7 +67,7 @@ class Packager:
                 # within the Multifile without it.
                 filename = Filename(self.newName)
                 filename.setExtension('')
-                self.newName = filename.cStr()
+                self.newName = str(filename)
                 ext = Filename(self.newName).getExtension()
                 if self.compress is None:
                     self.compress = True
@@ -80,8 +81,8 @@ class Packager:
 
             if self.executable and self.dependencyDir is None:
                 # By default, install executable dependencies in the
-                # same directory with the executable itself.
-                self.dependencyDir = Filename(self.newName).getDirname()
+                # root directory, which is the one that's added to PATH.
+                self.dependencyDir = ''
 
             if self.extract is None:
                 self.extract = self.executable or (ext in packager.extractExtensions)
@@ -93,13 +94,13 @@ class Packager:
 
             if self.executable:
                 # Look up the filename along the system PATH, if necessary.
-                if not self.filename.resolveFilename(packager.executablePath):
+                if not packager.resolveLibrary(self.filename):
                     # If it wasn't found, try looking it up under its
                     # basename only.  Sometimes a Mac user will copy
                     # the library file out of a framework and put that
                     # along the PATH, instead of the framework itself.
                     basename = Filename(self.filename.getBasename())
-                    if basename.resolveFilename(packager.executablePath):
+                    if packager.resolveLibrary(basename):
                         self.filename = basename
 
             if ext in packager.textExtensions and not self.executable:
@@ -156,7 +157,7 @@ class Packager:
             if not self.localOnly:
                 filename = Filename(filename)
                 filename.makeCanonical()
-            self.glob = GlobPattern(filename.cStr())
+            self.glob = GlobPattern(str(filename))
 
             if self.packager.platform.startswith('win'):
                 self.glob.setCaseSensitive(False)
@@ -167,7 +168,7 @@ class Packager:
             if self.localOnly:
                 return self.glob.matches(filename.getBasename())
             else:
-                return self.glob.matches(filename.cStr())
+                return self.glob.matches(str(filename))
 
     class PackageEntry:
         """ This corresponds to a <package> entry in the contents.xml
@@ -586,9 +587,9 @@ class Packager:
                     # (i.e. missing) modules.
                     continue
 
-                if newName == '__main__':
-                    # Ignore this special case.
-                    continue
+                #if newName == '__main__':
+                #    # Ignore this special case.
+                #    continue
 
                 self.moduleNames[newName] = mdef
 
@@ -937,7 +938,7 @@ class Packager:
                         # means we should include the manifest
                         # file itself in the package.
                         newName = Filename(file.dependencyDir, mfile.getBasename())
-                        self.addFile(mfile, newName = newName.cStr(),
+                        self.addFile(mfile, newName = str(newName),
                                      explicit = False, executable = True)
 
                     if afilenames is None and out != 31:
@@ -957,7 +958,7 @@ class Packager:
                     filename.makeTrueCase()
 
                     newName = Filename(file.dependencyDir, filename.getBasename())
-                    self.addFile(filename, newName = newName.cStr(),
+                    self.addFile(filename, newName = str(newName),
                                  explicit = False, executable = True)
 
         def __parseDependenciesWindows(self, tempFile):
@@ -1172,7 +1173,7 @@ class Packager:
                             # It's a fully-specified filename; look
                             # for it under the system root first.
                             if self.packager.systemRoot:
-                                f2 = Filename(self.packager.systemRoot + filename.cStr())
+                                f2 = Filename(self.packager.systemRoot, filename)
                                 if f2.exists():
                                     filename = f2
 
@@ -1181,7 +1182,7 @@ class Packager:
                         continue
 
                     newName = Filename(file.dependencyDir, filename.getBasename())
-                    self.addFile(filename, newName = newName.cStr(),
+                    self.addFile(filename, newName = str(newName),
                                  explicit = False, executable = True)
 
         def __parseDependenciesOSX(self, tempFile):
@@ -1346,6 +1347,7 @@ class Packager:
 
                 # If that failed, perhaps ldd will help us.
                 if filenames is None:
+                    self.notify.warning("Reading ELF library %s failed, using ldd instead" % (file.filename))
                     tempFile = Filename.temporary('', 'p3d_', '.txt')
                     command = 'ldd "%s" >"%s"' % (
                         file.filename.toOsSpecific(),
@@ -1379,7 +1381,7 @@ class Packager:
                     filename.setBinary()
 
                     newName = Filename(file.dependencyDir, filename.getBasename())
-                    self.addFile(filename, newName = newName.cStr(),
+                    self.addFile(filename, newName = str(newName),
                                  explicit = False, executable = True)
 
         def __parseDependenciesPosix(self, tempFile):
@@ -1470,7 +1472,7 @@ class Packager:
             # compatible with older versions of the core API that
             # didn't understand the SF_text flag.
             filename.setBinary()
-            
+
             doc.SaveFile(filename.toOsSpecific())
 
             # It's important not to compress this file: the core API
@@ -1772,8 +1774,6 @@ class Packager:
 
             return xspec
 
-
-
         def addPyFile(self, file):
             """ Adds the indicated python file, identified by filename
             instead of by module name, to the package. """
@@ -1793,6 +1793,18 @@ class Packager:
                 # deal with it again.
                 return
 
+            # Make sure that it is actually in a package.
+            parentName = moduleName
+            while '.' in parentName:
+                parentName = parentName.rsplit('.', 1)[0]
+                if parentName not in self.freezer.modules:
+                    message = 'Cannot add Python file %s; not in package' % (file.newName)
+                    if file.required or file.explicit:
+                        raise StandardError, message
+                    else:
+                        self.notify.warning(message)
+                    return
+
             self.freezer.addModule(moduleName, filename = file.filename)
 
         def addEggFile(self, file):
@@ -1803,7 +1815,7 @@ class Packager:
 
             bamName = Filename(file.newName)
             bamName.setExtension('bam')
-            self.addNode(np.node(), file.filename, bamName.cStr())
+            self.addNode(np.node(), file.filename, str(bamName))
 
         def addBamFile(self, file):
             # Load the bam file so we can massage its textures.
@@ -2207,6 +2219,9 @@ class Packager:
         cvar = ConfigVariableSearchPath('pdef-path')
         self.installSearch = list(map(Filename, cvar.getDirectories()))
 
+        # This is where we cache the location of libraries.
+        self.libraryCache = {}
+
         # The system PATH, for searching dll's and exe's.
         self.executablePath = DSearchPath()
 
@@ -2219,22 +2234,36 @@ class Packager:
         # Now add the actual system search path.
         if self.platform.startswith('win'):
             self.addWindowsSearchPath(self.executablePath, "PATH")
-        elif self.platform.startswith('osx'):
-            self.addPosixSearchPath(self.executablePath, "DYLD_LIBRARY_PATH")
-            self.addPosixSearchPath(self.executablePath, "LD_LIBRARY_PATH")
-            self.addPosixSearchPath(self.executablePath, "PATH")
-            self.executablePath.appendDirectory('/lib')
-            self.executablePath.appendDirectory('/usr/lib')
-            self.executablePath.appendDirectory('/usr/local/lib')
+
         else:
+            if self.platform.startswith('osx'):
+                self.addPosixSearchPath(self.executablePath, "DYLD_LIBRARY_PATH")
+
             self.addPosixSearchPath(self.executablePath, "LD_LIBRARY_PATH")
             self.addPosixSearchPath(self.executablePath, "PATH")
-            self.executablePath.appendDirectory('/lib')
-            self.executablePath.appendDirectory('/usr/lib')
-            self.executablePath.appendDirectory('/usr/local/lib')
 
-        import platform
-        if platform.uname()[1]=="pcbsd":
+            if self.platform.startswith('linux'):
+                # It used to be okay to just add some common paths on Linux.
+                # But nowadays, each distribution has their own convention for
+                # where they put their libraries.  Instead, we query the ldconfig
+                # cache, which contains the location of all libraries.
+
+                if not self.loadLdconfigCache():
+                    # Ugh, failure.  All that remains is to guess.  This should
+                    # work for the most common Debian configurations.
+                    multiarchDir = "/lib/%s-linux-gnu" % (os.uname()[4])
+                    if os.path.isdir(multiarchDir):
+                        self.executablePath.appendDirectory(multiarchDir)
+                    if os.path.isdir("/usr/" + multiarchDir):
+                        self.executablePath.appendDirectory("/usr/" + multiarchDir)
+
+            else:
+                # FreeBSD, or some other system that still makes sense.
+                self.executablePath.appendDirectory('/lib')
+                self.executablePath.appendDirectory('/usr/lib')
+                self.executablePath.appendDirectory('/usr/local/lib')
+
+        if os.uname()[1] == "pcbsd":
             self.executablePath.appendDirectory('/usr/PCBSD/local/lib')
 
         # Set this flag true to automatically add allow_python_dev to
@@ -2388,6 +2417,7 @@ class Packager:
             GlobPattern('libpthread.so*'),
             GlobPattern('libthr.so*'),
             GlobPattern('ld-linux.so*'),
+            GlobPattern('ld-linux-*.so*'),
             ]
 
         # A Loader for loading models.
@@ -2405,6 +2435,60 @@ class Packager:
         # file.
         self.contents = {}
 
+    def loadLdconfigCache(self):
+        """ On GNU/Linux, runs ldconfig -p to find out where all the
+        libraries on the system are located.  Assumes that the platform
+        has already been set. """
+
+        if not os.path.isfile('/sbin/ldconfig'):
+            return False
+
+        handle = subprocess.Popen(['/sbin/ldconfig', '-p'], stdout=subprocess.PIPE)
+        out, err = handle.communicate()
+
+        if handle.returncode != 0:
+            self.notify.warning("/sbin/ldconfig -p returned code %d" %(handle.returncode))
+            return False
+
+        for line in out.splitlines():
+            if '=>' not in line:
+                continue
+
+            prefix, location = line.rsplit('=>', 1)
+            prefix = prefix.strip()
+            location = location.strip()
+
+            if not location or not prefix or ' ' not in prefix:
+                self.notify.warning("Ignoring malformed ldconfig -p line: " + line)
+                continue
+
+            lib, opts = prefix.split(' ', 1)
+            if ('x86-64' in opts) != self.platform.endswith('_amd64'):
+                # This entry isn't meant for our architecture.  I think
+                # x86-64 is the only platform where ldconfig supplies
+                # this extra arch string.
+                continue
+
+            self.libraryCache[lib] = Filename.fromOsSpecific(location)
+
+        return True
+
+    def resolveLibrary(self, filename):
+        """ Resolves the given shared library filename along the executable path,
+        or by cross-referencing it with the library cache. """
+
+        path = str(filename)
+
+        if path in self.libraryCache:
+            filename.setFullpath(self.libraryCache[path].getFullpath())
+            return True
+
+        if filename.resolveFilename(self.executablePath):
+            self.libraryCache[path] = Filename(filename)
+            return True
+
+        return False
+
     def setPlatform(self, platform = None):
         """ Sets the platform that this Packager will compute for.  On
         OSX, this can be used to specify the particular architecture
@@ -3143,9 +3227,17 @@ class Packager:
         for moduleName in args:
             self.currentPackage.freezer.excludeModule(moduleName)
 
+    def do_main(self, filename):
+        """ Includes the indicated file as __main__ module of the application.
+        Also updates mainModule to point to this module. """
+
+        self.addModule(['__main__'], '__main__', filename, required = True)
+        self.currentPackage.mainModule = ('__main__', '__main__')
+
     def do_mainModule(self, moduleName, newName = None, filename = None):
         """ Names the indicated module as the "main" module of the
-        application or exe. """
+        application or exe.  In most cases, you will want to use main()
+        instead. """
 
         if not self.currentPackage:
             raise OutsideOfPackageError
@@ -3162,7 +3254,7 @@ class Packager:
             newFilename = Filename('/'.join(moduleName.split('.')))
             newFilename.setExtension(filename.getExtension())
             self.currentPackage.addFile(
-                filename, newName = newFilename.cStr(),
+                filename, newName = str(newFilename),
                 explicit = True, extract = True, required = True)
 
         self.currentPackage.mainModule = (moduleName, newName)
@@ -3194,7 +3286,9 @@ class Packager:
         # Multifile, so this file can't itself be in the Multifile.
 
         # This requires a bit of care, because we only want to freeze
-        # VFSImporter.py, and not any other part of direct.
+        # VFSImporter.py, and not any other part of direct.  We do
+        # also want panda3d/__init__.py, though, since it would
+        # otherwise be part of the multifile.
         self.do_excludeModule('direct')
 
         # Import the actual VFSImporter module to get its filename on
@@ -3205,7 +3299,7 @@ class Packager:
         self.do_module('VFSImporter', filename = filename)
         self.do_freeze('_vfsimporter', compileToExe = False)
 
-        self.do_file('libpandaexpress.dll');
+        self.do_file('panda3d/core.pyd');
 
         # Now that we're done freezing, explicitly add 'direct' to
         # counteract the previous explicit excludeModule().
@@ -3438,13 +3532,13 @@ class Packager:
                     # build.
                     dllFilename = Filename(filename)
                     dllFilename.setExtension('so')
-                    dllFilename = Filename.dsoFilename(dllFilename.cStr())
+                    dllFilename = Filename.dsoFilename(str(dllFilename))
                     if dllFilename != filename:
                         thisFiles = glob.glob(filename.toOsSpecific())
                         if not thisFiles:
                             # We have to resolve this filename to
                             # determine if it's a _d or not.
-                            if dllFilename.resolveFilename(self.executablePath):
+                            if self.resolveLibrary(dllFilename):
                                 thisFiles = [dllFilename.toOsSpecific()]
                             else:
                                 thisFiles = [filename.toOsSpecific()]
@@ -3455,7 +3549,7 @@ class Packager:
 
         prefix = ''
         if newDir is not None:
-            prefix = Filename(newDir).cStr()
+            prefix = str(Filename(newDir))
             if prefix and prefix[-1] != '/':
                 prefix += '/'
 
@@ -3527,13 +3621,22 @@ class Packager:
         sys.path.append(dirname.toOsSpecific())
         self.__recurseDir(dirname, newDir, unprocessed = unprocessed)
 
-    def __recurseDir(self, filename, newName, unprocessed = None):
+    def __recurseDir(self, filename, newName, unprocessed = None, packageTree = None):
         dirList = vfs.scanDirectory(filename)
         if dirList:
             # It's a directory name.  Recurse.
             prefix = newName
             if prefix and prefix[-1] != '/':
                 prefix += '/'
+
+            # First check if this is a Python package tree.  If so, add it
+            # implicitly as a module.
+            for subfile in dirList:
+                filename = subfile.getFilename()
+                if filename.getBasename() == '__init__.py':
+                    moduleName = newName.replace("/", ".")
+                    self.addModule([moduleName], filename=filename)
+
             for subfile in dirList:
                 filename = subfile.getFilename()
                 self.__recurseDir(filename, prefix + filename.getBasename(),
@@ -3550,7 +3653,7 @@ class Packager:
                 # Strip off an implicit .pz extension.
                 newFilename = Filename(filename)
                 newFilename.setExtension('')
-                newFilename = Filename(newFilename.cStr())
+                newFilename = Filename(str(newFilename))
                 ext = newFilename.getExtension()
 
             if ext in self.knownExtensions:
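
The loadLdconfigCache()/resolveLibrary() additions above replace the old hard-coded /lib, /usr/lib search on Linux with a lookup into the ldconfig cache. A minimal, self-contained sketch of the parsing that loadLdconfigCache() applies to `ldconfig -p` output; the two sample lines below are illustrative and not taken from this commit, and the real method stores Filename objects rather than plain strings:

```python
# Stand-alone sketch of the `ldconfig -p` parsing used by loadLdconfigCache().
sample_output = """\
\tlibz.so.1 (libc6,x86-64) => /lib/x86_64-linux-gnu/libz.so.1
\tlibz.so.1 (libc6) => /lib/i386-linux-gnu/libz.so.1
"""

library_cache = {}
want_amd64 = True   # stands in for self.platform.endswith('_amd64')

for line in sample_output.splitlines():
    if '=>' not in line:
        continue                      # header line, not a cache entry

    prefix, location = line.rsplit('=>', 1)
    prefix = prefix.strip()
    location = location.strip()
    if not prefix or not location or ' ' not in prefix:
        continue                      # malformed entry

    lib, opts = prefix.split(' ', 1)  # e.g. 'libz.so.1', '(libc6,x86-64)'
    if ('x86-64' in opts) != want_amd64:
        continue                      # entry is for the other architecture

    library_cache[lib] = location

print(library_cache)
# {'libz.so.1': '/lib/x86_64-linux-gnu/libz.so.1'}
```

resolveLibrary() then consults this cache before falling back to the executable-path search, and caches any path hits, so repeated lookups of the same library stay cheap.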

+ 30 - 30
direct/src/p3d/PatchMaker.py

@@ -1,6 +1,6 @@
 from direct.p3d.FileSpec import FileSpec
 from direct.p3d.SeqValue import SeqValue
-from pandac.PandaModules import *
+from panda3d.core import *
 import copy
 
 class PatchMaker:
@@ -14,7 +14,7 @@ class PatchMaker:
         the package's "version" string; it also corresponds to the
         particular patch version, which increments independently of
         the "version". """
-        
+
         def __init__(self, packageName, platform, version, hostUrl, file):
             self.packageName = packageName
             self.platform = platform
@@ -84,7 +84,7 @@ class PatchMaker:
             apply in sequence, and the packageVersion object
             associated with each patch.  Returns (None, None, None) if
             there is no way to recreate this archive file.  """
-            
+
             if self.tempFile:
                 return (self.tempFile, self, [])
 
@@ -101,7 +101,7 @@ class PatchMaker:
                 # version of some package.
                 package = self.packageCurrent
                 return (Filename(package.packageDir, package.compressedFilename), self, [])
-            
+
             if self.packageBase:
                 # This PackageVersion instance represents the base
                 # (oldest) version of some package.
@@ -144,7 +144,7 @@ class PatchMaker:
                     # Failure trying to decompress the file.
                     return None
                 startFile = startPv.tempFile
-            
+
             if not plan:
                 # If plan is a zero-length list, we're already
                 # here--return startFile.  If plan is None, there's no
@@ -162,7 +162,7 @@ class PatchMaker:
                 if not result:
                     # Failure trying to re-create the file.
                     return None
-                
+
                 pv.tempFile = result
                 prevFile = result
 
@@ -194,10 +194,10 @@ class PatchMaker:
                     return patch.toPv
 
             return None
-        
+
     class Patchfile:
         """ A single patchfile for a package. """
-        
+
         def __init__(self, package):
             self.package = package
             self.packageName = package.packageName
@@ -233,7 +233,7 @@ class PatchMaker:
         def fromFile(self, packageDir, patchFilename, sourceFile, targetFile):
             """ Creates the data structures from an existing patchfile
             on disk. """
-            
+
             self.file = FileSpec()
             self.file.fromFile(packageDir, patchFilename)
             self.sourceFile = sourceFile
@@ -241,7 +241,7 @@ class PatchMaker:
 
         def loadXml(self, xpatch):
             """ Reads the data structures from an xml file. """
-            
+
             self.packageName = xpatch.Attribute('name') or self.packageName
             self.platform = xpatch.Attribute('platform') or self.platform
             self.version = xpatch.Attribute('version') or self.version
@@ -287,7 +287,7 @@ class PatchMaker:
     class Package:
         """ This is a particular package.  This contains all of the
         information needed to reconstruct the package's desc file. """
-        
+
         def __init__(self, packageDesc, patchMaker, xpackage = None):
             self.packageDir = Filename(patchMaker.installDir, packageDesc.getDirname())
             self.packageDesc = packageDesc
@@ -312,19 +312,19 @@ class PatchMaker:
         def getCurrentKey(self):
             """ Returns the key to locate the current version of this
             package. """
-            
+
             return (self.packageName, self.platform, self.version, self.hostUrl, self.currentFile)
 
         def getBaseKey(self):
             """ Returns the key to locate the "base" or oldest version
             of this package. """
-            
+
             return (self.packageName, self.platform, self.version, self.hostUrl, self.baseFile)
 
         def getTopKey(self):
             """ Returns the key to locate the "top" or newest version
             of this package. """
-            
+
             return (self.packageName, self.platform, self.version, self.hostUrl, self.topFile)
 
         def getGenericKey(self, fileSpec):
@@ -345,7 +345,7 @@ class PatchMaker:
             if not self.doc.LoadFile():
                 print "Couldn't read %s" % (packageDescFullpath)
                 return False
-            
+
             xpackage = self.doc.FirstChildElement('package')
             if not xpackage:
                 return False
@@ -358,7 +358,7 @@ class PatchMaker:
             # other hosts, which means we'll need to fill in a value
             # here for those hosts.
             self.hostUrl = None
-        
+
             self.currentFile = None
             self.baseFile = None
             self.topFile = None
@@ -388,7 +388,7 @@ class PatchMaker:
                 else:
                     # There's a new version this pass.  Update it.
                     self.anyChanges = True
-                
+
             else:
                 # If there isn't a top_version yet, we have to make
                 # one, by duplicating the currentFile.
@@ -516,7 +516,7 @@ class PatchMaker:
             xarchive = TiXmlElement('top_version')
             self.currentFile.storeXml(xarchive)
             xpackage.InsertEndChild(xarchive)
-            
+
             for patchfile in self.patches:
                 xpatch = patchfile.makeXml(self)
                 xpackage.InsertEndChild(xpatch)
@@ -571,7 +571,7 @@ class PatchMaker:
         patches for all packages; otherwise, it should be a list of
         package name strings, limiting the set of packages that are
         processed. """
-        
+
         if not self.readContentsFile():
             return False
         self.buildPatchChains()
@@ -587,7 +587,7 @@ class PatchMaker:
     def cleanup(self):
         """ Should be called on exit to remove temporary files and
         such created during processing. """
-        
+
         for pv in self.packageVersions.values():
             pv.cleanup()
 
@@ -596,11 +596,11 @@ class PatchMaker:
         constructs a patch chain from the version represented by
         fileSpec to the current version of this package, if possible.
         Returns the patch chain if successful, or None otherwise. """
-        
+
         package = self.readPackageDescFile(descFilename)
         if not package:
             return None
-        
+
         self.buildPatchChains()
         fromPv = self.getPackageVersion(package.getGenericKey(fileSpec))
         toPv = package.currentPv
@@ -619,7 +619,7 @@ class PatchMaker:
         package = self.Package(Filename(descFilename), self)
         if not package.readDescFile(doProcessing = False):
             return None
-        
+
         self.packages.append(package)
         return package
 
@@ -640,7 +640,7 @@ class PatchMaker:
             contentsSeq.loadXml(xcontents)
             contentsSeq += 1
             contentsSeq.storeXml(xcontents)
-            
+
             xpackage = xcontents.FirstChildElement('package')
             while xpackage:
                 solo = xpackage.Attribute('solo')
@@ -651,7 +651,7 @@ class PatchMaker:
                     package = self.Package(filename, self, xpackage)
                     package.readDescFile(doProcessing = True)
                     self.packages.append(package)
-                    
+
                 xpackage = xpackage.NextSiblingElement('package')
 
         self.contentsDoc = doc
@@ -685,7 +685,7 @@ class PatchMaker:
             pv = self.PackageVersion(*key)
             self.packageVersions[k] = pv
         return pv
-    
+
     def buildPatchChains(self):
         """ Builds up the chains of PackageVersions and the patchfiles
         that connect them. """
@@ -696,7 +696,7 @@ class PatchMaker:
             if not package.baseFile:
                 # This package doesn't have any versions yet.
                 continue
-            
+
             currentPv = self.getPackageVersion(package.getCurrentKey())
             package.currentPv = currentPv
             currentPv.packageCurrent = package
@@ -710,7 +710,7 @@ class PatchMaker:
             topPv = self.getPackageVersion(package.getTopKey())
             package.topPv = topPv
             topPv.packageTop = package
-            
+
             for patchfile in package.patches:
                 self.recordPatchfile(patchfile)
 
@@ -757,7 +757,7 @@ class PatchMaker:
         # What's the current version on the top of the tree?
         topPv = package.topPv
         currentPv = package.currentPv
-        
+
         if topPv != currentPv:
             # They're different, so build a new patch.
             filename = Filename(package.currentFile.filename + '.%s.patch' % (package.patchVersion))
@@ -786,7 +786,7 @@ class PatchMaker:
                            v1.file, v2.file)
         package.patches.append(patchfile)
         package.anyChanges = True
-        
+
         self.recordPatchfile(patchfile)
 
         return True

+ 6 - 6
direct/src/p3d/ScanDirectoryNode.py

@@ -1,9 +1,9 @@
-from pandac.PandaModules import VirtualFileSystem, VirtualFileMountSystem, Filename, TiXmlDocument
+from panda3d.core import VirtualFileSystem, VirtualFileMountSystem, Filename, TiXmlDocument
 vfs = VirtualFileSystem.getGlobalPtr()
 
 class ScanDirectoryNode:
     """ This class is used to scan a list of files on disk. """
-    
+
     def __init__(self, pathname, ignoreUsageXml = False):
         self.pathname = pathname
         self.filenames = []
@@ -38,7 +38,7 @@ class ScanDirectoryNode:
                 if not isinstance(vfile.getMount(), VirtualFileMountSystem):
                     # Not a real file; ignore it.
                     continue
-                
+
             if vfile.isDirectory():
                 # A nested directory.
                 subdir = ScanDirectoryNode(vfile.getFilename(), ignoreUsageXml = ignoreUsageXml)
@@ -49,7 +49,7 @@ class ScanDirectoryNode:
                 # A nested file.
                 self.filenames.append(vfile.getFilename())
                 self.fileSize += vfile.getFileSize()
-                
+
             else:
                 # Some other wacky file thing.
                 self.filenames.append(vfile.getFilename())
@@ -78,7 +78,7 @@ class ScanDirectoryNode:
                 self.nested.remove(subdir)
                 self.nestedSize -= subdir.getTotalSize()
                 return subdir
-            
+
             result = subdir.extractSubdir(pathname)
             if result:
                 self.nestedSize -= result.getTotalSize()
@@ -89,5 +89,5 @@ class ScanDirectoryNode:
                 return result
 
         return None
-    
+
 

+ 4 - 4
direct/src/p3d/coreapi.pdef

@@ -1,4 +1,4 @@
-from pandac.PandaModules import getModelPath, Filename, ConfigVariableFilename, DSearchPath, ExecutionEnvironment, PandaSystem
+from panda3d.core import getModelPath, Filename, ConfigVariableFilename, DSearchPath, ExecutionEnvironment, PandaSystem
 
 # This file defines a number of standard "packages" that correspond to
 # a Panda3D distribution.  These packages are built by passing this
@@ -57,7 +57,7 @@ class images(package):
             basename = '%s.jpg' % (name)
             filename = Filename(basename)
             found = filename.resolveFilename(search)
-            
+
         if found:
             # Add the image file to the package
             file(filename, newName = basename, extract = True)
@@ -91,7 +91,7 @@ class certlist(package):
     # ".pem" or ".crt" extension, and they should be added with the
     # extract = True flag so they will be extracted to disk.
     pass
-    
+
 
 class p3dcert(package):
     # This special application, used to pop up a dialog to prompt the
@@ -102,7 +102,7 @@ class p3dcert(package):
         # On Mac, we package up a P3DCert.app bundle.  This includes
         # specifications in the plist file to avoid creating a dock
         # icon and stuff.
-        
+
         # Find p3dcert.plist in the direct source tree.
         import direct
         plist = Filename(direct.__path__[0], 'plugin/p3dcert.plist')

+ 6 - 10
direct/src/p3d/packp3d.py

@@ -93,8 +93,8 @@ import os
 import getopt
 import glob
 import direct
-from direct.p3d import Packager 
-from pandac.PandaModules import *
+from direct.p3d import Packager
+from panda3d.core import *
 
 # Temp hack for debugging.
 #from direct.p3d.AppRunner import dummyAppRunner; dummyAppRunner()
@@ -113,7 +113,7 @@ def makePackedApp(args):
     configFlags = []
     requires = []
     allowPythonDev = False
-    
+
     for option, value in opts:
         if option == '-o':
             appFilename = Filename.fromOsSpecific(value)
@@ -183,11 +183,7 @@ def makePackedApp(args):
             main = os.path.split(main[0])[1]
 
     main = Filename.fromOsSpecific(main)
-    mainModule = Filename(main)
-    mainModule.setExtension('')
 
-    mainModule = mainModule.cStr().replace('/', '.')
-    
     packager.installDir = appDir
     packager.allowPythonDev = allowPythonDev
 
@@ -206,7 +202,7 @@ def makePackedApp(args):
         # panda3d.
         if 'panda3d' not in [t[0] for t in requires]:
             packager.do_require('panda3d')
-        
+
         for name, version, host in requires:
             packager.do_require(name, version = version, host = host)
 
@@ -214,10 +210,10 @@ def makePackedApp(args):
             packager.do_config(**dict(configFlags))
 
         packager.do_dir(root)
-        packager.do_mainModule(mainModule)
+        packager.do_main(main)
         packager.endPackage()
         packager.close()
-        
+
     except Packager.PackagerError:
         # Just print the error message and exit gracefully.
         inst = sys.exc_info()[1]
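
With do_main() available, packp3d no longer converts the main script's path into a dotted module name; the script is registered directly as __main__. A rough sketch of the resulting flow, using placeholder names and paths; setup() and beginPackage() are assumed to be invoked the way packp3d normally does, since those calls fall outside the hunks shown here:

```python
# Sketch only: 'myapp', the paths, and the setup()/beginPackage() calls are
# placeholders/assumptions; do_require/do_dir/do_main/endPackage/close are
# the calls visible in the packp3d.py hunks above.
from direct.p3d import Packager
from panda3d.core import Filename

packager = Packager.Packager()
packager.installDir = Filename.fromOsSpecific('/tmp/myapp-build')  # output dir
packager.setup()                                      # assumed setup call
packager.beginPackage('myapp', p3dApplication=True)   # assumed signature

packager.do_require('panda3d')                        # base runtime package
packager.do_dir(Filename.fromOsSpecific('./myapp'))   # application source tree
packager.do_main(Filename('main.py'))   # new: main.py becomes the __main__ module
packager.endPackage()
packager.close()
```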

+ 27 - 4
direct/src/p3d/panda3d.pdef

@@ -1,4 +1,4 @@
-from pandac.PandaModules import Filename, PandaSystem, getModelPath
+from panda3d.core import Filename, PandaSystem, getModelPath
 
 # This file defines a number of standard "packages" that correspond to
 # a Panda3D distribution.  These packages are built by passing this
@@ -54,7 +54,11 @@ class panda3d(package):
            'direct.showutil.*',
            'direct.stdpy.*',
            'direct.task.*')
-    module('panda3d')
+
+    module('panda3d.core',
+           'panda3d.direct',
+           'panda3d.fx',
+           'panda3d.physics')
 
     # Include various standard Python encodings.  The rest is in morepy.
     module('encodings', 'encodings.aliases', 'encodings.undefined,'
@@ -65,9 +69,19 @@ class panda3d(package):
     import direct
     file(Filename(direct.__path__[0], 'filter/*.sha'), newDir = 'direct/filter')
 
+    # pandac.PandaModules pulls in other Panda3D libs automatically.
     # Exclude these Panda3D libs; they are big and many applications don't
     # use them.  We define them as separate, optional packages, below,
     # except for skel - this is useless in a shipped game anyways.
+    excludeModule('panda3d.egg')
+    excludeModule('panda3d.ode')
+    excludeModule('panda3d.bullet')
+    excludeModule('panda3d.vision')
+    excludeModule('panda3d.skel')
+    excludeModule('panda3d.physx')
+    excludeModule('panda3d.ai')
+    excludeModule('panda3d.vrpn')
+
     excludeModule('libpandaegg')
     excludeModule('libpandaode')
     excludeModule('libpandabullet')
@@ -89,6 +103,8 @@ class panda3d(package):
 
     excludeModule('MySQLdb', '_mysql')
 
+    excludeModule('xml', 'xml.parsers.expat', 'xml.sax')
+
     # Most of the core Panda3D DLL's will be included implicitly due to
     # being referenced by the above Python code.  Here we name a few more
     # that are also needed, but aren't referenced by any code.  Again,
@@ -96,7 +112,7 @@ class panda3d(package):
     # platform-specific extension for an executable.
     file('libpandagl.dll', 'libp3tinydisplay.dll')
     if platform.startswith('win'):
-        file('libpandadx8.dll', 'libpandadx9.dll')
+        file('libpandadx9.dll')
 
     # A basic config file is needed to set some fundamental runtime
     # variables.
@@ -104,7 +120,6 @@ class panda3d(package):
         auxDisplays = """
 aux-display pandagl
 aux-display pandadx9
-aux-display pandadx8
 aux-display p3tinydisplay
 """
     else:
@@ -270,6 +285,7 @@ class egg(package):
     config(display_name = "Panda3D egg loader")
     require('panda3d')
 
+    module('panda3d.egg', required = True)
     file('libpandaegg.dll', required = True)
     file('libp3ptloader.dll', required = True)
 
@@ -287,6 +303,7 @@ class ode(package):
     config(display_name = "Panda3D Open Dynamics Engine integration")
     require('panda3d')
 
+    module('panda3d.ode', required = True)
     file('libpandaode.dll', required = True)
 
 class bullet(package):
@@ -297,6 +314,7 @@ class bullet(package):
     config(display_name = "Panda3D Bullet integration")
     require('panda3d')
 
+    module('panda3d.bullet', required = True)
     file('libpandabullet.dll', required = True)
 
 class physx(package):
@@ -307,6 +325,7 @@ class physx(package):
     config(display_name = "Panda3D PhysX integration")
     require('panda3d')
 
+    module('panda3d.physx', required = True)
     file('libpandaphysx.dll', required = True)
     file('physxcudart_20.dll')
     file('PhysXDevice.dll')
@@ -320,6 +339,7 @@ class ai(package):
     config(display_name = "Panda3D AI modules")
     require('panda3d')
 
+    module('panda3d.ai', required = True)
     file('libpandaai.dll', required = True)
 
 class vision(package):
@@ -331,6 +351,7 @@ class vision(package):
     config(display_name = "Panda3D vision modules")
     require('panda3d')
 
+    module('panda3d.vision', required = True)
     file('libp3vision.dll', required = True)
 
 class rocket(package):
@@ -341,6 +362,7 @@ class rocket(package):
     config(display_name = "Panda3D libRocket support")
     require('panda3d')
 
+    module('panda3d.rocket', required = True)
     module('_rocketcore', '_rocketcontrols')
     file('libp3rocket.dll', required = True)
 
@@ -351,6 +373,7 @@ class vrpn(package):
     config(display_name = "Panda3D VRPN support")
     require('panda3d')
 
+    module('panda3d.vrpn', required = True)
     file('libp3vrpn.dll', required = True)
 
 

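The effect of the regrouped module() and excludeModule() lists above is that the base 'panda3d' package ships panda3d.core, panda3d.direct, panda3d.fx and panda3d.physics, while the remaining panda3d.* modules travel with their optional packages. A short illustration from the packaged application's point of view; the class names are ordinary Panda3D classes used only as examples:

```python
# Inside an application that requires only the base 'panda3d' package:
from panda3d.core import NodePath               # shipped with the base package
from panda3d.physics import LinearVectorForce   # also in the base package

# This import only resolves if the app additionally requires the optional
# 'ode' package defined above, which carries panda3d.ode and libpandaode.dll:
# from panda3d.ode import OdeWorld
```
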
+ 2 - 2
direct/src/p3d/pdeploy.py

@@ -102,7 +102,7 @@ Options:
      is licensed under.
      Only relevant when generating a graphical installer.
 
-  -O 
+  -O
      Specify this option when generating a graphical installer to omit
      the default checkboxes for "run this program" and "install a
      desktop shortcut" on completion.
@@ -147,7 +147,7 @@ import sys
 import os
 import getopt
 from direct.p3d.DeploymentTools import Standalone, Installer, Icon
-from pandac.PandaModules import Filename, PandaSystem
+from panda3d.core import Filename, PandaSystem
 
 def usage(code, msg = ''):
     if not msg:

+ 3 - 3
direct/src/p3d/pmerge.py

@@ -47,7 +47,7 @@ import getopt
 import os
 
 from direct.p3d import PackageMerger
-from pandac.PandaModules import *
+from panda3d.core import *
 
 def usage(code, msg = ''):
     print >> sys.stderr, usageText % {'prog' : os.path.split(sys.argv[0])[1]}
@@ -66,7 +66,7 @@ for opt, arg in opts:
         installDir = Filename.fromOsSpecific(arg)
     elif opt == '-p':
         packageNames += arg.split(',')
-        
+
     elif opt == '-h':
         usage(0)
     else:
@@ -92,7 +92,7 @@ try:
     for dir in inputDirs:
         pm.merge(dir, packageNames = packageNames)
     pm.close()
-        
+
 except PackageMerger.PackageMergerError:
     # Just print the error message and exit gracefully.
     inst = sys.exc_info()[1]

+ 1 - 1
direct/src/p3d/ppackage.py

@@ -132,7 +132,7 @@ import getopt
 import os
 
 from direct.p3d import Packager
-from pandac.PandaModules import *
+from panda3d.core import *
 
 def usage(code, msg = ''):
     print >> sys.stderr, usageText % {

+ 2 - 2
direct/src/p3d/ppatcher.py

@@ -60,7 +60,7 @@ import getopt
 import os
 
 from direct.p3d.PatchMaker import PatchMaker
-from pandac.PandaModules import *
+from panda3d.core import *
 
 def usage(code, msg = ''):
     print >> sys.stderr, usageText % {'prog' : os.path.split(sys.argv[0])[1]}
@@ -76,7 +76,7 @@ installDir = None
 for opt, arg in opts:
     if opt == '-i':
         installDir = Filename.fromOsSpecific(arg)
-        
+
     elif opt == '-h':
         usage(0)
     else:

+ 1 - 1
direct/src/p3d/runp3d.py

@@ -28,7 +28,7 @@ import sys
 import getopt
 from AppRunner import AppRunner, ArgumentError
 from direct.task.TaskManagerGlobal import taskMgr
-from pandac.PandaModules import Filename
+from panda3d.core import Filename
 
 def parseSysArgs():
     """ Handles sys.argv, if there are any local arguments, and

+ 2 - 2
direct/src/p3d/thirdparty.pdef

@@ -13,7 +13,7 @@
 
 # Also see panda3d.pdef.
 
-from pandac.PandaModules import Filename
+from panda3d.core import Filename
 import sys
 
 class wx(package):
@@ -95,7 +95,7 @@ class twisted(package):
            'twisted.runner', 'twisted.scripts', 'twisted.spread',
            'twisted.tap', 'twisted.trial', 'twisted.vfs',
            'twisted.web', 'twisted.web2', 'twisted.words')
-           
+
     module('twisted.*', 'twisted.*.*')
 
 class pil(package):

+ 2 - 2
direct/src/particles/ForceGroup.py

@@ -12,7 +12,7 @@ class ForceGroup(DirectObject):
 
     def __init__(self, name=None):
         if (name == None):
-            self.name = 'ForceGroup-%d' % ForceGroup.id 
+            self.name = 'ForceGroup-%d' % ForceGroup.id
             ForceGroup.id += 1
         else:
             self.name = name
@@ -65,7 +65,7 @@ class ForceGroup(DirectObject):
     def getNodePath(self):
         return self.nodePath
 
-    # Utility functions 
+    # Utility functions
     def __getitem__(self, index):
         numForces = self.node.getNumForces()
         if ((index < 0) or (index >= numForces)):

+ 1 - 1
direct/src/particles/ParticleManagerGlobal.py

@@ -1,6 +1,6 @@
 """
 Contains the global particle system manager
 """
-from pandac.PandaModules import ParticleSystemManager
+from panda3d.physics import ParticleSystemManager
 
 particleMgr = ParticleSystemManager()

+ 29 - 30
direct/src/particles/Particles.py

@@ -1,27 +1,27 @@
 from pandac.PandaModules import *
 
-from pandac.PandaModules import ParticleSystem
-from pandac.PandaModules import BaseParticleFactory
-from pandac.PandaModules import PointParticleFactory
-from pandac.PandaModules import ZSpinParticleFactory
-#from pandac.PandaModules import OrientedParticleFactory
-from pandac.PandaModules import BaseParticleRenderer
-from pandac.PandaModules import PointParticleRenderer
-from pandac.PandaModules import LineParticleRenderer
-from pandac.PandaModules import GeomParticleRenderer
-from pandac.PandaModules import SparkleParticleRenderer
-#from pandac.PandaModules import SpriteParticleRenderer
-from pandac.PandaModules import BaseParticleEmitter
-from pandac.PandaModules import ArcEmitter
-from pandac.PandaModules import BoxEmitter
-from pandac.PandaModules import DiscEmitter
-from pandac.PandaModules import LineEmitter
-from pandac.PandaModules import PointEmitter
-from pandac.PandaModules import RectangleEmitter
-from pandac.PandaModules import RingEmitter
-from pandac.PandaModules import SphereSurfaceEmitter
-from pandac.PandaModules import SphereVolumeEmitter
-from pandac.PandaModules import TangentRingEmitter
+from panda3d.physics import ParticleSystem
+from panda3d.physics import BaseParticleFactory
+from panda3d.physics import PointParticleFactory
+from panda3d.physics import ZSpinParticleFactory
+#from panda3d.physics import OrientedParticleFactory
+from panda3d.physics import BaseParticleRenderer
+from panda3d.physics import PointParticleRenderer
+from panda3d.physics import LineParticleRenderer
+from panda3d.physics import GeomParticleRenderer
+from panda3d.physics import SparkleParticleRenderer
+#from panda3d.physics import SpriteParticleRenderer
+from panda3d.physics import BaseParticleEmitter
+from panda3d.physics import ArcEmitter
+from panda3d.physics import BoxEmitter
+from panda3d.physics import DiscEmitter
+from panda3d.physics import LineEmitter
+from panda3d.physics import PointEmitter
+from panda3d.physics import RectangleEmitter
+from panda3d.physics import RingEmitter
+from panda3d.physics import SphereSurfaceEmitter
+from panda3d.physics import SphereVolumeEmitter
+from panda3d.physics import TangentRingEmitter
 
 import SpriteParticleRendererExt
 
@@ -464,7 +464,7 @@ class Particles(ParticleSystem):
                     t_b = seg.getTimeBegin()
                     t_e = seg.getTimeEnd()
                     mod = seg.isModulated()
-                    fun = seg.getFunction()                    
+                    fun = seg.getFunction()
                     typ = type(fun).__name__
                     if typ == 'ColorInterpolationFunctionConstant':
                         c_a = fun.getColorA()
@@ -560,7 +560,7 @@ class Particles(ParticleSystem):
             file.write('# Tangent Ring parameters\n')
             file.write(targ + '.emitter.setRadius(%.4f)\n' % self.emitter.getRadius())
             file.write(targ + '.emitter.setRadiusSpread(%.4f)\n' % self.emitter.getRadiusSpread())
-            
+
     def getPoolSizeRanges(self):
         litterRange = [max(1,self.getLitterSize()-self.getLitterSpread()),
                        self.getLitterSize(),
@@ -573,9 +573,9 @@ class Particles(ParticleSystem):
         print 'Litter Ranges:    ',litterRange
         print 'LifeSpan Ranges:  ',lifespanRange
         print 'BirthRate Ranges: ',birthRateRange
-        
+
         return dict(zip(('min','median','max'),[l*s/b for l,s,b in zip(litterRange,lifespanRange,birthRateRange)]))
-            
+
 
     def accelerate(self,time,stepCount = 1,stepTime=0.0):
         if time > 0.0:
@@ -585,14 +585,13 @@ class Particles(ParticleSystem):
             else:
                 stepCount = int(float(time)/stepTime)
                 remainder = time-stepCount*stepTime
-                
+
             for step in range(stepCount):
                 base.particleMgr.doParticles(stepTime,self,False)
                 base.physicsMgr.doPhysics(stepTime,self)
-                
+
             if(remainder):
                 base.particleMgr.doParticles(remainder,self,False)
                 base.physicsMgr.doPhysics(remainder,self)
-                
+
             self.render()
-        

+ 1 - 2
direct/src/particles/SpriteParticleRendererExt.py

@@ -1,4 +1,4 @@
-from pandac.PandaModules import SpriteParticleRenderer
+from panda3d.physics import SpriteParticleRenderer
 
 
 class SpriteParticleRendererExt(SpriteParticleRenderer):
@@ -126,4 +126,3 @@ class SpriteParticleRendererExt(SpriteParticleRenderer):
         m.removeNode()
 
         return True
-

+ 36 - 77
direct/src/plugin/p3dPythonRun.cxx

@@ -34,11 +34,11 @@ TypeHandle P3DPythonRun::P3DWindowHandle::_type_handle;
 ////////////////////////////////////////////////////////////////////
 //     Function: P3DPythonRun::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 P3DPythonRun::
 P3DPythonRun(const char *program_name, const char *archive_file,
-             FHandle input_handle, FHandle output_handle, 
+             FHandle input_handle, FHandle output_handle,
              const char *log_pathname, bool interactive_console) {
   P3DWindowHandle::init_type();
   init_xml();
@@ -115,7 +115,7 @@ P3DPythonRun(const char *program_name, const char *archive_file,
 ////////////////////////////////////////////////////////////////////
 //     Function: P3DPythonRun::Destructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 P3DPythonRun::
 ~P3DPythonRun() {
@@ -150,72 +150,32 @@ run_python() {
   PyRun_SimpleString("import sys; sys.dll_suffix = '_d'");
 #endif
 
-  // We'll need libpandaexpress to be imported before we can load
-  // _vfsimporter.  So, find it and load it.
-  Filename libpandaexpress;
-
-#ifdef _WIN32
-  // Of course it's already resident, so use that version.
-  wstring basename = Filename::dso_filename("libpandaexpress.so").to_os_specific_w();
-  HMODULE h = GetModuleHandleW(basename.c_str());
-
-  if (h == NULL) {
-    nout << "Can't find libpandaexpress in memory.\n";
-  } else {
-    static const int buffer_size = 4096;
-    wchar_t buffer[buffer_size];
-    GetModuleFileNameW(h, buffer, buffer_size);
-    libpandaexpress = Filename::from_os_specific_w(buffer);
-  }
-#endif  // _WIN32
-
-  if (libpandaexpress.empty()) {
-    // Go look for it on disk.
-    libpandaexpress = Filename(_archive_file.get_dirname(), 
-                               Filename::dso_filename("libpandaexpress.so"));
-#if defined(__APPLE__) && PY_VERSION_HEX < 0x02050000
-    // On OSX, for Python versions 2.4 and before, we have to load the
-    // .so file, not the .dylib file.
-    libpandaexpress.set_type(Filename::T_general);
-#endif
-  }
-
-  if (!libpandaexpress.exists()) {
-    nout << "Can't find " << libpandaexpress << "\n";
-    return false;
-  }
+  Filename dir = _archive_file.get_dirname();
 
-  // We need the "imp" built-in module for that.
-  PyObject *imp_module = PyImport_ImportModule("imp");
-  if (imp_module == NULL) {
-    nout << "Failed to import module imp\n";
+  // We'll need to synthesize a 'panda3d' module before loading
+  // VFSImporter.  We could simply freeze it, but Python has a bug
+  // setting __path__ of frozen modules properly.
+  PyObject *panda3d_module = PyImport_AddModule("panda3d");
+  if (panda3d_module == NULL) {
+    nout << "Failed to create panda3d module:\n";
     PyErr_Print();
     return false;
   }
 
-  // And here's where we run into a brick wall attempting to make the
-  // whole plugin system Unicode-safe for Windows.  It turns out that
-  // this Python call, imp.load_dynamic(), will not accept a Unicode
-  // pathname.  So if the DLL in question is in a location that
-  // contains non-ASCII characters, it can't be loaded.
-  string os_specific = libpandaexpress.to_os_specific();
-  PyObject *result = PyObject_CallMethod
-    (imp_module, (char *)"load_dynamic", (char *)"ss", 
-     "libpandaexpress", os_specific.c_str());
-  if (result == NULL) {
-    nout << "Failed to import libpandaexpress as a module\n";
-    PyErr_Print();
-    return false;
-  }
-  Py_DECREF(result);
-  Py_DECREF(imp_module);
+  // Set the __path__ such that it can find panda3d/core.pyd, etc.
+  Filename panda3d_dir(dir, "panda3d");
+  string dir_str = panda3d_dir.to_os_specific();
+  PyObject *panda3d_dict = PyModule_GetDict(panda3d_module);
+  PyObject *panda3d_path = Py_BuildValue("[s#]", dir_str.data(), dir_str.length());
+  PyDict_SetItemString(panda3d_dict, "__path__", panda3d_path);
+  Py_DECREF(panda3d_path);
 
   // Now we can load _vfsimporter.pyd.  Since this is a magic frozen
   // pyd, importing it automatically makes all of its frozen contents
   // available to import as well.
   PyObject *vfsimporter = PyImport_ImportModule("_vfsimporter");
   if (vfsimporter == NULL) {
-    nout << "Failed to import _vfsimporter\n";
+    nout << "Failed to import _vfsimporter:\n";
     PyErr_Print();
     return false;
   }
@@ -224,15 +184,15 @@ run_python() {
   // And now we can import the VFSImporter module that was so defined.
   PyObject *vfsimporter_module = PyImport_ImportModule("VFSImporter");
   if (vfsimporter_module == NULL) {
-    nout << "Failed to import VFSImporter\n";
+    nout << "Failed to import VFSImporter:\n";
     PyErr_Print();
     return false;
   }
 
   // And register the VFSImporter.
-  result = PyObject_CallMethod(vfsimporter_module, (char *)"register", (char *)"");
+  PyObject *result = PyObject_CallMethod(vfsimporter_module, (char *)"register", (char *)"");
   if (result == NULL) {
-    nout << "Failed to call VFSImporter.register()\n";
+    nout << "Failed to call VFSImporter.register():\n";
     PyErr_Print();
     return false;
   }
@@ -248,7 +208,6 @@ run_python() {
     nout << "Could not read " << _archive_file << "\n";
     return false;
   }
-  Filename dir = _archive_file.get_dirname();
   VirtualFileSystem *vfs = VirtualFileSystem::get_global_ptr();
   if (!vfs->mount(mf, dir, VirtualFileSystem::MF_read_only)) {
     nout << "Could not mount " << _archive_file << "\n";
@@ -346,7 +305,7 @@ run_python() {
   }
   Py_DECREF(result);
   Py_DECREF(request_func);
- 
+
 
   // Now add check_comm() as a task.  It can be a threaded task, but
   // this does mean that application programmers will have to be alert
@@ -545,7 +504,7 @@ handle_command(TiXmlDocument *doc) {
         assert(!needs_response);
         int instance_id;
         TiXmlElement *xwparams = xcommand->FirstChildElement("wparams");
-        if (xwparams != (TiXmlElement *)NULL && 
+        if (xwparams != (TiXmlElement *)NULL &&
             xcommand->QueryIntAttribute("instance_id", &instance_id) == TIXML_SUCCESS) {
           setup_window(instance_id, xwparams);
         }
@@ -591,7 +550,7 @@ handle_command(TiXmlDocument *doc) {
             _sent_objects.erase(si);
           }
         }
-        
+
       } else {
         nout << "Unhandled command " << cmd << "\n";
         if (needs_response) {
@@ -678,7 +637,7 @@ handle_pyobj_command(TiXmlElement *xcommand, bool needs_response,
         if (method_name == NULL) {
           // No method name; call the object directly.
           result = PyObject_CallObject(obj, params);
-          
+
           // Several special-case "method" names.
         } else if (strcmp(method_name, "__bool__") == 0) {
           result = PyBool_FromLong(PyObject_IsTrue(obj));
@@ -713,7 +672,7 @@ handle_pyobj_command(TiXmlElement *xcommand, bool needs_response,
                 PyErr_Clear();
               }
             }
-            
+
             // If the object supports the mapping protocol, store it
             // in the object's dictionary.
             if (!success && PyMapping_Check(obj)) {
@@ -761,7 +720,7 @@ handle_pyobj_command(TiXmlElement *xcommand, bool needs_response,
                 PyErr_Clear();
               }
             }
-            
+
             if (success) {
               result = Py_True;
             } else {
@@ -1066,8 +1025,8 @@ py_request_func(PyObject *args) {
     PyObject *value;
     int needs_response;
     int unique_id;
-    if (!PyArg_ParseTuple(extra_args, "sOsOii", 
-                          &operation, &object, &property_name, &value, 
+    if (!PyArg_ParseTuple(extra_args, "sOsOii",
+                          &operation, &object, &property_name, &value,
                           &needs_response, &unique_id)) {
       return NULL;
     }
@@ -1256,7 +1215,7 @@ set_instance_info(P3DCInstance *inst, TiXmlElement *xinstance) {
   xinstance->Attribute("respect_per_platform", &respect_per_platform);
 
   PyObject *result = PyObject_CallMethod
-    (_runner, (char *)"setInstanceInfo", (char *)"sssiOi", root_dir, 
+    (_runner, (char *)"setInstanceInfo", (char *)"sssiOi", root_dir,
      log_directory, super_mirror, verify_contents, main, respect_per_platform);
   Py_DECREF(main);
 
@@ -1347,7 +1306,7 @@ set_p3d_filename(P3DCInstance *inst, TiXmlElement *xfparams) {
       value = value_c;
     }
 
-    PyObject *tuple = Py_BuildValue("(ss)", keyword.c_str(), 
+    PyObject *tuple = Py_BuildValue("(ss)", keyword.c_str(),
                                     value.c_str());
     PyList_Append(token_list, tuple);
     Py_DECREF(tuple);
@@ -1419,7 +1378,7 @@ setup_window(P3DCInstance *inst, TiXmlElement *xwparams) {
   }
 
   int win_x, win_y, win_width, win_height;
-  
+
   xwparams->Attribute("win_x", &win_x);
   xwparams->Attribute("win_y", &win_y);
   xwparams->Attribute("win_width", &win_width);
@@ -1531,7 +1490,7 @@ terminate_session() {
         Py_DECREF(result);
       }
     }
-    
+
     _session_terminated = true;
   }
 }
@@ -1772,7 +1731,7 @@ xml_to_pyobj(TiXmlElement *xvalue) {
     int object_id;
     if (xvalue->QueryIntAttribute("object_id", &object_id) == TIXML_SUCCESS) {
       // Construct a new BrowserObject wrapper around this object.
-      return PyObject_CallFunction(_browser_object_class, (char *)"Oi", 
+      return PyObject_CallFunction(_browser_object_class, (char *)"Oi",
                                    _runner, object_id);
     }
 
@@ -1852,7 +1811,7 @@ rt_thread_run() {
     }
 
     // Successfully read an XML document.
-    
+
     // Check for one special case: the "exit" command means we shut
     // down the read thread along with everything else.
     TiXmlElement *xcommand = doc->FirstChildElement("command");
@@ -1875,7 +1834,7 @@ rt_thread_run() {
 ////////////////////////////////////////////////////////////////////
 //     Function: P3DPythonRun::P3DWindowHandle::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 P3DPythonRun::P3DWindowHandle::
 P3DWindowHandle(P3DPythonRun *p3dpython, P3DCInstance *inst,

+ 46 - 43
direct/src/plugin/p3dX11SplashWindow.cxx

@@ -28,10 +28,10 @@
 ////////////////////////////////////////////////////////////////////
 //     Function: P3DX11SplashWindow::Constructor
 //       Access: Public
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 P3DX11SplashWindow::
-P3DX11SplashWindow(P3DInstance *inst, bool make_visible) : 
+P3DX11SplashWindow(P3DInstance *inst, bool make_visible) :
   P3DSplashWindow(inst, make_visible)
 {
   // Init for parent process
@@ -61,7 +61,7 @@ P3DX11SplashWindow(P3DInstance *inst, bool make_visible) :
 ////////////////////////////////////////////////////////////////////
 //     Function: P3DX11SplashWindow::Destructor
 //       Access: Public, Virtual
-//  Description: 
+//  Description:
 ////////////////////////////////////////////////////////////////////
 P3DX11SplashWindow::
 ~P3DX11SplashWindow() {
@@ -312,13 +312,13 @@ stop_subprocess() {
   _pipe_write.close();
 
   static const int max_wait_ms = 2000;
-  
+
   // Wait for a certain amount of time for the process to stop by
   // itself.
   struct timeval start;
   gettimeofday(&start, NULL);
   int start_ms = start.tv_sec * 1000 + start.tv_usec / 1000;
-  
+
   int status;
   pid_t result = waitpid(_subprocess_pid, &status, WNOHANG);
   while (result != _subprocess_pid) {
@@ -326,20 +326,20 @@ stop_subprocess() {
       perror("waitpid");
       break;
     }
-    
+
     struct timeval now;
     gettimeofday(&now, NULL);
     int now_ms = now.tv_sec * 1000 + now.tv_usec / 1000;
     int elapsed = now_ms - start_ms;
-    
+
     if (elapsed > max_wait_ms) {
       // Tired of waiting.  Kill the process.
-      nout << "Force-killing splash window process, pid " << _subprocess_pid 
+      nout << "Force-killing splash window process, pid " << _subprocess_pid
            << "\n";
       kill(_subprocess_pid, SIGKILL);
       start_ms = now_ms;
     }
-    
+
     // Yield the timeslice and wait some more.
     struct timeval tv;
     tv.tv_sec = 0;
@@ -353,7 +353,7 @@ stop_subprocess() {
     nout << "  exited normally, status = "
          << WEXITSTATUS(status) << "\n";
   } else if (WIFSIGNALED(status)) {
-    nout << "  signalled by " << WTERMSIG(status) << ", core = " 
+    nout << "  signalled by " << WTERMSIG(status) << ", core = "
          << WCOREDUMP(status) << "\n";
   } else if (WIFSTOPPED(status)) {
     nout << "  stopped by " << WSTOPSIG(status) << "\n";
@@ -396,7 +396,7 @@ check_stopped() {
     nout << "  exited normally, status = "
          << WEXITSTATUS(status) << "\n";
   } else if (WIFSIGNALED(status)) {
-    nout << "  signalled by " << WTERMSIG(status) << ", core = " 
+    nout << "  signalled by " << WTERMSIG(status) << ", core = "
          << WCOREDUMP(status) << "\n";
   } else if (WIFSTOPPED(status)) {
     nout << "  stopped by " << WSTOPSIG(status) << "\n";
@@ -522,14 +522,14 @@ subprocess_run() {
           _win_height = event.xconfigure.height;
 
           set_button_range(_button_ready_image);
-          
+
           // If the window changes size, we need to recompute the
           // composed image.
           _needs_new_composite = true;
         }
         needs_redraw = true;
         break;
-        
+
       case MotionNotify:
         set_mouse_data(event.xmotion.x, event.xmotion.y, _mouse_down);
         break;
@@ -537,7 +537,7 @@ subprocess_run() {
       case ButtonPress:
         set_mouse_data(_mouse_x, _mouse_y, true);
         break;
-        
+
       case ButtonRelease:
         set_mouse_data(_mouse_x, _mouse_y, false);
         break;
@@ -563,7 +563,7 @@ subprocess_run() {
       needs_redraw = true;
       prev_label = _install_label;
     }
-    
+
     if (_progress_known != prev_progress_known) {
       needs_update_progress = true;
       needs_redraw_progress = true;
@@ -592,7 +592,7 @@ subprocess_run() {
         needs_redraw = true;
       }
     }
-    
+
     if (needs_redraw) {
       redraw();
       XFlush(_display);
@@ -619,23 +619,23 @@ subprocess_run() {
                    text_width + 4, text_height + 4, false);
         XDrawString(_display, _window, _graphics_context, text_x, text_y,
                     _install_label.c_str(), _install_label.size());
-        
+
         needs_draw_label = false;
       }
-      
+
       if (needs_redraw_progress) {
-        XClearArea(_display, _window, 
+        XClearArea(_display, _window,
                    bar_x, bar_y, bar_width, bar_height, false);
-        XDrawRectangle(_display, _window, _graphics_context, 
+        XDrawRectangle(_display, _window, _graphics_context,
                        bar_x, bar_y, bar_width, bar_height);
         needs_update_progress = true;
         needs_redraw_progress = false;
       }
-      
+
       if (needs_update_progress) {
         if (_progress_known) {
           int progress_width = (int)((bar_width - 2) * _install_progress + 0.5);
-          XFillRectangle(_display, _window, _bar_context, 
+          XFillRectangle(_display, _window, _bar_context,
                          bar_x + 1, bar_y + 1,
                          progress_width + 1, bar_height - 1);
         } else {
@@ -649,7 +649,7 @@ subprocess_run() {
             progress = block_travel * 2 - progress;
           }
 
-          XFillRectangle(_display, _window, _bar_context, 
+          XFillRectangle(_display, _window, _bar_context,
                          bar_x + 1 + progress, bar_y + 1,
                          block_width + 1, bar_height - 1);
         }
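When the progress fraction is unknown, the code above draws a small block that bounces back and forth across the bar rather than a fill proportional to _install_progress. The definition of block_travel falls outside this hunk; assuming it is roughly the bar width minus the block width, the bounce arithmetic comes down to the following sketch (ticks is an invented monotonically increasing counter):

    def bouncing_block_x(ticks, bar_width, block_width):
        block_travel = max(1, bar_width - block_width)  # distance the block can slide
        progress = ticks % (block_travel * 2)           # position within one round trip
        if progress > block_travel:
            progress = block_travel * 2 - progress      # heading back the other way
        return progress                                 # offset from the bar's left edge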
@@ -665,7 +665,7 @@ subprocess_run() {
       fd_set fds;
       FD_ZERO(&fds);
       FD_SET(read_fd, &fds);
-      
+
       // Sleep a bit to yield the timeslice if there's nothing new.
       struct timeval tv;
       tv.tv_sec = 0;
@@ -729,27 +729,30 @@ receive_command() {
       } else if (strcmp(cmd, "set_image_filename") == 0) {
         const string *image_filename = xcommand->Attribute(string("image_filename"));
         int image_placement;
-        if (image_filename != NULL && 
+        if (image_filename != NULL &&
             xcommand->QueryIntAttribute("image_placement", &image_placement) == TIXML_SUCCESS) {
-          
+
           X11ImageData *image = NULL;
           switch ((ImagePlacement)image_placement) {
           case IP_background:
             image = &_background_image;
             break;
-            
+
           case IP_button_ready:
             image = &_button_ready_image;
             set_button_range(_button_ready_image);
             break;
-            
+
           case IP_button_rollover:
             image = &_button_rollover_image;
             break;
-            
+
           case IP_button_click:
             image = &_button_click_image;
             break;
+
+          case IP_none:
+            break;
           }
           if (image != NULL) {
             if (image->_filename != *image_filename) {
@@ -804,7 +807,7 @@ redraw() {
     // If we have an image, draw it.
     int xo = (_win_width - _composite_width) / 2;
     int yo = (_win_height - _composite_height) / 2;
-    XPutImage(_display, _window, _graphics_context, _composite_image, 0, 0, 
+    XPutImage(_display, _window, _graphics_context, _composite_image, 0, 0,
               xo, yo, _composite_width, _composite_height);
 
     // Then clear the rectangles around it carefully (rather than just
@@ -840,12 +843,12 @@ make_window() {
   }
 
   X11_Window parent = 0;
-  
+
   // Hum, if we use the display provided by the browser,
   // it causes a crash in some browsers when you make an Xlib
   // call with the plugin window minimized.
   // So I kept XOpenDisplay until we have a better workaround.
-  
+
   //_display = (X11_Display*) _wparams.get_parent_window()._xdisplay;
   //_own_display = false;
   //if (_display == 0) {
@@ -871,7 +874,7 @@ make_window() {
     // Create a toplevel window.
     parent = XRootWindow(_display, _screen);
   }
-  
+
   assert(parent != None);
 
   int depth = DefaultDepth(_display, _screen);
@@ -955,7 +958,7 @@ setup_gc() {
   XFontStruct* fs = XLoadQueryFont(_display, "6x13");
 
   XGCValues gcval;
-  gcval.font = fs->fid; 
+  gcval.font = fs->fid;
   gcval.function = GXcopy;
   gcval.plane_mask = AllPlanes;
   gcval.foreground = BlackPixel(_display, _screen);
@@ -966,8 +969,8 @@ setup_gc() {
   if (_bg_pixel != -1) {
     gcval.background = _bg_pixel;
   }
-  _graphics_context = XCreateGC(_display, _window, 
-    GCFont | GCFunction | GCPlaneMask | GCForeground | GCBackground, &gcval); 
+  _graphics_context = XCreateGC(_display, _window,
+    GCFont | GCFunction | GCPlaneMask | GCForeground | GCBackground, &gcval);
 
   // Also create a gc for filling in the interior of the progress bar
   // in a pleasant blue color (or whatever color the user requested).
@@ -983,7 +986,7 @@ setup_gc() {
     gcval.foreground = bar.pixel;
   }
 
-  _bar_context = XCreateGC(_display, _window, 
+  _bar_context = XCreateGC(_display, _window,
     GCFont | GCFunction | GCPlaneMask | GCForeground | GCBackground, &gcval);
 }
 
@@ -998,7 +1001,7 @@ close_window() {
     XDestroyImage(_composite_image);
     _composite_image = NULL;
   }
-  
+
   if (_bar_context != None) {
     if (_bar_context != _graphics_context) {
       XFreeGC(_display, _bar_context);
@@ -1009,22 +1012,22 @@ close_window() {
     Colormap colormap = DefaultColormap(_display, _screen);
     XFreeColors(_display, colormap, &_bar_pixel, 1, 0);
   }
-  
+
   if (_fg_pixel != -1) {
     Colormap colormap = DefaultColormap(_display, _screen);
     XFreeColors(_display, colormap, &_fg_pixel, 1, 0);
   }
-  
+
   if (_bg_pixel != -1) {
     Colormap colormap = DefaultColormap(_display, _screen);
     XFreeColors(_display, colormap, &_bg_pixel, 1, 0);
   }
-  
+
   if (_graphics_context != None) {
     XFreeGC(_display, _graphics_context);
     _graphics_context = None;
   }
-  
+
   if (_window != None) {
     XDestroyWindow(_display, _window);
     _window = None;
@@ -1150,7 +1153,7 @@ compose_image() {
   }
 
   // Now load the image.
-  _composite_image = XCreateImage(_display, CopyFromParent, DefaultDepth(_display, _screen), 
+  _composite_image = XCreateImage(_display, CopyFromParent, DefaultDepth(_display, _screen),
                                   ZPixmap, 0, (char *)new_data, image1_width, image1_height, 32, 0);
   _composite_width = image1_width;
   _composite_height = image1_height;

+ 0 - 504
direct/src/pyinst/Builder.py

@@ -1,504 +0,0 @@
-import string
-import pprint
-import sys
-import os
-import ConfigParser
-import pprint
-import shutil
-import tempfile
-import ltoc
-import tocfilter
-import resource
-import archive
-import archivebuilder
-import carchive
-
-logfile = None
-autopath = []
-built = {}
-copyFile = None
-
-class Target:
-    def __init__(self, cfg, sectnm, cnvrts):
-        self.children = []
-        self._dependencies = ltoc.lTOC() # the stuff an outer package will need to use me
-        self.cfg = cfg
-        self.__name__ = 'joe'
-        for optnm in cfg.options(sectnm):
-            cnvrt = cnvrts.get(optnm, 'getstringlist')
-            if cnvrt:
-                f = getattr(self, cnvrt, None)
-                if f:
-                    self.__dict__[optnm] = f(cfg.get(sectnm, optnm))
-        if not hasattr(self, 'name'):
-            self.name = self.__name__
-        print "Initializing", self.__name__
-        self.pathprefix = autopath + self.pathprefix
-        self.pathprefix.append(os.path.join(pyinsthome, 'support'))
-        for z in self.zlib:
-            if z in self.cfg.sections():
-                self.children.append(z)
-            else:
-                raise ValueError, "%s - zlib '%s' does not refer to a sections" \
-                      % (self.name, z)
-        for i in range(len(self.misc)):
-            x = self.misc[i]
-            if x in self.cfg.sections():
-                if self.cfg.get(x, "type") == 'PYZ':
-                    self.zlib.append(x)
-                    self.misc[i] = None
-                self.children.append(x)
-        self.misc = filter(None, self.misc)
-        self.edit()
-        self.toc = ltoc.lTOC()
-        for thingie in self.excludes:
-            try:
-                fltr = tocfilter.makefilter(thingie, self.pathprefix)
-            except ValueError:
-                print "Warning: '%s' not found - no filter created" % thingie
-            else:
-                self.toc.addFilter(fltr)
-        if self.exstdlib:
-            self.toc.addFilter(tocfilter.StdLibFilter())
-        if self.extypes:
-            self.toc.addFilter(tocfilter.ExtFilter(self.extypes))
-        if self.expatterns:
-            self.toc.addFilter(tocfilter.PatternFilter(self.expatterns))
-
-        ##------utilities------##
-    def dump(self):
-        logfile.write("---- %s: %s -----\n" % (self.__class__.__name__, self.name))
-        pprint.pprint(self.__dict__, logfile)
-    def getstringlist(self, opt):
-        tmp = string.split(opt, ',')
-        return filter(None, map(string.strip, tmp))
-    def getstring(self, opt):
-        return opt
-    def getbool(self, opt):
-        if opt in ('0','f','F','n','N'):
-            return 0
-        return 1
-        ##-----framework-----##
-    def build(self):
-        print "Gathering components of %s" % self.name
-        self.gather()
-        logfile.write("Final Table of Contents for %s:\n" % self.name)
-        pprint.pprint(self.toc.toList(), logfile)
-        print "Creating %s" % self.name
-        self.assemble()
-        ##-----overrideables-----##
-    def edit(self):
-        pass
-    def gather(self):
-        pass
-    def assemble(self):
-        pass
-
-class PYZTarget(Target):
-    def __init__(self, cfg, sectnm, cnvrts):
-        Target.__init__(self, cfg, sectnm, cnvrts)
-        # to use a PYZTarget, you'll need imputil and archive
-        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'imputil.py')])
-        print "pyinsthome:", pyinsthome
-        imputil = resource.makeresource('imputil.py', [pyinsthome])
-        self._dependencies.append(imputil)
-        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'archive_rt.py')])
-        archmodule = resource.makeresource('archive_rt.py', [pyinsthome])
-        self._dependencies.merge(archmodule.dependencies())
-        self._dependencies.append(archmodule)
-        self.toc.addFilter(archmodule)
-        self.toc.addFilter(imputil)
-        for mod in archmodule.modules:
-            self.toc.addFilter(mod)
-    def edit(self):
-        if self.extypes:
-            print "PYZ target %s ignoring extypes = %s" % (self.__name__, self.extypes)
-
-    def gather(self):
-        for script in self.dependencies:
-            rsrc = resource.makeresource(script, self.pathprefix)
-            if not isinstance(rsrc, resource.scriptresource):
-                print "Bug alert - Made %s from %s!" % (rsrc, script)
-            self.toc.merge(rsrc.modules)
-        logfile.write("lTOC after expanding 'depends':\n")
-        pprint.pprint(self.toc.toList(), logfile)
-        for thingie in self.includes + self.directories + self.packages:
-            rsrc = resource.makeresource(thingie, self.pathprefix)
-##            if not isinstance(rsrc, resource.pythonresource):
-##                print "PYZ target %s ignoring include %s" % (self.name, thingie)
-##            else:
-            self.toc.merge(rsrc.contents())
-        logfile.write("lTOC after includes, dir, pkgs:\n")
-        pprint.pprint(self.toc.toList(), logfile)
-        self.toc.addFilter(tocfilter.ExtFilter(['.py', '.pyc', '.pyo'], 1))
-        logfile.write("Applying the following filters:\n")
-        pprint.pprint(self.toc.filters, logfile)
-        self.toc.filter()
-
-    def assemble(self):
-        contents = self.toc.toList()
-        if contents:
-            lib = archive.ZlibArchive()
-            lib.build(self.name, archivebuilder.GetCompiled(self.toc.toList()))
-
-class CollectTarget(Target):
-    def __init__(self, cfg, sectnm, cnvrts):
-        Target.__init__(self, cfg, sectnm, cnvrts)
-
-    _rsrcdict = {'COLLECT': resource.dirresource, 'PYZ': resource.zlibresource, 'CARCHIVE': resource.archiveresource}
-
-    def gather(self):
-        if self.support:
-            # the bare minimum
-            self.toc.merge([resource.makeresource('python20.dll')])
-            self.toc.merge([resource.makeresource('exceptions.pyc').asBinary()])
-        # zlib, bindepends, misc, trees, destdir
-        for i in range(len(self.zlib)):
-            # z refers to the section name
-            z = self.zlib[i]
-            nm = self.cfg.get(z, 'name')
-            try:
-                self.toc.merge([resource.makeresource(nm, ['.'])])
-            except ValueError:
-                # zlibs aren't written if they turn out to be empty
-                self.zlib[i] = None
-        self.zlib = filter(None, self.zlib)
-        if self.zlib:
-            target = built.get(self.zlib[0], None)
-            if target:
-                self.toc.merge(target._dependencies)
-        for script in self.bindepends:
-            rsrc = resource.makeresource(script, self.pathprefix)
-            self.toc.merge(rsrc.binaries)
-        logfile.write('ltoc after bindepends:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        for thingie in self.misc:
-            if thingie in self.cfg.sections():
-                name = self.cfg.get(thingie, "name")
-                typ = self.cfg.get(thingie, "type")
-                klass = self._rsrcdict.get(typ, resource.dataresource)
-                rsrc = apply(klass, (name, name))
-                #now make sure we have the stuff the resource requires
-                target = built.get(thingie, None)
-                if target:
-                    self.toc.merge(target._dependencies)
-            else:
-                rsrc = resource.makeresource(thingie, self.pathprefix)
-            self.toc.merge(rsrc.contents())
-        logfile.write('ltoc after misc:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        for script in self.script:
-            if string.find(script, '.') == -1:
-                script = script + '.py'
-            rsrc = resource.makeresource(script, self.pathprefix)
-            if rsrc.typ == 'm':
-                rsrc.typ = 's'
-            self.toc.merge([rsrc])
-        logfile.write('ltoc after scripts:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        for tree in self.trees:
-            try:
-                rsrc = resource.treeresource('.', tree)
-            except ValueError:
-                print "tree %s not found" % tree
-            else:
-                self.toc.merge(rsrc.contents())
-        logfile.write('ltoc after trees:\n')
-        pprint.pprint(self.toc.toList(), logfile)
-        self.toc.addFilter(tocfilter.TypeFilter(['d']))
-        logfile.write("Applying the following filters:\n")
-        pprint.pprint(self.toc.filters, logfile)
-        self.toc.filter()
-        #don't dupe stuff in a zlib that's part of this target
-        if self.zlib:
-           ztoc = ltoc.lTOC()
-           for zlibnm in self.zlib:
-               target = built.get(zlibnm, None)
-               if target:
-                   ztoc.merge(target.toc)
-           for i in range(len(self.toc)-1, -1, -1):
-               rsrc = self.toc[i]
-               if isinstance(rsrc, resource.moduleresource) and rsrc in ztoc:
-                   del self.toc[i]
-
-    def assemble(self):
-        if os.path.exists(self.name):
-            if os.path.isdir(self.name):
-                for fnm in os.listdir(self.name):
-                    try:
-                        os.remove(os.path.join(self.name, fnm))
-                    except:
-                        print "Could not delete file %s" % os.path.join(self.name, fnm)
-        else:
-            os.makedirs(self.name)
-        mysite = []
-        for nm, path, typ in self.toc.toList():
-            shutil.copy2(path, self.name)
-            if typ == 'z':
-                mysite.append('imputil.FuncImporter(archive.ZlibArchive("%s", 0).get_code).install()' % nm)
-        if mysite:
-            mysite.insert(0, 'import archive, imputil')
-            open(os.path.join(self.name, 'site.py'),'w').write(string.join(mysite, '\n'))
-
-
-class ArchiveTarget(CollectTarget):
-    usefullname = 1
-    def __init__(self, cfg, sectnm, cnvrts):
-        CollectTarget.__init__(self, cfg, sectnm, cnvrts)
-        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'carchive_rt.py')])
-        carchmodule = resource.makeresource('carchive_rt.py', [pyinsthome])
-        self._dependencies.merge(carchmodule.dependencies())
-        self._dependencies.append(carchmodule)
-
-    def edit(self):
-        if self.destdir:
-            print "Warning 'destdir = %s' ignored for %s" % (self.destdir, self.name)
-
-    def gather(self):
-        CollectTarget.gather(self)
-
-    _cdict = {'s':2,'m':1,'b':1,'x':1,'a':0,'z':0, 'p':1}
-
-    def assemble(self, pkgnm=None):
-        if pkgnm is None:
-            pkgnm = self.name
-        arch = carchive.CArchive()
-        toc = []
-        pytoc = []
-        for nm, path, typ in self.toc.toList():
-            compress = self._cdict[typ]
-            if typ == 'b' or (self.usefullname and typ in 'ms'):
-                nm = os.path.basename(path)
-            if typ == 'm':
-                pytoc.append((nm, path, compress, typ))
-            else:
-                toc.append((nm, path, compress, typ))
-        toc = toc + archivebuilder.GetCompiled(pytoc)
-        arch.build(pkgnm, toc)
-        return arch
-
-class FullExeTarget(ArchiveTarget):
-    usefullname = 0
-    def __init__(self, cfg, sectnm, cnvrts):
-        ArchiveTarget.__init__(self, cfg, sectnm, cnvrts)
-
-    def gather(self):
-        for script in self.script:
-            #print "FullExeTarget.gather: script is", repr(script)
-            rsrc = resource.makeresource(script, self.pathprefix)
-            rsrc = resource.scriptresource(rsrc.name, rsrc.path)
-            #print " resource is", repr(rsrc)
-            self.toc.merge(rsrc.binaries)
-        ArchiveTarget.gather(self)
-        if not self.zlib:
-            self.toc.merge(rsrc.modules)
-        self._dependencies = ltoc.lTOC()
-
-    _cdict = {'s':2,'m':0,'b':1,'x':0,'a':0,'z':0}
-    _edict = { (1, 1):'Runw_d.exe', (1, 0):'Runw.exe', (0, 1):'Run_d.exe', (0, 0):'Run.exe'}
-
-    def assemble(self):
-        pkgname = tempfile.mktemp()
-        arch = ArchiveTarget.assemble(self, pkgname)
-        exe = self._edict[(self.userunw, self.debug)]
-        exe = os.path.normpath(os.path.join(pyinsthome, 'support', exe))
-##        copyFile([exe, pkgname], self.name)
-##        os.remove(pkgname)
-        # Thomas Heller's icon code
-        # my version
-        if self.icon:
-            myexe = tempfile.mktemp()
-            copyFile (exe, myexe)
-            try:
-                from icon import CopyIcons
-                CopyIcons(myexe, self.icon)
-            except ImportError:
-                print "win32api is required for updating icons"
-                print "You should have win32api.pyd and PyWinTypes20.dll"
-                print "in the installation directory."
-                print "Please copy them to Python's DLLS subdirectory"
-                print "(or install Mark Hammond's Win32 extensions)."
-##        iconfile = None
-##        for name in self.cfg.sections():
-##            if self.cfg.get (name, "type") == "STANDALONE":
-##                try:
-##                    iconfile = self.cfg.get (name, "iconfile")
-##                except:
-##                    pass
-##        if iconfile:
-##            from icon import CopyIcons
-##            CopyIcons (myexe, iconfile)
-            copyFile ([myexe, pkgname], self.name)
-            os.remove(myexe)
-        else:
-            copyFile([exe, pkgname], self.name)
-        #os.remove(pkgname)
-
-class ExeTarget(FullExeTarget):
-    def __init__(self, cfg, sectnm, cnvrts):
-        FullExeTarget.__init__(self, cfg, sectnm, cnvrts)
-
-    def edit(self):
-        if not self.script:
-            raise ValueError, "EXE target %s requires 'script= <script>'" % self.__name__
-
-    def gather(self):
-        FullExeTarget.gather(self)
-        for i in range(len(self.toc)-1, -1, -1):
-            rsrc = self.toc[i]
-            if rsrc.typ == 'b':
-                self._dependencies.append(rsrc)
-                del self.toc[i]
-
-installpreamble = """\
-import sys, os
-import installutils
-import carchive_rt
-idir = installutils.getinstalldir()
-me = sys.argv[0]
-if me[:-4] != '.exe':
-    me = me + '.exe'
-this = carchive_rt.CArchive(sys.argv[0])
-here = sys.path[0]
-"""
-mvfile = "installutils.copyFile(os.path.join(here, '%s'), os.path.join(idir, '%s'))\n"
-extractfile = "open(os.path.join(idir, '%s'), 'wb').write(this.extract('%s')[1])\n"
-sitepreamble = """\
-import archive_rt
-import imputil
-import sys
-"""
-importzlib = "imputil.FuncImporter(archive_rt.ZlibArchive(sys.path[0]+'/%s').get_code).install()\n"
-
-class InstallTarget(FullExeTarget):
-    def __init__(self, cfg, sectnm, cnvrts):
-        FullExeTarget.__init__(self, cfg, sectnm, cnvrts)
-
-    def edit(self):
-        if not self.script:
-            open('gen_install.py', 'w').write(installpreamble)
-            self.script = ['gen_install.py']
-
-    def gather(self):
-        FullExeTarget.gather(self)
-        if self.script[0] == 'gen_install.py':
-            f = open(self.script[0], 'a')
-            for rsrc in self.toc:
-                if isinstance(rsrc, resource.binaryresource):
-                    nm = os.path.basename(rsrc.path)
-                    f.write(mvfile % (nm, nm))
-                elif isinstance(rsrc, resource.pythonresource):
-                    pass
-                elif isinstance(rsrc, resource.zlibresource):
-                    pass
-                else:
-                    f.write(extractfile % (rsrc.name, rsrc.name))
-                    if isinstance(rsrc, resource.archiveresource):
-                        #did it come with an install script?
-                        target = built.get(rsrc.name, None)
-                        if target:
-                           if hasattr(target, "installscript"):
-                               for script in target.installscript:
-                                   s = resource.makeresource(script, self.pathprefix)
-                                   txt = open(s.path, 'r').read()
-                                   f.write(txt)
-            f.close()
-
-dispatch = {
-                'PYZ': PYZTarget,
-                'CARCHIVE': ArchiveTarget,
-                'COLLECT': CollectTarget,
-                'STANDALONE': ExeTarget,
-                'INSTALL': InstallTarget,
-                'FULLEXE': FullExeTarget,
-}
-
-
-def makeTarget(cfg, section):
-    return dispatch[cfg.get(section, 'type')](cfg, section, optcnvrts)
-
-optdefaults = { 'type':'PYZ',
-                'script':'',            # INSTALL (opt) & STANDALONE (required)
-                'zlib':'',              # INSTALL, STANDALONE, COLLECT
-                'bindepends':'',        # INSTALL, COLLECT
-                'misc':'',              # INSTALL. COLLECT
-                'includetk': '0',       # INSTALL, COLLECT
-        'userunw': '0',         # STANDALONE
-                'dependencies':'',      # PYZ
-                'directories':'',       # PYZ
-                'excludes':'',          # PYZ, INSTALL, COLLECT
-                'expatterns': '',
-                'exstdlib': '0',
-                'extypes': '',
-                'includes':'',          # PYZ
-                'packages':'',          # PYZ
-                'destdir':'',           # COLLECT
-                'pathprefix': '',
-                'trees': '',
-                'debug': '0',
-                'support': '1', # include python20.dll & exceptons.pyc at a minimum
-                'icon': '',
-}
-
-optcnvrts = {   'type':'',
-                'name': 'getstring',
-                'exstdlib': 'getbool',
-                'console': 'getbool',
-                'analyze': 'getbool',
-                'debug': 'getbool',
-                'includetk': 'getbool',
-                'userunw': 'getbool',
-                'destdir': 'getstring',
-                'support': 'getbool',
-                '__name__': 'getstring',
-                'icon': 'getstring',
-}
-def main(opts, args):
-    global pyinsthome
-    global copyFile
-    pyinsthome = os.path.abspath(os.path.dirname(sys.argv[0]))
-    # sys.path.insert(0, os.path.join(pyinsthome, 'support'))
-    import installutils
-    copyFile = installutils.copyFile
-    global logfile
-    logfile = open('Builder.log','w')
-    targets = []
-    xref = {}
-    cfg = ConfigParser.ConfigParser(optdefaults)
-    for arg in args:
-        dirnm = os.path.dirname(arg)
-        if dirnm == '':
-            dirnm = '.'
-        autopath.append(os.path.abspath(dirnm))
-    cfg.read(args)
-    for section in cfg.sections():
-        target = makeTarget(cfg, section)
-        targets.append(target)
-        xref[section] = target
-    while targets:
-        for i in range(len(targets)):
-            target = targets[i]
-            for child in target.children:
-                if xref[child] in targets:
-                    break
-            else:       #no break - ready to build
-                target.dump()
-                target.build()
-                built[target.__name__] = target
-                built[target.name] = target
-                targets[i] = None
-                break
-        else:       #no break - couldn't find anything to build
-            names = map(lambda x: getattr(x, 'name'), targets)
-            raise RuntimeError, "circular dependencies in %s" % repr(names)
-        targets = filter(None, targets)
-
-def run(file):
-    main ([], file)
-
-if __name__ == '__main__':
-    import getopt
-    (opts, args) = getopt.getopt(sys.argv[1:], 'dv')
-    print "opts:", opts
-    print "args:", args
-    main(opts, args)

+ 0 - 0
direct/src/pyinst/Sources.pp


+ 0 - 0
direct/src/pyinst/__init__.py


+ 0 - 246
direct/src/pyinst/archive.py

@@ -1,246 +0,0 @@
-#
-# Gordon McMillan (as inspired and influenced by Greg Stein)
-#
-
-# subclasses may not need marshal or struct, but since they're
-# builtin, importing is safe.
-#
-# While an Archive is really an abstraction for any "filesystem
-# within a file", it is tuned for use with imputil.FuncImporter.
-# This assumes it contains python code objects, indexed by the
-# the internal name (ie, no '.py').
-# See carchive.py for a more general archive (contains anything)
-# that can be understood by a C program.
-
-import marshal
-import struct
-
-class Archive:
-  """ A base class for a repository of python code objects.
-
-      The get_code method is used by imputil.FuntionImporter
-      to get code objects by name.
-      Archives are flat namespaces, so conflict between module
-      names in different packages are possible. Use a different
-      Archive for each package.
-  """
-  MAGIC = 'PYL\0'
-  HDRLEN = 12        # default is MAGIC followed by python's magic, int pos of toc
-  TOCPOS = 8
-  TRLLEN = 0        # default - no trailer
-  TOCTMPLT = {}     #
-  os = None
-  def __init__(self, path=None, start=0):
-    """
-         Initialize an Archive. If path is omitted, it will be an empty Archive.
-         start is the seek position within path where the Archive starts."""
-    self.toc = None
-    self.path = path
-    self.start = start
-    import imp
-    self.pymagic = imp.get_magic()
-    if path is not None:
-      self.lib = open(self.path, 'rb')
-      self.checkmagic()
-      self.loadtoc()
-
-  ####### Sub-methods of __init__ - override as needed #############
-  def checkmagic(self):
-    """Verify version and validity of file.
-
-        Overridable.
-        Check to see if the file object self.lib actually has a file
-        we understand.
-    """
-    self.lib.seek(self.start)   #default - magic is at start of file
-    if self.lib.read(len(self.MAGIC)) != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    if self.lib.read(len(self.pymagic)) != self.pymagic:
-      raise RuntimeError, "%s has version mismatch to dll" % (self.path)
-
-  def loadtoc(self):
-    """Load the table of contents.
-
-        Overridable.
-        Default: After magic comes an int (4 byte native) giving the
-        position of the TOC within self.lib.
-        Default: The TOC is a marshal-able string.
-    """
-    self.lib.seek(self.start + self.TOCPOS)
-    (offset,) = struct.unpack('=i', self.lib.read(4))
-    self.lib.seek(self.start + offset)
-    self.toc = marshal.load(self.lib)
-
-  ######## This is what is called by FuncImporter #######
-  ## Since an Archive is flat, we ignore parent and modname.
-
-  def get_code(self, parent, modname, fqname):
-    """The import hook.
-
-       Called by imputil.FunctionImporter.
-       Override extract to tune getting code from the Archive."""
-    rslt = self.extract(fqname) # None if not found, (ispkg, code) otherwise
-    if rslt is None:
-      return None
-    ispkg, code = rslt
-    if ispkg:
-      return ispkg, code, {'__path__': []}
-    return rslt
-
-  ####### Core method - Override as needed  #########
-  def extract(self, name):
-    """ Get the object corresponding to name, or None.
-
-        NAME is the name as specified in an 'import name'.
-        'import a.b' will become:
-        extract('a') (return None because 'a' is not a code object)
-        extract('a.__init__') (return a code object)
-        extract('a.b') (return a code object)
-        Default implementation:
-          self.toc is a dict
-          self.toc[name] is pos
-          self.lib has the code object marshal-ed at pos
-    """
-    ispkg, pos = self.toc.get(name, (0, None))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.load(self.lib)
-
-  ########################################################################
-  # Informational methods
-
-  def contents(self):
-    """Return a list of the contents.
-
-       Default implementation assumes self.toc is a dict like object.
-    """
-    return self.toc.keys()
-
-  ########################################################################
-  # Building
-
-  ####### Top level method - shouldn't need overriding #######
-  def build(self, path, lTOC):
-    """Create an archive file of name PATH from LTOC.
-
-       lTOC is a 'logical TOC' - a list of (name, path, ...)
-       where name is the internal (import) name,
-       and path is a file to get the object from, eg './a.pyc'.
-    """
-    self.path = path
-    self.lib = open(path, 'wb')
-    #reserve space for the header
-    if self.HDRLEN:
-      self.lib.write('\0'*self.HDRLEN)
-
-    #create an empty toc
-
-    if type(self.TOCTMPLT) == type({}):
-      self.toc = {}
-    else:       # assume callable
-      self.toc = self.TOCTMPLT()
-
-    for tocentry in lTOC:
-      self.add(tocentry)   # the guts of the archive
-
-    tocpos = self.lib.tell()
-    self.save_toc(tocpos)
-    if self.TRLLEN:
-      self.save_trailer(tocpos)
-    if self.HDRLEN:
-      self.update_headers(tocpos)
-    self.lib.close()
-
-
-  ####### manages keeping the internal TOC and the guts in sync #######
-  def add(self, entry):
-    """Add an entry to the archive.
-
-      Override this to influence the mechanics of the Archive.
-       Assumes entry is a seq beginning with (nm, pth, ...) where
-       nm is the key by which we'll be asked for the object.
-       pth is the name of where we find the object.
-    """
-    if self.os is None:
-      import os
-      self.os = os
-    nm = entry[0]
-    pth = entry[1]
-    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-    self.toc[nm] = (ispkg, self.lib.tell())
-    f = open(entry[1], 'rb')
-    f.seek(8)   #skip magic and timestamp
-    self.lib.write(f.read())
-
-  def save_toc(self, tocpos):
-    """Save the table of contents.
-
-       Default - toc is a dict
-       Gets marshaled to self.lib
-    """
-    marshal.dump(self.toc, self.lib)
-
-  def save_trailer(self, tocpos):
-    """Placeholder for Archives with trailers."""
-    pass
-
-  def update_headers(self, tocpos):
-    """Update any header data.
-
-       Default header is  MAGIC + Python's magic + tocpos"""
-    self.lib.seek(self.start)
-    self.lib.write(self.MAGIC)
-    self.lib.write(self.pymagic)
-    self.lib.write(struct.pack('=i', tocpos))
-
-##############################################################
-#
-# ZlibArchive - an archive with compressed entries
-#
-
-class ZlibArchive(Archive):
-  """A subclass of Archive that compresses entries with zlib
-     and uses a (marshalled) dict as a table of contents"""
-  MAGIC = 'PYZ\0'
-  TOCPOS = 8
-  HDRLEN = 12
-  TRLLEN = 0
-  TOCTMPLT = {}
-  LEVEL = 9
-
-  def __init__(self, path=None, offset=0):
-    Archive.__init__(self, path, offset)
-    # dynamic import so not imported if not needed
-    global zlib
-    import zlib
-
-  def extract(self, name):
-    """Get the code object for NAME.
-
-       Return None if name is not in the table of contents.
-       Otherwise, return a tuple (ispkg, code)"""
-    (ispkg, pos, lngth) = self.toc.get(name, (0, None, 0))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.loads(zlib.decompress(self.lib.read(lngth)))
-
-  def add(self, entry):
-    """Add an entry.
-
-       ENTRY is a sequence where entry[0] is name and entry[1] is full path name.
-       zlib compress the code object, and build a toc entry"""
-    if self.os is None:
-      import os
-      self.os = os
-    nm = entry[0]
-    pth = entry[1]
-    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-    f = open(pth, 'rb')
-    f.seek(8)   #skip magic and timestamp
-    obj = zlib.compress(f.read(), self.LEVEL)
-    self.toc[nm] = (ispkg, self.lib.tell(), len(obj))
-    self.lib.write(obj)
-
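The archive.py module removed above implemented a small "filesystem within a file": a 4-byte magic, Python's own import magic, a 4-byte offset to a marshalled table of contents, and the marshalled code objects themselves. For reference, reading the TOC of such an archive boils down to the sketch below (Python 2-era, matching the deleted code; read_toc is not a real API):

    import marshal
    import struct

    def read_toc(path, magic='PYL\0', start=0):
        f = open(path, 'rb')
        f.seek(start)
        if f.read(4) != magic:              # archive magic ('PYZ\0' for ZlibArchive)
            raise RuntimeError('not a valid archive')
        f.read(4)                           # Python's bytecode magic, skipped here
        (offset,) = struct.unpack('=i', f.read(4))
        f.seek(start + offset)
        return marshal.load(f)              # dict: name -> (ispkg, position[, length])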

+ 0 - 226
direct/src/pyinst/archive_rt.py

@@ -1,226 +0,0 @@
-#
-# Gordon McMillan (as inspired and influenced by Greg Stein)
-#
-
-# subclasses may not need marshal or struct, but since they're
-# builtin, importing is safe.
-#
-# While an Archive is really an abstraction for any "filesystem
-# within a file", it is tuned for use with imputil.FuncImporter.
-# This assumes it contains python code objects, indexed by the
-# the internal name (ie, no '.py').
-# See carchive.py for a more general archive (contains anything)
-# that can be understood by a C program.
-
-#archive_rt is a stripped down version of MEInc.Dist.archive.
-#It has had all building logic removed.
-#It's purpose is to bootstrap the Python installation.
-
-import marshal
-import struct
-
-class Archive:
-  """ A base class for a repository of python code objects.
-      The extract method is used by imputil.ArchiveImporter
-      to get code objects by name (fully qualified name), so
-      an enduser "import a.b" would become
-        extract('a.__init__')
-        extract('a.b')
-  """
-  MAGIC = 'PYL\0'
-  HDRLEN = 12        # default is MAGIC followed by python's magic, int pos of toc
-  TOCPOS = 8
-  TRLLEN = 0        # default - no trailer
-  TOCTMPLT = {}     #
-  os = None
-  def __init__(self, path=None, start=0):
-    "Initialize an Archive. If path is omitted, it will be an empty Archive."
-    self.toc = None
-    self.path = path
-    self.start = start
-    import imp
-    self.pymagic = imp.get_magic()
-    if path is not None:
-      self.lib = open(self.path, 'rb')
-      self.checkmagic()
-      self.loadtoc()
-
-  ####### Sub-methods of __init__ - override as needed #############
-  def checkmagic(self):
-    """ Overridable.
-        Check to see if the file object self.lib actually has a file
-        we understand.
-    """
-    self.lib.seek(self.start)   #default - magic is at start of file
-    if self.lib.read(len(self.MAGIC)) != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    if self.lib.read(len(self.pymagic)) != self.pymagic:
-      raise RuntimeError, "%s has version mismatch to dll" % (self.path)
-
-  def loadtoc(self):
-    """ Overridable.
-        Default: After magic comes an int (4 byte native) giving the
-        position of the TOC within self.lib.
-        Default: The TOC is a marshal-able string.
-    """
-    self.lib.seek(self.start + self.TOCPOS)
-    (offset,) = struct.unpack('=i', self.lib.read(4))
-    self.lib.seek(self.start + offset)
-    self.toc = marshal.load(self.lib)
-
-  ######## This is what is called by FuncImporter #######
-  ## Since an Archive is flat, we ignore parent and modname.
-
-  def get_code(self, parent, modname, fqname):
-    print "parent: ", parent
-    print "modname: ", modname
-    print "fqname: ", fqname
-    return self.extract(fqname) # None if not found, (ispkg, code) otherwise
-    if rslt is None:
-      return None
-    ispkg, code = rslt
-    if ispkg:
-      return ispkg, code, {'__path__': []}
-    return rslt
-
-  ####### Core method - Override as needed  #########
-  def extract(self, name):
-    """ Get the object corresponding to name, or None.
-        For use with imputil ArchiveImporter, object is a python code object.
-        'name' is the name as specified in an 'import name'.
-        'import a.b' will become:
-        extract('a') (return None because 'a' is not a code object)
-        extract('a.__init__') (return a code object)
-        extract('a.b') (return a code object)
-        Default implementation:
-          self.toc is a dict
-          self.toc[name] is pos
-          self.lib has the code object marshal-ed at pos
-    """
-    ispkg, pos = self.toc.get(name, (0, None))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.load(self.lib)
-
-  ########################################################################
-  # Informational methods
-
-  def contents(self):
-    """Return a list of the contents
-       Default implementation assumes self.toc is a dict like object.
-       Not required by ArchiveImporter.
-    """
-    return self.toc.keys()
-
-  ########################################################################
-  # Building
-
-  ####### Top level method - shouldn't need overriding #######
-##  def build(self, path, lTOC):
-##    """Create an archive file of name 'path'.
-##       lTOC is a 'logical TOC' - a list of (name, path, ...)
-##       where name is the internal name, eg 'a'
-##       and path is a file to get the object from, eg './a.pyc'.
-##    """
-##    self.path = path
-##    self.lib = open(path, 'wb')
-##    #reserve space for the header
-##    if self.HDRLEN:
-##      self.lib.write('\0'*self.HDRLEN)
-##
-##    #create an empty toc
-##
-##    if type(self.TOCTMPLT) == type({}):
-##      self.toc = {}
-##    else:       # assume callable
-##      self.toc = self.TOCTMPLT()
-##
-##    for tocentry in lTOC:
-##      self.add(tocentry)   # the guts of the archive
-##
-##    tocpos = self.lib.tell()
-##    self.save_toc(tocpos)
-##    if self.TRLLEN:
-##      self.save_trailer(tocpos)
-##    if self.HDRLEN:
-##      self.update_headers(tocpos)
-##    self.lib.close()
-##
-##
-##  ####### manages keeping the internal TOC and the guts in sync #######
-##  def add(self, entry):
-##    """Override this to influence the mechanics of the Archive.
-##       Assumes entry is a seq beginning with (nm, pth, ...) where
-##       nm is the key by which we'll be asked for the object.
-##       pth is the name of where we find the object. Overrides of
-##       get_obj_from can make use of further elements in entry.
-##    """
-##    if self.os is None:
-##      import os
-##      self.os = os
-##    nm = entry[0]
-##    pth = entry[1]
-##    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-##    self.toc[nm] = (ispkg, self.lib.tell())
-##    f = open(entry[1], 'rb')
-##    f.seek(8) #skip magic and timestamp
-##    self.lib.write(f.read())
-##
-##  def save_toc(self, tocpos):
-##    """Default - toc is a dict
-##       Gets marshaled to self.lib
-##    """
-##    marshal.dump(self.toc, self.lib)
-##
-##  def save_trailer(self, tocpos):
-##    """Default - not used"""
-##    pass
-##
-##  def update_headers(self, tocpos):
-##    """Default - MAGIC + Python's magic + tocpos"""
-##    self.lib.seek(self.start)
-##    self.lib.write(self.MAGIC)
-##    self.lib.write(self.pymagic)
-##    self.lib.write(struct.pack('=i', tocpos))
-
-##############################################################
-#
-# ZlibArchive - an archive with compressed entries
-#
-
-class ZlibArchive(Archive):
-  MAGIC = 'PYZ\0'
-  TOCPOS = 8
-  HDRLEN = 12
-  TRLLEN = 0
-  TOCTMPLT = {}
-  LEVEL = 9
-
-  def __init__(self, path=None, offset=0):
-    Archive.__init__(self, path, offset)
-    # dynamic import so not imported if not needed
-    global zlib
-    import zlib
-
-  def extract(self, name):
-    (ispkg, pos, lngth) = self.toc.get(name, (0, None, 0))
-    if pos is None:
-      return None
-    self.lib.seek(self.start + pos)
-    return ispkg, marshal.loads(zlib.decompress(self.lib.read(lngth)))
-
-##  def add(self, entry):
-##    if self.os is None:
-##      import os
-##      self.os = os
-##    nm = entry[0]
-##    pth = entry[1]
-##    ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
-##    f = open(pth, 'rb')
-##    f.seek(8) #skip magic and timestamp
-##    obj = zlib.compress(f.read(), self.LEVEL)
-##    self.toc[nm] = (ispkg, self.lib.tell(), len(obj))
-##    self.lib.write(obj)
-##

+ 0 - 81
direct/src/pyinst/archivebuilder.py

@@ -1,81 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-# Gordon McMillan [email protected]
-#
-# A collection of routines for building a logical Table Of Contents
-# that Archive (subclasses) use to build themselves.
-# A logical Table of Contents is a sequence, each element of which is
-# a sequence, with at least 2 entries - "name" and "path".
-
-import os
-
-import string
-
-import py_compile
-
-def GetCompiled(seq, lvl='c'):
-  """SEQ is a list of .py files, or a logical TOC.
-     Return as .pyc or .pyo files (LVL) after ensuring their existence"""
-  if len(seq) == 0:
-    return seq
-  rslt = []
-  isTOC = 0
-  if type(seq[0]) == type(()):
-    isTOC = 1
-  for py in seq:
-    if isTOC:
-      (nm, fnm), rest = py[:2], py[2:]
-    else:
-      fnm = py
-    fnm = os.path.splitext(fnm)[0] + '.py'
-    cmpl = 1
-    pyc = fnm + lvl
-    if os.path.exists(pyc):
-      pytm = long(os.stat(fnm)[8])
-      ctm = long(os.stat(pyc)[8])
-      if pytm < ctm:
-        cmpl = 0
-    if cmpl:
-      py_compile.compile(fnm, pyc)
-    if isTOC:
-      rslt.append((nm, pyc)+rest)
-    else:
-      rslt.append(pyc)
-  return rslt
-
-import modulefinder
-MF = modulefinder
-import sys
-
-def Dependencies(script):
-  """Get a logical TOC directly from the dependencies of a script.
-  
-     The returned TOC does NOT contain the script.
-     It does contain extension modules. Uses modulefinder."""
-  rslt = []
-  (dir, name) = os.path.split(script)
-  if dir:
-    ppath = [os.path.normpath(dir)] + sys.path
-  else:
-    ppath = sys.path[:]
-  mf = MF.ModuleFinder(ppath, 0)
-  try:
-    mf.run_script(script)
-  except IOError:
-    print " Script not found:", script
-    return []
-  del mf.modules['__main__']
-  for (k, v) in mf.modules.items():
-    if v.__file__ is None:
-      del mf.modules[k]  # a builtin
-  for (k, v) in mf.modules.items():
-    #ispkg = os.path.basename(v.__file__) == '__init__.py'
-    d = os.path.dirname(v.__file__)
-    if not d:
-      v.__file__ = os.path.join(os.getcwd(), v.__file__)
-    #if ispkg:
-    #    rslt.append(k+'.__init__', v.__file__)
-    #else:
-    rslt.append((k, v.__file__))
-  return rslt
-

+ 0 - 169
direct/src/pyinst/bindepend.py

@@ -1,169 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-#
-# use dumpbin.exe (if present) to find the binary
-# dependencies of an extension module.
-# if dumpbin not available, pick apart the PE hdr of the binary
-# while this appears to work well, it is complex and subject to
-# problems with changes to PE hdrs (ie, this works only on 32 bit Intel
-# Windows format binaries)
-#
-# Note also that you should check the results to make sure that the
-# dlls are redistributable. I've listed most of the common MS dlls
-# under "excludes" below; add to this list as necessary (or use the
-# "excludes" option in the INSTALL section of the config file).
-
-import os
-import time
-import string
-import sys
-import tempfile
-import finder
-
-seen = {}
-excludes = {'KERNEL32.DLL':1,
-      'ADVAPI.DLL':1,
-      'MSVCRT.DLL':1,
-      'ADVAPI32.DLL':1,
-      'COMCTL32.DLL':1,
-      'CRTDLL.DLL':1,
-      'GDI32.DLL':1,
-      'MFC42.DLL':1,
-      'NTDLL.DLL':1,
-      'OLE32.DLL':1,
-      'OLEAUT32.DLL':1,
-      'RPCRT4.DLL':1,
-      'SHELL32.DLL':1,
-      'USER32.DLL':1,
-      'WINSPOOL.DRV':1,
-      'WS2HELP.DLL':1,
-      'WS2_32.DLL':1,
-      'WSOCK32.DLL':1,
-      'WINMM.DLL':1,
-      'COMDLG32.DLL':1,
-      'ZLIB.DLL':1,
-      'ODBC32.DLL':1,
-      'VERSION.DLL':1}
-
-def getfullnameof(mod, xtrapath = None):
-  """Return the full path name of MOD.
-
-      MOD is the basename of a dll or pyd.
-      XTRAPATH is a path or list of paths to search first.
-      Return the full path name of MOD.
-      Will search the full Windows search path, as well as sys.path"""
-  epath = finder.getpath()
-  if mod[-4:] in ('.pyd', '.PYD'):
-    epath = epath + sys.path
-  if xtrapath is not None:
-    if type(xtrapath) == type(''):
-      epath.insert(0, xtrapath)
-    else:
-      epath = xtrapath + epath
-  for p in epath:
-    npth = os.path.join(p, mod)
-    if os.path.exists(npth):
-      return npth
-  return ''
-
-def getImports1(pth):
-    """Find the binary dependencies of PTH.
-
-        This implementation (not used right now) uses the MSVC utility dumpbin"""
-    rslt = []
-    tmpf = tempfile.mktemp()
-    os.system('dumpbin /IMPORTS "%s" >%s' %(pth, tmpf))
-    time.sleep(0.1)
-    txt = open(tmpf,'r').readlines()
-    os.remove(tmpf)
-    i = 0
-    while i < len(txt):
-        tokens = string.split(txt[i])
-        if len(tokens) == 1 and string.find(tokens[0], '.') > 0:
-            rslt.append(string.strip(tokens[0]))
-        i = i + 1
-    return rslt
-
-def getImports2(pth):
-    """Find the binary dependencies of PTH.
-
-        This implementation walks through the PE header"""
-    import struct
-    rslt = []
-    try:
-      f = open(pth, 'rb').read()
-      pehdrd = struct.unpack('l', f[60:64])[0]
-      magic = struct.unpack('l', f[pehdrd:pehdrd+4])[0]
-      numsecs = struct.unpack('h', f[pehdrd+6:pehdrd+8])[0]
-      numdirs = struct.unpack('l', f[pehdrd+116:pehdrd+120])[0]
-      idata = ''
-      if magic == 17744:
-          importsec, sz = struct.unpack('2l', f[pehdrd+128:pehdrd+136])
-          secttbl = pehdrd + 120 + 8*numdirs
-          secttblfmt = '8s7l2h'
-          seclist = []
-          for i in range(numsecs):
-              seclist.append(struct.unpack(secttblfmt, f[secttbl+i*40:secttbl+(i+1)*40]))
-              #nm, vsz, va, rsz, praw, preloc, plnnums, qrelocs, qlnnums, flags \
-              # = seclist[-1]
-          for i in range(len(seclist)-1):
-              if seclist[i][2] <= importsec < seclist[i+1][2]:
-                  break
-          vbase = seclist[i][2]
-          raw = seclist[i][4]
-          idatastart = raw + importsec - vbase
-          idata = f[idatastart:idatastart+seclist[i][1]]
-          i = 0
-          while 1:
-              vsa =  struct.unpack('5l', idata[i*20:i*20+20])[3]
-              if vsa == 0:
-                  break
-              sa = raw + vsa - vbase
-              end = string.find(f, '\000', sa)
-              rslt.append(f[sa:end])
-              i = i + 1
-    except IOError:
-      print "bindepend cannot analyze %s - file not found!"
-    except struct.error:
-      print "bindepend cannot analyze %s - error walking thru pehdr"
-    return rslt
-
-def Dependencies(lTOC):
-  """Expand LTOC to include all the closure of binary dependencies.
-
-     LTOC is a logical table of contents, ie, a seq of tuples (name, path).
-     Return LTOC expanded by all the binary dependencies of the entries
-     in LTOC, except those listed in the module global EXCLUDES"""
-  for (nm, pth) in lTOC:
-    fullnm = string.upper(os.path.basename(pth))
-    if seen.get(string.upper(nm), 0):
-      continue
-    print "analyzing", nm
-    seen[string.upper(nm)] = 1
-    dlls = getImports(pth)
-    for lib in dlls:
-        print " found", lib
-        if excludes.get(string.upper(lib), 0):
-          continue
-        if seen.get(string.upper(lib), 0):
-          continue
-        npth = getfullnameof(lib)
-        if npth:
-          lTOC.append((lib, npth))
-        else:
-          print " lib not found:", lib, "dependency of",
-  return lTOC
-
-
-##if getfullnameof('dumpbin.exe') == '':
-##    def getImports(pth):
-##        return getImports2(pth)
-##else:
-##    def getImports(pth):
-##        return getImports1(pth)
-
-def getImports(pth):
-    """Forwards to either getImports1 or getImports2
-    """
-    return getImports2(pth)
-

+ 0 - 204
direct/src/pyinst/carchive.py

@@ -1,204 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-#
-# A subclass of Archive that can be understood
-# by a C program. See uplaunch.cpp for unpacking
-# from C.
-import archive
-import struct
-import zlib
-import strop
-
-class CTOC:
-  """A class encapsulating the table of contents of a CArchive.
-  
-     When written to disk, it is easily read from C."""
-  ENTRYSTRUCT = 'iiiibc' #(structlen, dpos, dlen, ulen, flag, typcd) followed by name
-  def __init__(self):
-    self.data = []
-  
-  def frombinary(self, s):
-    """Decode the binary string into an in memory list.
-    
-        S is a binary string."""
-    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-    p = 0
-    while p<len(s):
-      (slen, dpos, dlen, ulen, flag, typcd) = struct.unpack(self.ENTRYSTRUCT, 
-                                                  s[p:p+entrylen]) 
-      nmlen = slen - entrylen 
-      p = p + entrylen
-      (nm,) = struct.unpack(repr(nmlen)+'s', s[p:p+nmlen])
-      p = p + nmlen 
-      self.data.append((dpos, dlen, ulen, flag, typcd, nm[:-1]))
-
-  def tobinary(self):
-    """Return self as a binary string."""
-    import string
-    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-    rslt = []
-    for (dpos, dlen, ulen, flag, typcd, nm) in self.data:
-      nmlen = len(nm) + 1       # add 1 for a '\0'
-      rslt.append(struct.pack(self.ENTRYSTRUCT+repr(nmlen)+'s',
-        nmlen+entrylen, dpos, dlen, ulen, flag, typcd, nm+'\0'))
-    return string.join(rslt, '')
-
-  def add(self, dpos, dlen, ulen, flag, typcd, nm):
-    """Add an entry to the table of contents.
-    
-       DPOS is data position.
-       DLEN is data length.
-       ULEN is the uncompressed data len.
-       FLAG says if the data is compressed.
-       TYPCD is the "type" of the entry (used by the C code)
-       NM is the entry's name."""
-    self.data.append((dpos, dlen, ulen, flag, typcd, nm))
-
-  def get(self, ndx):
-    """return the toc entry (tuple) at index NDX"""
-    return self.data[ndx]
-
-  def __getitem__(self, ndx):
-    return self.data[ndx]
-
-  def find(self, name):
-    """Return the index of the toc entry with name NAME.
-    
-       Return -1 for failure."""
-    for i in range(len(self.data)):
-      if self.data[i][-1] == name:
-        return i
-    return -1
-
-class CArchive(archive.Archive):
-  """An Archive subclass that an hold arbitrary data.
-  
-     Easily handled from C or from Python."""
-  MAGIC = 'MEI\014\013\012\013\015'
-  HDRLEN = 0
-  TOCTMPLT = CTOC
-  TRLSTRUCT = '8siii'
-  TRLLEN = 20
-  LEVEL = 9
-  def __init__(self, path=None, start=0, len=0):
-    """Constructor.
-    
-       PATH is path name of file (create an empty CArchive if path is None).
-       START is the seek position within PATH.
-       LEN is the length of the CArchive (if 0, then read till EOF). """
-    self.len = len
-    archive.Archive.__init__(self, path, start)
-
-  def checkmagic(self):
-    """Verify that self is a valid CArchive.
-    
-        Magic signature is at end of the archive."""
-    #magic is at EOF; if we're embedded, we need to figure where that is
-    if self.len:
-      self.lib.seek(self.start+self.len, 0)
-    else:
-      self.lib.seek(0, 2)
-    filelen = self.lib.tell()
-    if self.len:
-      self.lib.seek(self.start+self.len-self.TRLLEN, 0)
-    else:
-      self.lib.seek(-self.TRLLEN, 2)
-    (magic, totallen, tocpos, toclen) = struct.unpack(self.TRLSTRUCT, 
-                                                self.lib.read(self.TRLLEN))
-    if magic != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    self.pkgstart = filelen - totallen
-    if self.len:
-      if totallen != self.len or self.pkgstart != self.start:
-        raise RuntimeError, "Problem with embedded archive in %s" % self.path
-    self.tocpos, self.toclen = tocpos, toclen
-
-  def loadtoc(self):
-    """Load the table of contents into memory."""
-    self.toc = self.TOCTMPLT()
-    self.lib.seek(self.pkgstart+self.tocpos)
-    tocstr = self.lib.read(self.toclen)
-    self.toc.frombinary(tocstr)
-
-  def extract(self, name):
-    """Get the contents of an entry.
-    
-       NAME is an entry name.
-       Return the tuple (ispkg, contents).
-       For non-Python resources, ispkg is meaningless (and 0).
-       Used by the import mechanism."""
-    if type(name) == type(''):
-      ndx = self.toc.find(name)
-      if ndx == -1:
-        return None
-    else:
-      ndx = name
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    self.lib.seek(self.pkgstart+dpos)
-    rslt = self.lib.read(dlen)
-    if flag == 1:
-      rslt = zlib.decompress(rslt)
-    if typcd == 'M':
-      return (1, rslt)
-    return (0, rslt)
-
-  def contents(self):
-    """Return the names of the entries"""
-    rslt = []
-    for (dpos, dlen, ulen, flag, typcd, nm) in self.toc:
-      rslt.append(nm)
-    return rslt
-
-  def add(self, entry):
-    """Add an ENTRY to the CArchive.
-    
-       ENTRY must have:
-         entry[0] is name (under which it will be saved).
-         entry[1] is fullpathname of the file.
-         entry[2] is a flag for its storage format (0==uncompressed,
-         1==compressed, 2==Python source format)
-         entry[3] is the entry's type code."""
-    (nm, pathnm, flag, typcd) = entry[:4]
-    if flag == 2:
-        s = open(pathnm, 'r').read()
-        s = s + '\n\0'
-    else:
-        s = open(pathnm, 'rb').read()
-    ulen = len(s)
-    if flag == 1:
-      s = zlib.compress(s, self.LEVEL)
-    dlen = len(s)
-    where = self.lib.tell()
-    if typcd == 'm':
-      if strop.find(pathnm, '.__init__.py') > -1:
-        typcd = 'M'
-    self.toc.add(where, dlen, ulen, flag, typcd, nm)
-    self.lib.write(s)
-
-  def save_toc(self, tocpos):
-    """Save the table of contents to disk."""
-    self.tocpos = tocpos
-    tocstr = self.toc.tobinary()
-    self.toclen = len(tocstr)
-    self.lib.write(tocstr)
-
-  def save_trailer(self, tocpos):
-    """Save the trailer to disk.
-    
-       CArchives can be opened from the end - the trailer points
-       back to the start. """
-    totallen = tocpos + self.toclen + self.TRLLEN
-    trl = struct.pack(self.TRLSTRUCT, self.MAGIC, totallen, 
-                      tocpos, self.toclen)
-    self.lib.write(trl)
-
-  def openEmbedded(self, name):
-    """Open a CArchive of name NAME embedded within this CArchive."""
-    ndx = self.toc.find(name)
-    if ndx == -1:
-      raise KeyError, "Member '%s' not found in %s" % (name, self.path)
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    if flag:
-      raise ValueError, "Cannot open compressed archive %s in place"
-    return CArchive(self.path, self.pkgstart+dpos, dlen)
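
Everything the C-side unpacker needs sits in the fixed-size trailer that save_trailer() writes at the end of the file: the 8-byte magic, the total package length, and the position and length of the TOC (TRLSTRUCT = '8siii', 20 bytes). A hedged sketch of reading such a trailer back in modern Python, assuming a file laid out exactly as above (read_trailer is illustrative, not part of the original module):

import struct

TRLSTRUCT = '8siii'                      # magic, total length, toc pos, toc len
TRLLEN = struct.calcsize(TRLSTRUCT)      # 20 bytes
MAGIC = b'MEI\014\013\012\013\015'

def read_trailer(path):
    # Mirror of checkmagic() for a non-embedded archive: seek to EOF minus
    # the trailer, unpack it, and validate the magic signature.
    with open(path, 'rb') as f:
        f.seek(-TRLLEN, 2)
        magic, totallen, tocpos, toclen = struct.unpack(TRLSTRUCT, f.read(TRLLEN))
    if magic != MAGIC:
        raise RuntimeError('%s is not a CArchive' % path)
    return totallen, tocpos, toclen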

+ 0 - 157
direct/src/pyinst/carchive_rt.py

@@ -1,157 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# license: use as you please. No warranty.
-#
-# A subclass of Archive that can be understood
-# by a C program. See uplaunch.cpp for unpacking
-# from C.
-
-#carchive_rt is a stripped down version of MEInc.Dist.carchive.
-#It has had all building logic removed.
-#Its purpose is to bootstrap the Python installation.
-
-import archive_rt
-import struct
-import zlib
-import strop
-
-class CTOC:
-  ENTRYSTRUCT = 'iiiibc' #(structlen, dpos, dlen, ulen, flag, typcd) followed by name
-  def __init__(self):
-    self.data = []
-  
-  def frombinary(self, s):
-    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-    p = 0
-    while p<len(s):
-      (slen, dpos, dlen, ulen, flag, typcd) = struct.unpack(self.ENTRYSTRUCT, 
-                                                  s[p:p+entrylen]) 
-      nmlen = slen - entrylen 
-      p = p + entrylen
-      (nm,) = struct.unpack(repr(nmlen)+'s', s[p:p+nmlen])
-      p = p + nmlen 
-      self.data.append((dpos, dlen, ulen, flag, typcd, nm[:-1]))
-
-##  def tobinary(self):
-##    import string
-##    entrylen = struct.calcsize(self.ENTRYSTRUCT)
-##    rslt = []
-##    for (dpos, dlen, ulen, flag, typcd, nm) in self.data:
-##      nmlen = len(nm) + 1     # add 1 for a '\0'
-##      rslt.append(struct.pack(self.ENTRYSTRUCT+repr(nmlen)+'s',
-##        nmlen+entrylen, dpos, dlen, ulen, flag, typcd, nm+'\0'))
-##    return string.join(rslt, '')
-##
-##  def add(self, dpos, dlen, ulen, flag, typcd, nm):
-##    self.data.append(dpos, dlen, ulen, flag, typcd, nm)
-
-  def get(self, ndx):
-    return self.data[ndx]
-
-  def __getitem__(self, ndx):
-    return self.data[ndx]
-
-  def find(self, name):
-    for i in range(len(self.data)):
-      if self.data[i][-1] == name:
-        return i
-    return -1
-
-class CArchive(archive_rt.Archive):
-  MAGIC = 'MEI\014\013\012\013\015'
-  HDRLEN = 0
-  TOCTMPLT = CTOC
-  TRLSTRUCT = '8siii'
-  TRLLEN = 20
-  LEVEL = 9
-  def __init__(self, path=None, start=0, len=0):
-    self.len = len
-    archive_rt.Archive.__init__(self, path, start)
-
-  def checkmagic(self):
-    #magic is at EOF; if we're embedded, we need to figure where that is
-    if self.len:
-      self.lib.seek(self.start+self.len, 0)
-    else:
-      self.lib.seek(0, 2)
-    filelen = self.lib.tell()
-    if self.len:
-      self.lib.seek(self.start+self.len-self.TRLLEN, 0)
-    else:
-      self.lib.seek(-self.TRLLEN, 2)
-    (magic, totallen, tocpos, toclen) = struct.unpack(self.TRLSTRUCT, 
-                                                self.lib.read(self.TRLLEN))
-    if magic != self.MAGIC:
-      raise RuntimeError, "%s is not a valid %s archive file" \
-                % (self.path, self.__class__.__name__)
-    self.pkgstart = filelen - totallen
-    if self.len:
-      if totallen != self.len or self.pkgstart != self.start:
-        raise RuntimeError, "Problem with embedded archive in %s" % self.path
-    self.tocpos, self.toclen = tocpos, toclen
-
-  def loadtoc(self):
-    self.toc = self.TOCTMPLT()
-    self.lib.seek(self.pkgstart+self.tocpos)
-    tocstr = self.lib.read(self.toclen)
-    self.toc.frombinary(tocstr)
-
-  def extract(self, name):
-    if type(name) == type(''):
-      ndx = self.toc.find(name)
-      if ndx == -1:
-        return None
-    else:
-      ndx = name
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    self.lib.seek(self.pkgstart+dpos)
-    rslt = self.lib.read(dlen)
-    if flag == 1:
-      rslt = zlib.decompress(rslt)
-    if typcd == 'M':
-      return (1, rslt)
-    return (0, rslt)
-
-  def contents(self):
-    rslt = []
-    for (dpos, dlen, ulen, flag, typcd, nm) in self.toc:
-      rslt.append(nm)
-    return rslt
-
-##  def add(self, entry):
-##    (nm, pathnm, flag, typcd) = entry[:4]
-##    if flag == 2:
-##        s = open(pathnm, 'r').read()
-##        s = s + '\0'
-##    else:
-##        s = open(pathnm, 'rb').read()
-##    ulen = len(s)
-##    if flag == 1:
-##      s = zlib.compress(s, self.LEVEL)
-##    dlen = len(s)
-##    where = self.lib.tell()
-##    if typcd == 'm':
-##      if strop.find(pathnm, '.__init__.py') > -1:
-##        typcd = 'M'
-##    self.toc.add(where, dlen, ulen, flag, typcd, nm)
-##    self.lib.write(s)
-##
-##  def save_toc(self, tocpos):
-##    self.tocpos = tocpos
-##    tocstr = self.toc.tobinary()
-##    self.toclen = len(tocstr)
-##    self.lib.write(tocstr)
-##
-##  def save_trailer(self, tocpos):
-##    totallen = tocpos + self.toclen + self.TRLLEN
-##    trl = struct.pack(self.TRLSTRUCT, self.MAGIC, totallen, 
-##                      tocpos, self.toclen)
-##    self.lib.write(trl)
-
-  def openEmbedded(self, name):
-    ndx = self.toc.find(name)
-    if ndx == -1:
-      raise KeyError, "Member '%s' not found in %s" % (name, self.path)
-    (dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
-    if flag:
-      raise ValueError, "Cannot open compressed archive %s in place"
-    return CArchive(self.path, self.pkgstart+dpos, dlen)

+ 0 - 178
direct/src/pyinst/finder.py

@@ -1,178 +0,0 @@
-# copyright McMillan Enterprises, 1999
-import os, sys
-import string
-
-SCRIPT = 1
-GSCRIPT = 2
-MODULE = 3
-PACKAGE = 4
-PBINARY = 5
-BINARY = 6
-ZLIB = 7
-DIRECTORY = 8
-DATA = 9
-
-_bpath = None
-_ppath = None
-_pcache = {}
-
-def _locate(nm, xtrapath=None, base=None):
-    """Find a file / directory named NM in likely places.
-    
-       XTRAPATH is a list of paths to prepend to BASE.
-       If BASE is None, sys.path (as extended by packages) is used."""
-    ppath = base
-    if base is None:
-        ppath = _ppath
-    if xtrapath:
-        ppath = xtrapath + ppath
-    for pth in ppath:
-        fullnm = os.path.join(pth, nm)
-        #print " _locate trying", fullnm
-        if os.path.exists(fullnm):
-            break
-    else:
-        return ''
-    return fullnm
-
-def _locatepython(name, xtrapath=None):
-    """Locate a Python resource named NAME.
-    
-       All of the standard file extensions will be tried.
-       XTRAPATH is prepended to sys.path."""
-    for ext in ('.py', '.pyc', '.pyw', '.pyo', '.pyd', '.dll'):
-        fullnm = _locate(name+ext, xtrapath)
-        if fullnm:
-            break
-    else:
-        for ext in ('.pyd', '.dll'):
-            fullnm = _locate(name+ext, [], _bpath)
-            if fullnm:
-                break
-    return fullnm
-
-def ispackage(name):
-    """Determine if NAME is the name of a package."""
-    if os.path.exists(os.path.join(name, '__init__.py')):
-        return 1
-    if os.path.exists(os.path.join(name, '__init__.pyc')):
-        return 1
-    if os.path.exists(os.path.join(name, '__init__.pyo')):
-        return 1
-    return 0
-        
-def idtype(fullnm):
-    """Figure out what type of resource FULLNM refers to."""
-    if os.path.isdir(fullnm):
-        if ispackage(fullnm):
-            return PACKAGE
-        return DIRECTORY
-    ext = os.path.splitext(fullnm)[1]
-    if ext:
-        if ext == '.pyd':
-            return PBINARY
-        if ext == '.dll':
-            return BINARY
-        if ext in ('.pyc', '.pyo'):
-            return MODULE
-        if ext == '.py':
-            return SCRIPT
-        if ext == '.pyw':
-            return GSCRIPT
-        if ext == '.pyz':
-            return ZLIB
-    return DATA
-
-def identify(name, xtrapath=None):
-    """Find, and identify the type of NAME, using XTRAPATH as the
-       first place to look.
-
-       Return type, name and full path name.
-       NAME can be a logical or physical name. However, the logical
-       name of a Python module can easily conflict with the physical
-       name of something else, so beware."""
-    if os.path.exists(name):
-        fullnm = name
-    else:
-        if xtrapath is None:
-            xtra = []
-        elif id(xtrapath) in _pcache:
-            xtra = _pcache[id(xtrapath)]
-        else:
-            xtra = expand(xtrapath)
-            _pcache[id(xtrapath)] = xtra 
-        fullnm = _locate(name, xtra)
-        if not fullnm:
-            fullnm =  _locate(name, [], _bpath)
-            if not fullnm:
-                ext = os.path.splitext(name)[1]
-                if not ext:
-                    fullnm = _locatepython(name, xtra)
-                    if not fullnm:
-                        raise ValueError, "%s not found" % name
-                else:
-                    nm = name
-                    while string.count(nm, '.'):
-                        nm = string.replace(nm, '.', '/', 1)
-                        fullnm = _locatepython(nm, xtra)
-                        if fullnm:
-                            break
-                    else:
-                        raise ValueError, "%s not found" % name
-                    
-    typ = idtype(fullnm)
-    nm = name
-    if typ in (GSCRIPT, SCRIPT, MODULE, PACKAGE, PBINARY):
-        dir, nm = os.path.split(fullnm)
-        nm = os.path.splitext(nm)[0]
-    if typ == SCRIPT:
-        if os.path.exists(fullnm+'c') or os.path.exists(fullnm+'o'):
-            typ = MODULE
-    if typ in (MODULE, PACKAGE):
-        while idtype(dir) == PACKAGE:
-            dir, lnode = os.path.split(dir)
-            nm = lnode+'.'+nm
-    elif typ == BINARY:
-        nm = os.path.basename(fullnm)
-    return typ, nm, fullnm
- 
-def expand(plist):
-    """ expand a list of paths (like sys.path) to include all the 
-        directories that qualify as packages """
-    pkgdirs = []
-    for pth in plist:
-        os.path.walk(pth, pkgfinder, pkgdirs)
-    return plist + pkgdirs
-
-def pkgfinder(pkgdirs, dir, fnms):
-    i = 0
-    while i < len(fnms):
-        fnm = os.path.join(dir, fnms[i])
-        if os.path.isdir(fnm):
-            if ispackage(fnm):
-                pkgdirs.append(fnm)
-                i = i + 1
-            else:
-                del fnms[i]
-        else:
-            i = i + 1
-
-if _bpath is None:
-    try:
-        import win32api
-    except ImportError:
-        print "Cannot determine your Windows or System directories"
-        print "Please add them to your PATH if .dlls are not found"
-        _bpath = []
-    else:
-        sysdir = win32api.GetSystemDirectory()
-        sysdir2 = os.path.join(sysdir, '../SYSTEM')
-        windir = win32api.GetWindowsDirectory()
-        _bpath = [sysdir, sysdir2, windir]
-    _bpath.extend(string.split(os.environ.get('PATH', ''), ';'))
-if _ppath is None:
-    _ppath = expand(sys.path)
-        
-def getpath():
-    """Return the path that Windows will search for dlls."""
-    return _bpath
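
_locatepython() above simply tries each Python-ish extension in a fixed order, probing every search directory for each one; a minimal standalone version of that probe, assuming a plain list of directories (locate_python is illustrative, not the module's API):

import os

PY_EXTS = ('.py', '.pyc', '.pyw', '.pyo', '.pyd', '.dll')

def locate_python(name, search_path):
    # For each extension, try every directory; the first hit wins.
    for ext in PY_EXTS:
        for d in search_path:
            candidate = os.path.join(d, name + ext)
            if os.path.exists(candidate):
                return candidate
    return ''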

+ 0 - 138
direct/src/pyinst/icon.py

@@ -1,138 +0,0 @@
-# This code is courtesy of Thomas Heller, who
-# has kindly donated it to this project.
-RT_ICON = 3
-RT_GROUP_ICON = 14
-LOAD_LIBRARY_AS_DATAFILE = 2
-
-import struct
-
-class Structure:
-    def __init__ (self):
-        size = self._sizeInBytes = struct.calcsize (self._format_)
-        self._fields_ = list (struct.unpack (self._format_, '\000' * size))
-        indexes = self._indexes_ = {}
-        for i in range (len (self._names_)):
-            indexes[self._names_[i]] = i
-    def dump (self):
-        print "DUMP of", self
-        for name in self._names_:
-            if name[0] != '_':
-                print "%20s = %s" % (name, getattr (self, name))
-        print
-    def __getattr__ (self, name):
-        if name in self._names_:
-            index = self._indexes_[name]
-            return self._fields_[index]
-        try:
-            return self.__dict__[name]
-        except KeyError:
-            raise AttributeError, name
-    def __setattr__ (self, name, value):
-        if name in self._names_:
-            index = self._indexes_[name]
-            self._fields_[index] = value
-        else:
-            self.__dict__[name] = value
-    def tostring (self):
-        return apply (struct.pack, [self._format_,] + self._fields_)
-    def fromfile (self, file):
-        data = file.read (self._sizeInBytes)
-        self._fields_ = list (struct.unpack (self._format_, data))
-
-class ICONDIRHEADER (Structure):
-    _names_ = "idReserved", "idType", "idCount"
-    _format_ = "hhh"
-
-class ICONDIRENTRY (Structure):
-    _names_ = "bWidth", "bHeight", "bColorCount", "bReserved", "wPlanes", "wBitCount", "dwBytesInRes", "dwImageOffset"
-    _format_ = "bbbbhhii"
-
-class GRPICONDIR (Structure):
-    _names_ = "idReserved", "idType", "idCount"
-    _format_ = "hhh"
-
-class GRPICONDIRENTRY (Structure):
-    _names_ = "bWidth", "bHeight", "bColorCount", "bReserved", "wPlanes", "wBitCount", "dwBytesInRes", "nID"
-    _format_ = "bbbbhhih"
-
-class IconFile:
-    def __init__ (self, path):
-        self.path = path
-        file = open (path, "rb")
-        self.entries = []
-        self.images = []
-        header = self.header = ICONDIRHEADER()
-        header.fromfile (file)
-        for i in range (header.idCount):
-            entry = ICONDIRENTRY()
-            entry.fromfile (file)
-            self.entries.append (entry)
-        for e in self.entries:
-            file.seek (e.dwImageOffset, 0)
-            self.images.append (file.read (e.dwBytesInRes))
-
-    def grp_icon_dir (self):
-        return self.header.tostring()
-
-    def grp_icondir_entries (self):
-        data = ""
-        i = 1
-        for entry in self.entries:
-            e = GRPICONDIRENTRY()
-            for n in e._names_[:-1]:
-                setattr(e, n, getattr (entry, n))
-            e.nID = i
-            i = i + 1
-            data = data + e.tostring()
-        return data
-            
-
-def CopyIcons_FromIco (dstpath, srcpath):
-    f = IconFile (srcpath)
-    print "Updating icons from", srcpath, "to", dstpath
-    import win32api #, win32con
-    hdst = win32api.BeginUpdateResource (dstpath, 0)
-    data = f.grp_icon_dir()
-    data = data + f.grp_icondir_entries()
-    win32api.UpdateResource (hdst, RT_GROUP_ICON, 1, data)
-    print "Writing RT_GROUP_ICON resource with %d bytes" % len (data)
-    i = 1
-    for data in f.images:
-        win32api.UpdateResource (hdst, RT_ICON, i, data)
-        print "Writing RT_ICON resource with %d bytes" % len (data)
-        i = i + 1
-    win32api.EndUpdateResource (hdst, 0)
-
-def CopyIcons (dstpath, srcpath):
-    import os.path, string
-    index = None
-    try:
-        srcpath, index = map (string.strip, string.split (srcpath, ','))
-        index = int (index)
-    except:
-        pass
-    print "PATH, INDEX", srcpath, index
-    srcext = os.path.splitext (srcpath)[1]
-    if string.lower (srcext) == '.ico':
-        return CopyIcons_FromIco (dstpath, srcpath)
-    if index is not None:
-        print "Updating icons from", srcpath, ", %d to" % index, dstpath
-    else:
-        print "Updating icons from", srcpath, "to", dstpath
-    import win32api #, win32con
-    hdst = win32api.BeginUpdateResource (dstpath, 0)
-    hsrc = win32api.LoadLibraryEx (srcpath, 0, LOAD_LIBRARY_AS_DATAFILE)
-    if index is None:
-        grpname = win32api.EnumResourceNames (hsrc, RT_GROUP_ICON)[0]
-    elif index >= 0:
-        grpname = win32api.EnumResourceNames (hsrc, RT_GROUP_ICON)[index]
-    else:
-        grpname = -index
-    data = win32api.LoadResource (hsrc, RT_GROUP_ICON, grpname)
-    win32api.UpdateResource (hdst, RT_GROUP_ICON, grpname, data)
-    for iconname in win32api.EnumResourceNames (hsrc, RT_ICON):
-        data = win32api.LoadResource (hsrc, RT_ICON, iconname)
-        win32api.UpdateResource (hdst, RT_ICON, iconname, data)
-    win32api.FreeLibrary (hsrc)
-    win32api.EndUpdateResource (hdst, 0)
-
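
The Structure helpers above are thin wrappers around struct for the fixed-size records of an .ico file: ICONDIRHEADER is three 16-bit fields ('hhh') and each ICONDIRENTRY is 'bbbbhhii'. A small sketch of reading those records directly, without the class machinery (same format strings and the same native-alignment assumption as the original; read_ico_entries is illustrative):

import struct

ICONDIRHEADER = 'hhh'        # idReserved, idType, idCount
ICONDIRENTRY = 'bbbbhhii'    # width, height, colors, reserved, planes,
                             # bitcount, bytes_in_res, image_offset

def read_ico_entries(path):
    # Read the directory header, then one fixed-size entry per icon image.
    with open(path, 'rb') as f:
        _, _, count = struct.unpack(ICONDIRHEADER,
                                    f.read(struct.calcsize(ICONDIRHEADER)))
        entry_size = struct.calcsize(ICONDIRENTRY)
        return [struct.unpack(ICONDIRENTRY, f.read(entry_size))
                for _ in range(count)]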

+ 0 - 487
direct/src/pyinst/imputil.py

@@ -1,487 +0,0 @@
-#
-# imputil.py
-#
-# Written by Greg Stein. Public Domain.
-# No Copyright, no Rights Reserved, and no Warranties.
-#
-# Utilities to help out with custom import mechanisms.
-#
-# Additional modifications were contributed by Marc-Andre Lemburg and
-# Gordon McMillan.
-#
-
-__version__ = '0.3'
-
-# note: avoid importing non-builtin modules
-import imp
-import sys
-import strop
-import __builtin__      ### why this instead of just using __builtins__ ??
-
-# for the DirectoryImporter
-import struct
-import marshal
-
-class Importer:
-  "Base class for replacing standard import functions."
-
-  def install(self):
-    self.__chain_import = __builtin__.__import__
-    self.__chain_reload = __builtin__.reload
-    __builtin__.__import__ = self._import_hook
-    __builtin__.reload = self._reload_hook
-
-  ######################################################################
-  #
-  # PRIVATE METHODS
-  #
-  def _import_hook(self, name, globals=None, locals=None, fromlist=None):
-    """Python calls this hook to locate and import a module.
-
-    This method attempts to load the (dotted) module name. If it cannot
-    find it, then it delegates the import to the next import hook in the
-    chain (where "next" is defined as the import hook that was in place
-    at the time this Importer instance was installed).
-    """
-
-    # determine the context of this import
-    parent = self._determine_import_context(globals)
-
-    # import the module within the context, or from the default context
-    top, tail = self._import_top_module(parent, name)
-    if top is None:
-      # the module was not found; delegate to the next import hook
-      return self.__chain_import(name, globals, locals, fromlist)
-
-    # the top module may be under the control of a different importer.
-    # if so, then defer to that importer for completion of the import.
-    # note it may be self, or is undefined so we (self) may as well
-    # finish the import.
-    importer = top.__dict__.get('__importer__', self)
-    return importer._finish_import(top, tail, fromlist)
-
-  def _finish_import(self, top, tail, fromlist):
-    # if "a.b.c" was provided, then load the ".b.c" portion down from
-    # below the top-level module.
-    bottom = self._load_tail(top, tail)
-
-    # if the form is "import a.b.c", then return "a"
-    if not fromlist:
-      # no fromlist: return the top of the import tree
-      return top
-
-    # the top module was imported by self, or it was not imported through
-    # the Importer mechanism and self is simply handling the import of
-    # the sub-modules and fromlist.
-    #
-    # this means that the bottom module was also imported by self, or we
-    # are handling things in the absence of a prior Importer
-    #
-    # ### why the heck are we handling it? what is the example scenario
-    # ### where this happens? note that we can't determine is_package()
-    # ### for non-Importer modules.
-    #
-    # since we imported/handled the bottom module, this means that we can
-    # also handle its fromlist (and reliably determine is_package()).
-
-    # if the bottom node is a package, then (potentially) import some modules.
-    #
-    # note: if it is not a package, then "fromlist" refers to names in
-    #       the bottom module rather than modules.
-    # note: for a mix of names and modules in the fromlist, we will
-    #       import all modules and insert those into the namespace of
-    #       the package module. Python will pick up all fromlist names
-    #       from the bottom (package) module; some will be modules that
-    #       we imported and stored in the namespace, others are expected
-    #       to be present already.
-    if self._is_package(bottom.__dict__):
-      self._import_fromlist(bottom, fromlist)
-
-    # if the form is "from a.b import c, d" then return "b"
-    return bottom
-
-  def _reload_hook(self, module):
-    "Python calls this hook to reload a module."
-
-    # reloading of a module may or may not be possible (depending on the
-    # importer), but at least we can validate that it's ours to reload
-    importer = module.__dict__.get('__importer__', None)
-    if importer is not self:
-      return self.__chain_reload(module)
-
-    # okay. it is ours, but we don't know what to do (yet)
-    ### we should blast the module dict and do another get_code(). need to
-    ### flesh this out and add proper docco...
-    raise SystemError, "reload not yet implemented"
-
-  def _determine_import_context(self, globals):
-    """Returns the context in which a module should be imported.
-
-    The context could be a loaded (package) module and the imported module
-    will be looked for within that package. The context could also be None,
-    meaning there is no context -- the module should be looked for as a
-    "top-level" module.
-    """
-
-    if not globals or \
-       globals.get('__importer__', None) is not self:
-      # globals does not refer to one of our modules or packages.
-      # That implies there is no relative import context, and it
-      # should just pick it off the standard path.
-      return None
-
-    # The globals refer to a module or package of ours. It will define
-    # the context of the new import. Get the module/package fqname.
-    parent_fqname = globals['__name__']
-
-    # for a package, return itself (imports refer to pkg contents)
-    if self._is_package(globals):
-      parent = sys.modules[parent_fqname]
-      assert globals is parent.__dict__
-      return parent
-
-    i = strop.rfind(parent_fqname, '.')
-
-    # a module outside of a package has no particular import context
-    if i == -1:
-      return None
-
-    # for a module in a package, return the package (imports refer to siblings)
-    parent_fqname = parent_fqname[:i]
-    parent = sys.modules[parent_fqname]
-    assert parent.__name__ == parent_fqname
-    return parent
-
-  def _import_top_module(self, parent, name):
-    """Locate the top of the import tree (relative or absolute).
-
-    parent defines the context in which the import should occur. See
-    _determine_import_context() for details.
-
-    Returns a tuple (module, tail). module is the loaded (top-level) module,
-    or None if the module is not found. tail is the remaining portion of
-    the dotted name.
-    """
-    i = strop.find(name, '.')
-    if i == -1:
-      head = name
-      tail = ""
-    else:
-      head = name[:i]
-      tail = name[i+1:]
-    if parent:
-      fqname = "%s.%s" % (parent.__name__, head)
-    else:
-      fqname = head
-    module = self._import_one(parent, head, fqname)
-    if module:
-      # the module was relative, or no context existed (the module was
-      # simply found on the path).
-      return module, tail
-    if parent:
-      # we tried relative, now try an absolute import (from the path)
-      module = self._import_one(None, head, head)
-      if module:
-        return module, tail
-
-    # the module wasn't found
-    return None, None
-
-  def _import_one(self, parent, modname, fqname):
-    "Import a single module."
-
-    # has the module already been imported?
-    try:
-      return sys.modules[fqname]
-    except KeyError:
-      pass
-
-    # load the module's code, or fetch the module itself
-    result = self.get_code(parent, modname, fqname)
-    if result is None:
-      return None
-
-    # did get_code() return an actual module? (rather than a code object)
-    is_module = type(result[1]) is type(sys)
-
-    # use the returned module, or create a new one to exec code into
-    if is_module:
-      module = result[1]
-    else:
-      module = imp.new_module(fqname)
-
-    ### record packages a bit differently??
-    module.__importer__ = self
-    module.__ispkg__ = result[0]
-
-    # if present, the third item is a set of values to insert into the module
-    if len(result) > 2:
-      module.__dict__.update(result[2])
-
-    # the module is almost ready... make it visible
-    sys.modules[fqname] = module
-
-    # execute the code within the module's namespace
-    if not is_module:
-      exec(result[1], module.__dict__)
-
-    # insert the module into its parent
-    if parent:
-      setattr(parent, modname, module)
-    return module
-
-  def _load_tail(self, m, tail):
-    """Import the rest of the modules, down from the top-level module.
-
-    Returns the last module in the dotted list of modules.
-    """
-    if tail:
-      for part in strop.splitfields(tail, '.'):
-        fqname = "%s.%s" % (m.__name__, part)
-        m = self._import_one(m, part, fqname)
-        if not m:
-          raise ImportError, "No module named " + fqname
-    return m
-
-  def _import_fromlist(self, package, fromlist):
-    'Import any sub-modules in the "from" list.'
-
-    # if '*' is present in the fromlist, then look for the '__all__' variable
-    # to find additional items (modules) to import.
-    if '*' in fromlist:
-      fromlist = list(fromlist) + list(package.__dict__.get('__all__', []))
-
-    for sub in fromlist:
-      # if the name is already present, then don't try to import it (it
-      # might not be a module!).
-      if sub != '*' and not hasattr(package, sub):
-        subname = "%s.%s" % (package.__name__, sub)
-        submod = self._import_one(package, sub, subname)
-        if not submod:
-          raise ImportError, "cannot import name " + subname
-
-  def _is_package(self, module_dict):
-    """Determine if a given module (dictionary) specifies a package.
-
-    The package status is in the module-level name __ispkg__. The module
-    must also have been imported by self, so that we can reliably apply
-    semantic meaning to __ispkg__.
-
-    ### weaken the test to issubclass(Importer)?
-    """
-    return module_dict.get('__importer__', None) is self and \
-           module_dict['__ispkg__']
-
-  ######################################################################
-  #
-  # METHODS TO OVERRIDE
-  #
-  def get_code(self, parent, modname, fqname):
-    """Find and retrieve the code for the given module.
-
-    parent specifies a parent module to define a context for importing. It
-    may be None, indicating no particular context for the search.
-
-    modname specifies a single module (not dotted) within the parent.
-
-    fqname specifies the fully-qualified module name. This is a (potentially)
-    dotted name from the "root" of the module namespace down to the modname.
-    If there is no parent, then modname==fqname.
-
-    This method should return None, a 2-tuple, or a 3-tuple.
-
-    * If the module was not found, then None should be returned.
-
-    * The first item of the 2- or 3-tuple should be the integer 0 or 1,
-      specifying whether the module that was found is a package or not.
-
-    * The second item is the code object for the module (it will be
-      executed within the new module's namespace). This item can also
-      be a fully-loaded module object (e.g. loaded from a shared lib).
-
-    * If present, the third item is a dictionary of name/value pairs that
-      will be inserted into new module before the code object is executed.
-      This is provided in case the module's code expects certain values (such
-      as where the module was found). When the second item is a module
-      object, then these names/values will be inserted *after* the module
-      has been loaded/initialized.
-    """
-    raise RuntimeError, "get_code not implemented"
-
-
-######################################################################
-#
-# Simple function-based importer
-#
-class FuncImporter(Importer):
-  "Importer subclass to use a supplied function rather than method overrides."
-  def __init__(self, func):
-    self.func = func
-  def get_code(self, parent, modname, fqname):
-    return self.func(parent, modname, fqname)
-
-def install_with(func):
-  FuncImporter(func).install()
-
-
-######################################################################
-#
-# Base class for archive-based importing
-#
-class PackageArchiveImporter(Importer):
-  "Importer subclass to import from (file) archives."
-
-  def get_code(self, parent, modname, fqname):
-    if parent:
-      # if a parent "package" is provided, then we are importing a sub-file
-      # from the archive.
-      result = self.get_subfile(parent.__archive__, modname)
-      if result is None:
-        return None
-      if type(result) == type(()):
-        return (0,) + result
-      return 0, result
-
-    # no parent was provided, so the archive should exist somewhere on the
-    # default "path".
-    archive = self.get_archive(modname)
-    if archive is None:
-      return None
-    return 1, "", {'__archive__':archive}
-
-  def get_archive(self, modname):
-    """Get an archive of modules.
-
-    This method should locate an archive and return a value which can be
-    used by get_subfile to load modules from it. The value may be a simple
-    pathname, an open file, or a complex object that caches information
-    for future imports.
-
-    Return None if the archive was not found.
-    """
-    raise RuntimeError, "get_archive not implemented"
-
-  def get_subfile(self, archive, modname):
-    """Get code from a subfile in the specified archive.
-
-    Given the specified archive (as returned by get_archive()), locate
-    and return a code object for the specified module name.
-
-    A 2-tuple may be returned, consisting of a code object and a dict
-    of name/values to place into the target module.
-
-    Return None if the subfile was not found.
-    """
-    raise RuntimeError, "get_subfile not implemented"
-
-
-class PackageArchive(PackageArchiveImporter):
-  "PackageArchiveImporter subclass that refers to a specific archive."
-
-  def __init__(self, modname, archive_pathname):
-    self.__modname = modname
-    self.__path = archive_pathname
-
-  def get_archive(self, modname):
-    if modname == self.__modname:
-      return self.__path
-    return None
-
-  # get_subfile is passed the full pathname of the archive
-
-
-######################################################################
-#
-# Emulate the standard directory-based import mechanism
-#
-
-class DirectoryImporter(Importer):
-  "Importer subclass to emulate the standard importer."
-
-  def __init__(self, dir):
-    self.dir = dir
-    self.ext_char = __debug__ and 'c' or 'o'
-    self.ext = '.py' + self.ext_char
-
-  def get_code(self, parent, modname, fqname):
-    if parent:
-      dir = parent.__pkgdir__
-    else:
-      dir = self.dir
-
-    # pull the os module from our instance data. we don't do this at the
-    # top-level, because it isn't a builtin module (and we want to defer
-    # loading non-builtins until as late as possible).
-    try:
-      os = self.os
-    except AttributeError:
-      import os
-      self.os = os
-
-    pathname = os.path.join(dir, modname)
-    if os.path.isdir(pathname):
-      values = { '__pkgdir__': pathname }
-      ispkg = 1
-      pathname = os.path.join(pathname, '__init__')
-    else:
-      values = { }
-      ispkg = 0
-
-    t_py = self._timestamp(pathname + '.py')
-    t_pyc = self._timestamp(pathname + self.ext)
-    if t_py is None and t_pyc is None:
-      return None
-    code = None
-    if t_py is None or (t_pyc is not None and t_pyc >= t_py):
-      f = open(pathname + self.ext, 'rb')
-      if f.read(4) == imp.get_magic():
-        t = struct.unpack('<I', f.read(4))[0]
-        if t == t_py:
-          code = marshal.load(f)
-      f.close()
-    if code is None:
-      code = self._compile(pathname + '.py', t_py)
-    return ispkg, code, values
-
-  def _timestamp(self, pathname):
-    try:
-      s = self.os.stat(pathname)
-    except OSError:
-      return None
-    return long(s[8])
-
-  def _compile(self, pathname, timestamp):
-    codestring = open(pathname, 'r').read()
-    if codestring and codestring[-1] != '\n':
-      codestring = codestring + '\n'
-    code = __builtin__.compile(codestring, pathname, 'exec')
-
-    # try to cache the compiled code
-    try:
-      f = open(pathname + self.ext_char, 'wb')
-      f.write('\0\0\0\0')
-      f.write(struct.pack('<I', timestamp))
-      marshal.dump(code, f)
-      f.flush()
-      f.seek(0, 0)
-      f.write(imp.get_magic())
-      f.close()
-    except OSError:
-      pass
-
-    return code
-
-  def __repr__(self):
-    return '<%s.%s for "%s" at 0x%x>' % (self.__class__.__module__,
-                                         self.__class__.__name__,
-                                         self.dir,
-                                         id(self))
-
-def _test_dir():
-  "Debug/test function to create DirectoryImporters from sys.path."
-  path = sys.path[:]
-  path.reverse()
-  for d in path:
-    DirectoryImporter(d).install()
-
-######################################################################
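
The get_code() contract documented above (return None, a (ispkg, code) pair, or a (ispkg, code, values) triple) is all a concrete Importer has to satisfy; the base class turns the result into a live module. A standalone sketch of that handshake in modern Python, roughly what _import_one() does, with a dict of sources in place of a real archive (SOURCES, get_code and import_one here are illustrative, not part of imputil):

import sys, types

SOURCES = {'demo': "VALUE = 42\n"}       # hypothetical in-memory "archive"

def get_code(parent, modname, fqname):
    # Satisfies the documented contract: None if unknown,
    # else (ispkg, code object, extra values).
    src = SOURCES.get(fqname)
    if src is None:
        return None
    return 0, compile(src, fqname, 'exec'), {'__origin__': 'in-memory'}

def import_one(fqname):
    result = get_code(None, fqname, fqname)
    if result is None:
        raise ImportError(fqname)
    ispkg, code, values = result
    module = types.ModuleType(fqname)
    module.__dict__.update(values)       # extra values go in before the exec
    sys.modules[fqname] = module         # make it visible, then run the code
    exec(code, module.__dict__)
    return module

print(import_one('demo').VALUE)          # 42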

+ 0 - 91
direct/src/pyinst/installutils.py

@@ -1,91 +0,0 @@
-# copyright 1999 McMillan Enterprises, Inc.
-# demo code - use as you please.
-import os
-import stat
-import sys
-
-def copyFile(srcFiles, destFile, append=0):
-    '''
-    Copy one or more files to another file.  If srcFiles is a list, then all
-    will be concatenated together to destFile.  The append flag is also valid
-    for single file copies.
-
-    destFile will have the mode, ownership and timestamp of the last file
-    copied/appended.
-    '''
-    if type(srcFiles) == type([]):
-        # in case we need to overwrite on the first file...
-        copyFile(srcFiles[0], destFile, append)
-        for file in srcFiles[1:]:
-            copyFile(file, destFile, 1)
-        return
-
-    mode = 'wb'
-    if append:
-        mode = 'ab'
-    print " ", srcFiles, "->",
-    input = open(srcFiles, 'rb')
-    if input:
-        print destFile
-        output = open(destFile, mode)
-        while 1:
-            bytesRead = input.read(8192)
-            if bytesRead:
-                output.write(bytesRead)
-            else:
-                break
-
-        input.close()
-        output.close()
-
-        stats = os.stat(srcFiles)
-        os.chmod(destFile, stats[stat.ST_MODE])
-        try:        # FAT16 file systems have only one file time
-            os.utime(destFile, (stats[stat.ST_ATIME], stats[stat.ST_MTIME]))
-        except:
-            pass
-        try:        
-            os.chown(destFile, stats[stat.ST_UID], stats[stat.ST_GID])
-        except:
-            pass
-
-def ensure(dirct):
-    dirnm = dirct
-    plist = []
-    try:
-        while not os.path.exists(dirnm):
-            dirnm, base = os.path.split(dirnm)
-            if base == '':
-                break
-            plist.insert(0, base)
-        for d in plist:
-            dirnm = os.path.join(dirnm, d)
-            os.mkdir(dirnm)
-    except:
-        return 0
-    return 1
-
-def getinstalldir(prompt="Enter an installation directory: "):
-    while 1:
-        installdir = raw_input(prompt)
-        installdir = os.path.normpath(installdir)
-        if ensure(installdir):
-            break
-        else:
-            print installdir, "is not a valid pathname"
-            r = raw_input("Try again (y/n)?: ")
-            if r in 'nN':
-                sys.exit(0)
-    return installdir
-
-def installCArchive(nm, basedir, suffixdir):
-    import carchive_rt
-    fulldir = os.path.join(basedir, suffixdir)
-    if ensure(fulldir):
-        pkg = carchive_rt.CArchive(nm)
-        for fnm in pkg.contents():
-            stuff = pkg.extract(fnm)[1]
-            outnm = os.path.join(fulldir, fnm)
-            if ensure(os.path.dirname(outnm)):
-                open(outnm, 'wb').write(stuff)
-        pkg = None
-        os.remove(nm)
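
ensure() above hand-rolls a recursive mkdir, and copyFile() is a buffered copy that also clones mode and timestamps; the same behaviour sketched with the standard library (modern Python, not the original code; copy_file and the ensure shown here are illustrative equivalents):

import os, shutil

def ensure(dirname):
    # Like ensure() above: create the directory chain, return 1 on success.
    try:
        os.makedirs(dirname, exist_ok=True)
        return 1
    except OSError:
        return 0

def copy_file(src, dst, append=False):
    # shutil.copy2 preserves mode and timestamps like the original copyFile();
    # appending still needs an explicit streamed copy.
    if append:
        with open(src, 'rb') as i, open(dst, 'ab') as o:
            shutil.copyfileobj(i, o)
    else:
        shutil.copy2(src, dst)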

+ 0 - 85
direct/src/pyinst/ltoc.py

@@ -1,85 +0,0 @@
-import os, sys, UserList
-import finder, tocfilter, resource
-
-class lTOC(UserList.UserList):
-    """ A class for managing lists of resources.
-        Should be a UserList subclass. Doh. 
-        Like a list, but has merge(other) and filter() methods """
-    def __init__(self, reslist=None, filters=None):
-        UserList.UserList.__init__(self, reslist)
-        self.filters = []
-        if filters is not None:
-            self.filters = filters[:]
-    def prepend(self, res):
-        self.data.insert(0, res)
-    def merge(self, other):
-        ' merge in another ltoc, discarding dups and preserving order '
-        tmp = {}
-        for res in self.data:
-            tmp[res.name] = 0
-        for res in other:
-            if tmp.get(res.name, 1):
-                self.data.append(res)
-                tmp[res.name] = 0
-    def filter(self):
-        ' invoke all filters '
-        for i in range(len(self.data)):
-            res = self.data[i]
-            if res:
-                for f in self.filters:
-                    if f.matches(res):
-                        self.data[i] = None
-                        break
-        self.data = filter(None, self.data)
-        return self
-    def unique(self):
-        ' remove all duplicate entries, preserving order '
-        new = self.__class__()
-        new.merge(self)
-        self.data = new.data
-    def toList(self):
-        ' return self as a list of (name, path, typ) '
-        tmp = []
-        for res in self.data:
-            tmp.append((res.name, res.path, res.typ))
-        return tmp
-    def addFilter(self, filter):
-        if type(filter) == type(''):
-            self.filters.append(finder.makeresource(filter).asFilter())
-        else:
-            if type(filter) == type(self):
-                if isinstance(filter, tocfilter._Filter):
-                    self.filters.append(filter)
-                elif isinstance(filter, resource.resource):
-                    self.filters.append(filter.asFilter())
-                else:
-                    raise ValueError, "can't make filter from %s", repr(filter)
-            else:
-                raise ValueError, "can't make filter from %s", repr(filter)
-        print " added filter", repr(self.filters[-1])             
-            
-   
-if __name__ == '__main__':
-    sys.path.insert(0, '.')
-    import finder
-    import pprint
-    s = finder.scriptresource('finder.py', './finder.py')
-    ##    pyltoc = lTOC(s.modules)
-    ##    l1 = pyltoc.toList()
-    ##    print "Raw py ltoc:", pprint.pprint(l1)
-    ##    f1 = ModFilter(['dospath', 'macpath', 'posixpath'])
-    ##    l2 = lTOC(s.modules).filter(f1).toList()
-    ##    print "Filter out dospath, macpath, posixpath:", pprint.pprint(l2)
-    ##    f2 = DirFilter(['.'])
-    ##    l3 = lTOC(s.modules).filter(f2).toList()
-    ##    print "Filter out current dir:", pprint.pprint(l3)
-    ##    f3 = StdLibFilter()
-    ##    l4 = lTOC(s.modules).filter(f3).toList()
-    ##    print "Filter out stdlib:", pprint.pprint(l4)
-    ##    #print "Filter out current dir and stdlib:", lTOC(s.modules).filter(f2, f3).toList()
-    binltoc = lTOC(s.binaries)
-    print "Raw bin ltoc:", pprint.pprint(binltoc.toList())
-    binltoc.addFilter('c:/winnt/system32')
-    pprint.pprint(binltoc.filter().toList())
-    
-    

+ 0 - 42
direct/src/pyinst/mkarchive.py

@@ -1,42 +0,0 @@
-#import MkWrap
-import imputil
-import strop
-import zlib
-import os
-import marshal
-
-class MkImporter:
-    def __init__(self, db, viewnm='pylib'):
-        self.db = db
-        self.view = db.getas(viewnm+'[name:S, ispkg:I, code:M]') # an MkWrap view object
-    def setImportHooks(self):
-        imputil.FuncImporter(self.get_code).install()
-    def get_code(self, parent, modname, fqname):
-        if self.view is None:
-            return None
-        ndx = self.view.search(name=fqname)
-        if ndx < len(self.view):
-            row = self.view[ndx]
-            if row.name == fqname:
-                return (row.ispkg, marshal.loads(zlib.decompress(row.code)))
-        return None
-    def build(self, lTOC):
-        for entry in lTOC:
-            nm, fnm = entry[0], entry[1]
-            ispkg = os.path.splitext(os.path.basename(fnm))[0] == '__init__'
-            ndx = self.view.search(name=nm)
-            if ndx < len(self.view):
-                row = self.view[ndx]
-                if row.name != nm:
-                    self.view.insert(ndx, {})
-                    row = self.view[ndx]
-            else:
-                ndx = self.view.append({})
-                row = self.view[ndx]
-            row.name = nm
-            row.ispkg = ispkg
-            f = open(fnm, 'rb')
-            f.seek(8)
-            obj = zlib.compress(f.read(), 9)
-            row.code = obj
-        self.db.commit()

+ 0 - 436
direct/src/pyinst/modulefinder.py

@@ -1,436 +0,0 @@
-"""Find modules used by a script, using introspection."""
-
-import dis
-import imp
-import marshal
-import os
-import re
-import string
-import sys
-
-if sys.platform=="win32":
-    # On Windows, we can locate modules in the registry with
-    # the help of the win32api package.
-    try:
-        import win32api
-    except ImportError:
-        print "The win32api module is not available - modules listed"
-        print "in the registry will not be found."
-        win32api = None
-
-
-IMPORT_NAME = dis.opname.index('IMPORT_NAME')
-IMPORT_FROM = dis.opname.index('IMPORT_FROM')
-
-# Modulefinder does a good job at simulating Python's import mechanism, but
-# it cannot handle __path__ modifications packages make at runtime.  Therefore
-# there is a mechanism whereby you can register extra paths in this map for a
-# package, and they will be honoured.
-
-# Note this is a mapping of package names to lists of paths.
-packagePathMap = {}
-
-# A Public interface
-def AddPackagePath(packagename, path):
-    paths = packagePathMap.get(packagename, [])
-    paths.append(path)
-    packagePathMap[packagename] = paths
-
-class Module:
-
-    def __init__(self, name, file=None, path=None):
-        self.__name__ = name
-        self.__file__ = file
-        self.__path__ = path
-        self.__code__ = None
-
-    def __repr__(self):
-        s = "Module(%s" % repr(self.__name__)
-        if self.__file__ is not None:
-            s = s + ", %s" % repr(self.__file__)
-        if self.__path__ is not None:
-            s = s + ", %s" % repr(self.__path__)
-        s = s + ")"
-        return s
-
-
-class ModuleFinder:
-
-    def __init__(self, path=None, debug=0, excludes = []):
-        if path is None:
-            path = sys.path
-        self.path = path
-        self.modules = {}
-        self.badmodules = {}
-        self.debug = debug
-        self.indent = 0
-        self.excludes = excludes
-
-    def msg(self, level, str, *args):
-        if level <= self.debug:
-            for i in range(self.indent):
-                print "   ",
-            print str,
-            for arg in args:
-                print repr(arg),
-            print
-
-    def msgin(self, *args):
-        level = args[0]
-        if level <= self.debug:
-            self.indent = self.indent + 1
-            apply(self.msg, args)
-
-    def msgout(self, *args):
-        level = args[0]
-        if level <= self.debug:
-            self.indent = self.indent - 1
-            apply(self.msg, args)
-
-    def run_script(self, pathname):
-        self.msg(2, "run_script", pathname)
-        fp = open(pathname)
-        stuff = ("", "r", imp.PY_SOURCE)
-        self.load_module('__main__', fp, pathname, stuff)
-
-    def load_file(self, pathname):
-        dir, name = os.path.split(pathname)
-        name, ext = os.path.splitext(name)
-        fp = open(pathname)
-        stuff = (ext, "r", imp.PY_SOURCE)
-        self.load_module(name, fp, pathname, stuff)
-
-    def import_hook(self, name, caller=None, fromlist=None):
-        self.msg(3, "import_hook", name, caller, fromlist)
-        parent = self.determine_parent(caller)
-        q, tail = self.find_head_package(parent, name)
-        m = self.load_tail(q, tail)
-        if not fromlist:
-            return q
-        if m.__path__:
-            self.ensure_fromlist(m, fromlist)
-
-    def determine_parent(self, caller):
-        self.msgin(4, "determine_parent", caller)
-        if not caller:
-            self.msgout(4, "determine_parent -> None")
-            return None
-        pname = caller.__name__
-        if caller.__path__:
-            parent = self.modules[pname]
-            assert caller is parent
-            self.msgout(4, "determine_parent ->", parent)
-            return parent
-        if '.' in pname:
-            i = string.rfind(pname, '.')
-            pname = pname[:i]
-            parent = self.modules[pname]
-            assert parent.__name__ == pname
-            self.msgout(4, "determine_parent ->", parent)
-            return parent
-        self.msgout(4, "determine_parent -> None")
-        return None
-
-    def find_head_package(self, parent, name):
-        self.msgin(4, "find_head_package", parent, name)
-        if '.' in name:
-            i = string.find(name, '.')
-            head = name[:i]
-            tail = name[i+1:]
-        else:
-            head = name
-            tail = ""
-        if parent:
-            qname = "%s.%s" % (parent.__name__, head)
-        else:
-            qname = head
-        q = self.import_module(head, qname, parent)
-        if q:
-            self.msgout(4, "find_head_package ->", (q, tail))
-            return q, tail
-        if parent:
-            qname = head
-            parent = None
-            q = self.import_module(head, qname, parent)
-            if q:
-                self.msgout(4, "find_head_package ->", (q, tail))
-                return q, tail
-        self.msgout(4, "raise ImportError: No module named", qname)
-        raise ImportError, "No module named " + qname
-
-    def load_tail(self, q, tail):
-        self.msgin(4, "load_tail", q, tail)
-        m = q
-        while tail:
-            i = string.find(tail, '.')
-            if i < 0: i = len(tail)
-            head, tail = tail[:i], tail[i+1:]
-            mname = "%s.%s" % (m.__name__, head)
-            m = self.import_module(head, mname, m)
-            if not m:
-                self.msgout(4, "raise ImportError: No module named", mname)
-                raise ImportError, "No module named " + mname
-        self.msgout(4, "load_tail ->", m)
-        return m
-
-    def ensure_fromlist(self, m, fromlist, recursive=0):
-        self.msg(4, "ensure_fromlist", m, fromlist, recursive)
-        for sub in fromlist:
-            if sub == "*":
-                if not recursive:
-                    all = self.find_all_submodules(m)
-                    if all:
-                        self.ensure_fromlist(m, all, 1)
-            elif not hasattr(m, sub):
-                subname = "%s.%s" % (m.__name__, sub)
-                submod = self.import_module(sub, subname, m)
-                if not submod:
-                    raise ImportError, "No module named " + subname
-
-    def find_all_submodules(self, m):
-        if not m.__path__:
-            return
-        modules = {}
-        suffixes = [".py", ".pyc", ".pyo"]
-        for dir in m.__path__:
-            try:
-                names = os.listdir(dir)
-            except os.error:
-                self.msg(2, "can't list directory", dir)
-                continue
-            for name in names:
-                mod = None
-                for suff in suffixes:
-                    n = len(suff)
-                    if name[-n:] == suff:
-                        mod = name[:-n]
-                        break
-                if mod and mod != "__init__":
-                    modules[mod] = mod
-        return modules.keys()
-
-    def import_module(self, partname, fqname, parent):
-        self.msgin(3, "import_module", partname, fqname, parent)
-        try:
-            m = self.modules[fqname]
-        except KeyError:
-            pass
-        else:
-            self.msgout(3, "import_module ->", m)
-            return m
-        if fqname in self.badmodules:
-            self.msgout(3, "import_module -> None")
-            self.badmodules[fqname][parent.__name__] = None
-            return None
-        try:
-            fp, pathname, stuff = self.find_module(partname,
-                                                   parent and parent.__path__)
-        except ImportError:
-            self.msgout(3, "import_module ->", None)
-            return None
-        try:
-            m = self.load_module(fqname, fp, pathname, stuff)
-        finally:
-            if fp: fp.close()
-        if parent:
-            setattr(parent, partname, m)
-        self.msgout(3, "import_module ->", m)
-        return m
-
-    def load_module(self, fqname, fp, pathname, (suffix, mode, type)):
-        self.msgin(2, "load_module", fqname, fp and "fp", pathname)
-        if type == imp.PKG_DIRECTORY:
-            m = self.load_package(fqname, pathname)
-            self.msgout(2, "load_module ->", m)
-            return m
-        if type == imp.PY_SOURCE:
-            co = compile(fp.read()+'\n', pathname, 'exec')
-        elif type == imp.PY_COMPILED:
-            if fp.read(4) != imp.get_magic():
-                self.msgout(2, "raise ImportError: Bad magic number", pathname)
-                raise ImportError, "Bad magic number in %s", pathname
-            fp.read(4)
-            co = marshal.load(fp)
-        else:
-            co = None
-        m = self.add_module(fqname)
-        m.__file__ = pathname
-        if co:
-            m.__code__ = co
-            self.scan_code(co, m)
-        self.msgout(2, "load_module ->", m)
-        return m
-
-    def scan_code(self, co, m):
-        code = co.co_code
-        n = len(code)
-        i = 0
-        lastname = None
-        while i < n:
-            c = code[i]
-            i = i+1
-            op = ord(c)
-            if op >= dis.HAVE_ARGUMENT:
-                oparg = ord(code[i]) + ord(code[i+1])*256
-                i = i+2
-            if op == IMPORT_NAME:
-                name = lastname = co.co_names[oparg]
-                if lastname not in self.badmodules:
-                    try:
-                        self.import_hook(name, m)
-                    except ImportError, msg:
-                        self.msg(2, "ImportError:", str(msg))
-                        if name not in self.badmodules:
-                            self.badmodules[name] = {}
-                        self.badmodules[name][m.__name__] = None
-            elif op == IMPORT_FROM:
-                name = co.co_names[oparg]
-                assert lastname is not None
-                if lastname not in self.badmodules:
-                    try:
-                        self.import_hook(lastname, m, [name])
-                    except ImportError, msg:
-                        self.msg(2, "ImportError:", str(msg))
-                        fullname = lastname + "." + name
-                        if fullname not in self.badmodules:
-                            self.badmodules[fullname] = {}
-                        self.badmodules[fullname][m.__name__] = None
-            else:
-                lastname = None
-        for c in co.co_consts:
-            if isinstance(c, type(co)):
-                self.scan_code(c, m)
-
-    def load_package(self, fqname, pathname):
-        self.msgin(2, "load_package", fqname, pathname)
-        m = self.add_module(fqname)
-        m.__file__ = pathname
-        m.__path__ = [pathname]
-
-        # As per comment at top of file, simulate runtime __path__ additions.
-        m.__path__ = m.__path__ + packagePathMap.get(fqname, [])
-
-        fp, buf, stuff = self.find_module("__init__", m.__path__)
-        self.load_module(fqname, fp, buf, stuff)
-        self.msgout(2, "load_package ->", m)
-        return m
-
-    def add_module(self, fqname):
-        if fqname in self.modules:
-            return self.modules[fqname]
-        self.modules[fqname] = m = Module(fqname)
-        return m
-
-    def find_module(self, name, path):
-        if name in self.excludes:
-            self.msgout(3, "find_module -> Excluded")
-            raise ImportError, name
-
-        if path is None:
-            if name in sys.builtin_module_names:
-                return (None, None, ("", "", imp.C_BUILTIN))
-
-            # Emulate the Registered Module support on Windows.
-            if sys.platform=="win32" and win32api is not None:
-                HKEY_LOCAL_MACHINE = 0x80000002
-                try:
-                    pathname = win32api.RegQueryValue(HKEY_LOCAL_MACHINE, "Software\\Python\\PythonCore\\%s\\Modules\\%s" % (sys.winver, name))
-                    fp = open(pathname, "rb")
-                    # XXX - To do - remove the hard-coding of C_EXTENSION.
-                    stuff = "", "rb", imp.C_EXTENSION
-                    return fp, pathname, stuff
-                except win32api.error:
-                    pass
-
-            path = self.path
-        return imp.find_module(name, path)
-
-    def report(self):
-        print
-        print "  %-25s %s" % ("Name", "File")
-        print "  %-25s %s" % ("----", "----")
-        # Print modules found
-        keys = self.modules.keys()
-        keys.sort()
-        for key in keys:
-            m = self.modules[key]
-            if m.__path__:
-                print "P",
-            else:
-                print "m",
-            print "%-25s" % key, m.__file__ or ""
-
-        # Print missing modules
-        keys = self.badmodules.keys()
-        keys.sort()
-        for key in keys:
-            # ... but not if they were explicitly excluded.
-            if key not in self.excludes:
-                mods = self.badmodules[key].keys()
-                mods.sort()
-                print "?", key, "from", string.join(mods, ', ')
-
-
-def test():
-    # Parse command line
-    import getopt
-    try:
-        opts, args = getopt.getopt(sys.argv[1:], "dmp:qx:")
-    except getopt.error, msg:
-        print msg
-        return
-
-    # Process options
-    debug = 1
-    domods = 0
-    addpath = []
-    exclude = []
-    for o, a in opts:
-        if o == '-d':
-            debug = debug + 1
-        if o == '-m':
-            domods = 1
-        if o == '-p':
-            addpath = addpath + string.split(a, os.pathsep)
-        if o == '-q':
-            debug = 0
-        if o == '-x':
-            exclude.append(a)
-
-    # Provide default arguments
-    if not args:
-        script = "hello.py"
-    else:
-        script = args[0]
-
-    # Set the path based on sys.path and the script directory
-    path = sys.path[:]
-    path[0] = os.path.dirname(script)
-    path = addpath + path
-    if debug > 1:
-        print "path:"
-        for item in path:
-            print "   ", repr(item)
-
-    # Create the module finder and turn its crank
-    mf = ModuleFinder(path, debug, exclude)
-    for arg in args[1:]:
-        if arg == '-m':
-            domods = 1
-            continue
-        if domods:
-            if arg[-2:] == '.*':
-                mf.import_hook(arg[:-2], None, ["*"])
-            else:
-                mf.import_hook(arg)
-        else:
-            mf.load_file(arg)
-    mf.run_script(script)
-    mf.report()
-
-
-if __name__ == '__main__':
-    try:
-        test()
-    except KeyboardInterrupt:
-        print "\n[interrupt]"

+ 0 - 317
direct/src/pyinst/resource.py

@@ -1,317 +0,0 @@
-import os
-import string
-import archivebuilder
-import carchive
-import tocfilter
-import bindepend
-import finder
-
-_cache = {}
-
-def makeresource(name, xtrapath=None):
-    """Factory function that returns a resource subclass.
-
-       NAME is the logical or physical name of a resource.
-       XTRAPATH is a path or list of paths to search first.
-       Returns one of the resource subclasses.
-       Warning - logical names can conflict; archive might return a directory
-       when the module archive.py was desired."""
-    typ, nm, fullname = finder.identify(name, xtrapath)
-    fullname = os.path.normpath(fullname)
-    if fullname in _cache:
-        return _cache[fullname]
-    elif typ in (finder.SCRIPT, finder.GSCRIPT):
-        rsrc = scriptresource(nm, fullname)
-    elif typ == finder.MODULE:
-        rsrc = moduleresource(nm, fullname)
-    elif typ == finder.PACKAGE:
-        rsrc = pkgresource(nm, fullname)
-    elif typ in (finder.PBINARY, finder.BINARY):
-        rsrc = binaryresource(nm, fullname)
-    elif typ == finder.ZLIB:
-        rsrc = zlibresource(nm, fullname)
-    elif typ == finder.DIRECTORY:
-        rsrc = dirresource(nm, fullname)
-    else:
-        try:
-            carchive.CArchive(fullname)
-        except:
-            rsrc = dataresource(nm, fullname)
-        else:
-            rsrc = archiveresource(nm, fullname)
-    _cache[fullname] = rsrc
-    return rsrc
-
-class resource:
-    """ Base class for all resources.
-
-        contents() returns a list of what's contained (e.g. files in dirs)
-        dependencies() for Python resources returns a list of moduleresources
-         and binaryresources """
-    def __init__(self, name, path, typ):
-        """NAME is the logical name of the resource.
-           PATH is the full path to the resource.
-           TYP is the type code.
-           No editing or sanity checks."""
-        self.name = name
-        self.path = path
-        self.typ = typ
-    def __repr__(self):
-        return "(%(name)s, %(path)s, %(typ)s)" % self.__dict__
-    def contents(self):
-        """A list of resources within this resource.
-
-           Overridable.
-           Base implementation returns [self]"""
-        return [self]
-    def dependencies(self):
-        """A list of resources this resource requires.
-
-           Overridable.
-           Base implementation returns []"""
-        return []
-    def __cmp__(self, other):
-        if not isinstance(other, self.__class__):
-            return -1
-        return cmp((self.typ, self.name), (other.typ, other.name))
-    def asFilter(self):
-        """Create a tocfilter based on self.
-
-           Pure virtual"""
-        raise NotImplementedError
-    def asSource(self):
-        """Return self in source form.
-
-           Base implementation returns self"""
-        return self
-    def asBinary(self):
-        """Return self in binary form.
-
-           Base implementation returns self"""
-        return self
-
-class pythonresource(resource):
-    """An empty base class.
-
-       Used to classify resources."""
-    pass
-
-
-class scriptresource(pythonresource):
-    """ A top-level python resource.
-
-        Has (lazily computed) attributes, modules and binaries, which together
-        are the script's dependencies(). """
-    def __init__(self, name, fullname):
-        resource.__init__(self, name, fullname, 's')
-    def __getattr__(self, name):
-        if name == 'modules':
-            print "Analyzing python dependencies of", self.name, self.path
-            self.modules = []
-            self._binaries = []
-            nodes = string.split(self.name, '.')[:-1] # MEInc.Dist.archive -> ['MEInc', 'Dist']
-            for i in range(len(nodes)):
-                nm = string.join(nodes[:i+1], '.')
-                rsrc = makeresource(nm+'.__init__')
-                rsrc.name = nm
-                self.modules.append(rsrc)
-            for (nm, path) in archivebuilder.Dependencies(self.path):
-                path = os.path.normcase(os.path.abspath(path))
-                if os.path.splitext(path)[1] == '.py':
-                    self.modules.append(moduleresource(nm, path))
-                else:
-                    self._binaries.append(binaryresource(nm, path))
-            return self.modules
-        elif name == 'binaries':
-            x = self.modules
-            tmp = {}
-            for br in self._binaries:
-                tmp[br.name] = br
-                for br2 in br.dependencies():
-                    tmp[br2.name] = br2
-            self.binaries = tmp.values()
-            return self.binaries
-        else:
-            raise AttributeError, "%s" % name
-    def dependencies(self):
-        """Return all dependencies (Python and binary) of self."""
-        return self.modules + self.binaries
-    def asFilter(self):
-        """Return a ModFilter based on self."""
-        return tocfilter.ModFilter([self.name])
-    def asSource(self):
-        """Return self as a dataresource (ie, a text file wrapper)."""
-        r = dataresource(self.path)
-        r.name = apply(os.path.join, string.split(self.name, '.')[:-1]+[r.name])
-        return r
-
-class moduleresource(scriptresource):
-    """ A module resource (differs from script in that it will generally
-        be worked with as a .pyc instead of in source form) """
-    def __init__(self, name, fullname):
-        resource.__init__(self, name, fullname, 'm')
-    def asBinary(self):
-        """Return self as a dataresource (ie, a binary file wrapper)."""
-        r = dataresource(self.path)
-        r.name = os.path.basename(r.name)
-        r.typ = 'b'
-        return r
-    def asSource(self):
-        """Return self as a scriptresource (ie, uncompiled form)."""
-        return scriptresource(self.name, self.path[:-1]).asSource()
-
-class binaryresource(resource):
-    """A .dll or .pyd.
-
-       dependencies() yields more binaryresources """
-    def __init__(self, name, fullname):
-        if string.find(name, '.') == -1:
-            pth, bnm = os.path.split(fullname)
-            junk, ext = os.path.splitext(bnm)
-            fullname = os.path.join(pth, name + ext)
-        resource.__init__(self, name, fullname, 'b')
-        self._depends = None
-    def dependencies(self):
-        """Return a list of binary dependencies."""
-        if self._depends is not None:
-            return self._depends
-        self._depends = []
-        for (lib, path) in bindepend.Dependencies([(self.name, self.path)]):
-            self._depends.append(binaryresource(lib, path))
-        return self._depends
-    def asFilter(self):
-        """Create a FileFilter from self."""
-        return tocfilter.FileFilter([self.name])
-
-class dataresource(resource):
-    """A subclass for arbitrary files. """
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'x')
-    def asFilter(self):
-        """Create a FileFilter from self."""
-        return tocfilter.FileFilter([self.name])
-
-class archiveresource(dataresource):
-    """A sublcass for CArchives. """
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'a')
-
-class zlibresource(dataresource):
-    """A subclass for ZlibArchives. """
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'z')
-
-class dirresource(resource):
-    """A sublcass for a directory.
-
-       Generally transformed to a list of files through
-        contents() and filtered by file extensions or resource type.
-        Note that contents() is smart enough to regard a .py and .pyc
-        as the same resource. """
-    RECURSIVE = 0
-    def __init__(self, name, fullname=None):
-        resource.__init__(self, name, fullname or name, 'd')
-        self._contents = None
-    def contents(self, prefix=''):
-        """Return the list of (typed) resources in self.name"""
-        if self._contents is not None:
-            return self._contents
-        self._contents = []
-        flist = os.listdir(self.path)
-        for fnm in flist:
-            try:
-                bnm, ext = os.path.splitext(fnm)
-                if ext == '.py' and (bnm+'.pyc' in flist or bnm+'.pyo' in flist):
-                    pass
-                elif ext == '.pyo' and (bnm + '.pyc' in flist):
-                    pass
-                else:
-                    rsrc = makeresource(os.path.join(self.path, fnm))
-                    if isinstance(rsrc, pkgresource):
-                        rsrc = self.__class__(rsrc.path)
-                    if self.RECURSIVE:
-                        if isinstance(rsrc, moduleresource) or isinstance(rsrc, scriptresource):
-                            rsrc = rsrc.asSource()
-                            fnm = os.path.basename(rsrc.path)
-                        rsrc.name = os.path.join(prefix, fnm)
-                        if rsrc.typ == 'd':
-                            rsrc.RECURSIVE = 1
-                            self._contents.extend(rsrc.contents(rsrc.name))
-                        else:
-                            self._contents.append(rsrc)
-                    else:
-                        self._contents.append(rsrc)
-            except ValueError, e:
-                raise RuntimeError, "Can't make resource from %s\n ValueError: %s" \
-                      % (os.path.join(self.path, fnm), repr(e.args))
-        return self._contents
-    def asFilter(self):
-        return tocfilter.DirFilter([self.path])
-
-class treeresource(dirresource):
-    """A subclass for a directory and subdirectories."""
-    RECURSIVE = 1
-    def __init__(self, name, fullname=None):
-        dirresource.__init__(self, name, fullname)
-
-class pkgresource(pythonresource):
-    """A Python package.
-
-        Note that contents() can be fooled by fancy __path__ statements. """
-    def __init__(self, nm, fullname):
-        resource.__init__(self, nm, fullname, 'p')
-        self._contents = None
-        self._depends = None
-    def contents(self, parent=None):
-        """Return a list of subpackages and modules in self."""
-        if self._contents is not None:
-            return self._contents
-        if parent is None:
-            parent = self.name
-        self._contents = []
-        cheat = treeresource(self.path)
-        for rsrc in cheat.contents():
-            if os.path.splitext(rsrc.path)[1] == '.py':
-                rsrc = moduleresource(string.replace(rsrc.name[:-3], os.sep, '.'),
-                                      rsrc.path)
-                if rsrc.name[-8:] == '__init__':
-                    rsrc.name = rsrc.name[:-9]
-            elif os.path.isdir(rsrc.path):
-                rsrc = makeresource(rsrc.path)
-            else:
-                continue
-            if rsrc.name:
-                rsrc.name = parent + '.' + rsrc.name
-            else:
-                rsrc.name = parent
-            if rsrc.typ == 'm':
-                self._contents.append(rsrc)
-            elif rsrc.typ == 'p':
-                self._contents.extend(rsrc.contents(rsrc.name))
-        return self._contents
-    def dependencies(self):
-        """Return the list of accumulated dependencies of all modules in self."""
-        if self._depends is not None:
-            return self._depends
-        self._depends = []
-        tmp = {}
-        for rsrc in self.contents():
-            for r in rsrc.dependencies():
-                tmp[r.name] = r
-        self._depends = tmp.values()
-        return self._depends
-    def asFilter(self):
-        """Create a PkgFilter from self."""
-        return tocfilter.PkgFilter([os.path.dirname(self.path)])
-
-
-
-
-
-
-
-if __name__ == '__main__':
-    s = scriptresource('finder.py', './finder.py')
-    print "s.modules:", s.modules
-    print "s.binaries:", s.binaries
-
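
resource.py, deleted above, classified files into typed resource objects through a single factory (makeresource) that normalizes the path and memoizes the result in a module-level cache. A rough, generic sketch of that factory-plus-cache pattern (names and types below are illustrative, not taken from the removed file):

    import os

    _cache = {}

    def make_resource(path):
        # Normalize so different spellings of one path share a single entry.
        key = os.path.normcase(os.path.normpath(os.path.abspath(path)))
        if key in _cache:
            return _cache[key]
        kind = "directory" if os.path.isdir(key) else "file"
        _cache[key] = (kind, key)
        return _cache[key]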

+ 0 - 131
direct/src/pyinst/tocfilter.py

@@ -1,131 +0,0 @@
-import os
-import finder
-import re
-import sys
-
-def makefilter(name, xtrapath=None):
-    typ, nm, fullname = finder.identify(name, xtrapath)
-    if typ in (finder.SCRIPT, finder.GSCRIPT, finder.MODULE):
-        return ModFilter([os.path.splitext(nm)[0]])
-    if typ == finder.PACKAGE:
-        return PkgFilter([fullname])
-    if typ == finder.DIRECTORY:
-        return DirFilter([fullname])
-    if typ in (finder.BINARY, finder.PBINARY):
-        return FileFilter([nm])
-    return FileFilter([fullname])
-  
-class _Filter:
-    def __repr__(self):
-        return '<'+self.__class__.__name__+' '+repr(self.elements)+'>'
-    
-class _NameFilter(_Filter):
-    """ A filter mixin that matches (exactly) on name """
-    def matches(self, res):
-        return self.elements.get(res.name, 0)
-        
-class _PathFilter(_Filter):
-    """ A filter mixin that matches if the resource is below any of the paths"""
-    def matches(self, res):
-        p = os.path.normcase(os.path.abspath(res.path))
-        while len(p) > 3:
-            p = os.path.dirname(p)
-            if self.elements.get(p, 0):
-                return 1
-        return 0
-        
-class _ExtFilter(_Filter):
-    """ A filter mixin that matches based on file extensions (either way) """
-    include = 0
-    def matches(self, res):
-        fnd = self.elements.get(os.path.splitext(res.path)[1], 0)
-        if self.include:
-            return not fnd
-        return fnd
-    
-class _TypeFilter(_Filter):
-    """ A filter mixin that matches on resource type (either way) """
-    include = 0
-    def matches(self, res):
-        fnd = self.elements.get(res.typ, 0)
-        if self.include:
-            return not fnd
-        return fnd
-
-class _PatternFilter(_Filter):
-    """ A filter that matches if re.search succeeds on the resource path """
-    def matches(self, res):
-        for regex in self.elements:
-            if regex.search(res.path):
-                return 1
-        return 0
-    
-class ExtFilter(_ExtFilter):
-    """ A file extension filter.
-        ExtFilter(extlist, include=0)
-        where extlist is a list of file extensions """
-    def __init__(self, extlist, include=0):
-        self.elements = {}
-        for ext in extlist:
-            if ext[0:1] != '.':
-                ext = '.'+ext
-            self.elements[ext] = 1
-        self.include = include
-
-class TypeFilter(_TypeFilter):
-    """ A filter for resource types.
-        TypeFilter(typlist, include=0)
-        where typlist is a subset of ['a','b','d','m','p','s','x','z'] """
-    def __init__(self, typlist, include=0):
-        self.elements = {}
-        for typ in typlist:
-            self.elements[typ] = 1
-        self.include = include
-
-class FileFilter(_NameFilter):
-    """ A filter for data files """
-    def __init__(self, filelist):
-        self.elements = {}
-        for f in filelist:
-            self.elements[f] = 1
-              
-class ModFilter(_NameFilter):
-    """ A filter for Python modules.
-        ModFilter(modlist) where modlist is eg ['macpath', 'dospath'] """
-    def __init__(self, modlist):
-        self.elements = {}
-        for mod in modlist:
-            self.elements[mod] = 1
-            
-class DirFilter(_PathFilter):
-    """ A filter based on directories.
-        DirFilter(dirlist)
-        dirs may be relative and will be normalized.
-        Subdirectories of dirs will be excluded. """
-    def __init__(self, dirlist):
-        self.elements = {}
-        for pth in dirlist:
-            pth = os.path.normcase(os.path.abspath(pth))
-            self.elements[pth] = 1
-            
-class PkgFilter(_PathFilter):
-    """At this time, identical to a DirFilter (being lazy) """
-    def __init__(self, pkglist):
-        #warning - pkgs are expected to be full directories
-        self.elements = {}
-        for pkg in pkglist:
-            pth = os.path.normcase(os.path.abspath(pkg))
-            self.elements[pth] = 1
-            
-class StdLibFilter(_PathFilter):
-    """ A filter that excludes anything found in the standard library """
-    def __init__(self):
-        pth = os.path.normcase(os.path.join(sys.exec_prefix, 'lib'))
-        self.elements = {pth:1}
-     
-class PatternFilter(_PatternFilter):
-    """ A filter that excludes if any pattern is found in resource's path """
-    def __init__(self, patterns):
-        self.elements = []
-        for pat in patterns:
-            self.elements.append(re.compile(pat))
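
tocfilter.py, removed above, expressed inclusion/exclusion rules as small predicate objects matched by name, directory prefix, file extension, or regular expression. A minimal sketch of the same idea in current Python (class and method names are illustrative):

    import os
    import re

    class ExtFilter:
        # Matches resources by file extension.
        def __init__(self, extensions):
            self.extensions = {e if e.startswith(".") else "." + e for e in extensions}
        def matches(self, path):
            return os.path.splitext(path)[1] in self.extensions

    class PatternFilter:
        # Matches resources whose path matches any of the given regexes.
        def __init__(self, patterns):
            self.patterns = [re.compile(p) for p in patterns]
        def matches(self, path):
            return any(p.search(path) for p in self.patterns)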

+ 0 - 557
direct/src/showbase/BpDb.py

@@ -1,557 +0,0 @@
-
-import inspect
-import sys
-
-#Bpdb - breakpoint debugging system (kanpatel - 04/2010)
-class BpMan:
-    def __init__(self):
-        self.bpInfos = {}
-    
-    def partsToPath(self, parts):
-        cfg = parts.get('cfg')
-        grp = parts.get('grp')
-        id = parts.get('id','*')
-        path = ''
-        if cfg:
-            path += '%s'%(cfg,)
-            if grp or id:
-                path += '::'
-        if grp:
-            path += '%s'%(grp,)
-        if isinstance(id, int):
-            path += '(%s)'%(id,)
-        elif grp:
-            path += '.%s'%(id,)
-        else:
-            path += '%s'%(id,)
-        return path
-
-    def pathToParts(self, path=None):
-        parts = {'cfg':None, 'grp':None, 'id':None}
-
-        #verify input        
-        if not isinstance(path, type('')):
-            assert not "error: argument must be string of form '[cfg::][grp.]id'"
-            return parts
-
-        #parse cfg                
-        tokens = path.split('::')
-        if (len(tokens) > 1) and (len(tokens[0]) > 0):
-            parts['cfg'] = tokens[0]
-            path = tokens[1]
-            
-        #parse grp
-        tokens = path.split('.')
-        if (len(tokens) == 1):
-            tokens = path.rsplit(')', 1)
-            if (len(tokens) > 1) and (tokens[-1] == ''):
-                tokens = tokens[-2].rsplit('(', 1)
-                if (len(tokens) > 1):
-                    try:
-                        verifyInt = int(tokens[-1])
-                        parts['grp'] = tokens[0]
-                        path = tokens[-1]
-                    except:
-                        pass
-        elif (len(tokens) > 1) and (len(tokens[0]) > 0):
-            parts['grp'] = tokens[0]
-            path = tokens[1]
-
-        #parse id
-        if (len(path) > 0):
-            parts['id'] = path
-        if parts['id'] == '*':
-            parts['id'] = None
-
-        #done
-        return parts
-
-    def bpToPath(self, bp):
-        if type(bp) is type(''):
-            bp = self.pathToParts(bp)
-        return self.partsToPath(bp)
-        
-    def bpToParts(self, bp):
-        if type(bp) is type({}):
-            bp = self.partsToPath(bp)
-        return self.pathToParts(bp)
-        
-    def makeBpInfo(self, grp, id):
-        self.bpInfos.setdefault(grp, {None:{},})
-        self.bpInfos[grp].setdefault(id, {})
-
-    def getEnabled(self, bp):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(parts['grp'], parts['id'])
-        if not self.bpInfos[grp][None].get('enabled', True):
-            return False
-        if not self.bpInfos[grp][id].get('enabled', True):
-            return False
-        return True
-
-    def setEnabled(self, bp, enabled=True):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        self.bpInfos[grp][id]['enabled'] = enabled
-        return enabled
-
-    def toggleEnabled(self, bp):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        newEnabled = not self.bpInfos[grp][id].get('enabled', True)
-        self.bpInfos[grp][id]['enabled'] = newEnabled
-        return newEnabled
-
-    def getIgnoreCount(self, bp, decrement=False):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        ignoreCount = self.bpInfos[grp][id].get('ignoreCount', 0)
-        if ignoreCount > 0 and decrement:
-            self.bpInfos[grp][id]['ignoreCount'] = ignoreCount - 1
-        return ignoreCount
-
-    def setIgnoreCount(self, bp, ignoreCount=0):
-        if not isinstance(ignoreCount, int):
-            print 'error: first argument should be integer ignoreCount'
-            return
-            
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        self.bpInfos[grp][id]['ignoreCount'] = ignoreCount
-        return ignoreCount
-
-    def getLifetime(self, bp):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        lifetime = self.bpInfos[grp][id].get('lifetime', -1)
-        return lifetime
-        
-    def setLifetime(self, bp, newLifetime):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        self.bpInfos[grp][id]['lifetime'] = newLifetime
-        return newLifetime
-        
-    def decLifetime(self, bp):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        lifetime = self.bpInfos[grp][id].get('lifetime', -1)
-        if lifetime > 0:
-            lifetime = lifetime - 1
-        self.bpInfos[grp][id]['lifetime'] = lifetime
-        return lifetime
-
-    def getHitCount(self, bp):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        return self.bpInfos[grp][id].get('count', 0)
-        
-    def setHitCount(self, bp, newHitCount):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        self.bpInfos[grp][id]['count'] = newHitCount
-        
-    def incHitCount(self, bp):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        self.bpInfos[grp][id]['count'] = self.bpInfos[grp][id].get('count', 0) + 1
-        
-    def resetBp(self, bp):
-        parts = self.bpToParts(bp)
-        grp, id = parts['grp'], parts['id']
-        self.makeBpInfo(grp, id)
-        self.bpInfos[grp][id] = {}
-        if id is None:
-            del self.bpInfos[grp]
-    
-class BpDb:
-    def __init__(self):
-        self.enabled = True
-        self.cfgInfos = { None:True }
-        self.codeInfoCache = {}
-        self.bpMan = BpMan()
-        self.lastBp = None
-        self.pdbAliases = {}
-        self.configCallback = None
-        self.enabledCallback = None
-
-    def setEnabledCallback(self, callback):
-        self.enabledCallback = callback
-
-    def verifyEnabled(self):
-        if self.enabledCallback:
-            return self.enabledCallback()
-        return True
-
-    def setConfigCallback(self, callback):
-        self.configCallback = callback
-                
-    def verifySingleConfig(self, cfg):
-        if cfg in self.cfgInfos:
-            return self.cfgInfos[cfg]
-        return not self.configCallback or self.configCallback(cfg)
-
-    def verifyConfig(self, cfg):
-        cfgList = choice(isinstance(cfg, tuple), cfg, (cfg,))
-        passedCfgs = [c for c in cfgList if self.verifySingleConfig(c)]
-        return (len(passedCfgs) > 0)
-
-    def toggleConfig(self, cfg):
-        self.cfgInfos[cfg] = not self.verifyConfig(cfg)
-        return self.cfgInfos[cfg]
-
-    def resetConfig(self, cfg):
-        self.cfgInfos.pop(cfg, None)
-
-    #setup bpdb prompt commands
-    def displayHelp(self):
-        print 'You may use normal pdb commands plus the following:'
-        #print '    cmd  [param <def>]  [cmd] does )this( with [param] (default is def)'
-        #print '    -----------------------------------------------------------------------'
-        print '    _i   [n <0> [, path=<curr>]] set ignore count for bp [path] to [n]'
-        print '    _t   [path <curr>]   toggle bp [path]'
-        print '    _tg  [grp <curr>]    toggle grp'
-        print '    _tc  [cfg <curr>]    toggle cfg'
-        print '    _z   [path <curr>]   clear all settings for bp [path]'
-        print '    _zg  [grp <curr>]    clear all settings for grp'
-        print '    _zc  [cfg <curr>]    clear all settings for cfg (restore .prc setting)'
-        print '    _h                   displays this usage help'
-        print '    _ua                  unalias these commands from pdb'
-
-    def addPdbAliases(self):
-        self.makePdbAlias('_i', 'bpdb._i(%*)')
-        self.makePdbAlias('_t', 'bpdb._t(%*)')
-        self.makePdbAlias('_tg', 'bpdb._tg(%*)')
-        self.makePdbAlias('_tc', 'bpdb._tc(%*)')
-        self.makePdbAlias('_z', 'bpdb._z(%*)')
-        self.makePdbAlias('_zg', 'bpdb._zg(%*)')
-        self.makePdbAlias('_zc', 'bpdb._zc(%*)')
-        self.makePdbAlias('_h', 'bpdb.displayHelp()')
-        self.makePdbAlias('_ua', 'bpdb.removePdbAliases()')
-
-    def makePdbAlias(self, aliasName, aliasCmd):
-        self.pdbAliases[aliasName] = aliasCmd
-        self.pdb.do_alias('%s %s'%(aliasName,aliasCmd))
-
-    def removePdbAliases(self):
-        for aliasName in self.pdbAliases.iterkeys():
-            self.pdb.do_unalias(aliasName)
-        self.pdbAliases = {}
-        print '(bpdb aliases removed)'
-
-    #handle bpdb prompt commands by forwarding to bpMan
-    def _e(self, *args, **kwargs):
-        bp = self._getArg(args, [type(''),type({}),], kwargs, ['path','bp','name',], self.lastBp)
-        enabled = self._getArg(args, [type(True),type(1),], kwargs, ['enabled','on',], True)
-        newEnabled = self.bpMan.setEnabled(bp, enabled)
-        print "'%s' is now %s."%(self.bpMan.bpToPath(bp),choice(newEnabled,'enabled','disabled'),)
-        
-    def _i(self, *args, **kwargs):
-        bp = self._getArg(args, [type(''),type({}),], kwargs, ['path','bp','name',], self.lastBp)
-        count = self._getArg(args, [type(1),], kwargs, ['ignoreCount','count','n',], 0)
-        newCount = self.bpMan.setIgnoreCount(bp, count)
-        print "'%s' will ignored %s times."%(self.bpMan.bpToPath(bp),newCount,)
-
-    def _t(self, *args, **kwargs):
-        bp = self._getArg(args, [type(''),type({}),], kwargs, ['path','bp','name',], self.lastBp)
-        newEnabled = self.bpMan.toggleEnabled(bp)
-        print "'%s' is now %s."%(self.bpMan.bpToPath(bp),choice(newEnabled,'enabled','disabled'),)
-        
-    def _tg(self, *args, **kwargs):
-        bp = self._getArg(args, [type(''),type({}),], kwargs, ['grp',], self.lastBp)
-        if type(bp) == type(''):
-            bp = {'grp':bp}
-        bp = {'grp':bp.get('grp')}
-        newEnabled = self.bpMan.toggleEnabled(bp)
-        print "'%s' is now %s."%(self.bpMan.bpToPath(bp),choice(newEnabled,'enabled','disabled'),)
-        
-    def _tc(self, *args, **kwargs):
-        bp = self._getArg(args, [type(''),type({}),], kwargs, ['cfg',], self.lastBp)
-        if type(bp) == type(''):
-            bp = {'cfg':bp}
-        bp = {'cfg':bp.get('cfg')}
-        newEnabled = self.toggleConfig(bp['cfg'])
-        print "'%s' is now %s."%(self.bpMan.bpToPath(bp),choice(newEnabled,'enabled','disabled'),)
-        
-    def _z(self, *args, **kwargs):
-        bp = self._getArg(args, [type(''),type({}),], kwargs, ['path','bp','name',], self.lastBp)
-        self.bpMan.resetBp(bp)
-        print "'%s' has been reset."%(self.bpMan.partsToPath(bp),)
-
-    def _zg(self, *args, **kwargs):
-        bp = self._getArg(args, [type(''),type({}),], kwargs, ['grp',], self.lastBp)
-        if type(bp) == type(''):
-            bp = {'grp':bp}
-        bp = {'grp':bp.get('grp')}
-        self.bpMan.resetBp(bp)
-        print "'%s' has been reset."%(self.bpMan.partsToPath(bp),)
-
-    def _zc(self, *args, **kwargs):
-        bp = self._getArg(args, [type(''),type({}),], kwargs, ['cfg',], self.lastBp)
-        if type(bp) == type(''):
-            bp = {'cfg':bp}
-        bp = {'cfg':bp.get('cfg')}
-        self.resetConfig(bp['cfg'])
-        print "'%s' has been reset."%(self.bpMan.bpToPath(bp),)
- 
-    def _getArg(self, args, goodTypes, kwargs, goodKeys, default = None):
-        #look for desired arg in args and kwargs lists
-        argVal = default
-        for val in args:
-            if type(val) in goodTypes:
-                argVal = val
-        for key in goodKeys:
-            if key in kwargs:
-                argVal = kwargs[key]
-        return argVal
-                
-    #code for automatically determining param vals
-    def getFrameCodeInfo(self, frameCount=1):
-        #get main bits
-        stack = inspect.stack()
-        try:
-            primaryFrame = stack[frameCount][0]
-        except:
-            return ('<stdin>', None, -1)
-
-        #todo: 
-        #frameInfo is inadequate as a unique marker for this code location
-        #caching disabled until suitable replacement is found
-        #
-        #frameInfo = inspect.getframeinfo(primaryFrame)
-        #frameInfo = (frameInfo[0], frameInfo[1])
-        #check cache
-        #codeInfo = self.codeInfoCache.get(frameInfo)
-        #if codeInfo:
-        #    return codeInfo
-        
-        #look for module name
-        moduleName = None
-        callingModule = inspect.getmodule(primaryFrame)
-        if callingModule and callingModule.__name__ != '__main__':
-            moduleName = callingModule.__name__
-
-        #look for class name
-        className = None
-        for i in range(frameCount, len(stack)):
-            callingContexts = stack[i][4]
-            if callingContexts:
-                contextTokens = callingContexts[0].split()
-                if contextTokens[0] in ['class','def'] and len(contextTokens) > 1:
-                    callingContexts[0] = callingContexts[0].replace('(',' ').replace(':',' ')
-                    contextTokens = callingContexts[0].split()
-                    className = contextTokens[1]
-                    break
-        if className is None:
-            #look for self (this behaves incorrectly for inherited classes)
-            slf = primaryFrame.f_locals.get('self')
-            try:
-                if slf:
-                    className = slf.__class__.__name__
-            except:
-                #in __init__ 'self' exists but 'if slf' will crash
-                pass
-
-        #get line number
-        def byteOffsetToLineno(code, byte):
-            # Returns the source line number corresponding to the given byte
-            # offset into the indicated Python code module.
-            import array
-            lnotab = array.array('B', code.co_lnotab)
-            line   = code.co_firstlineno
-            for i in range(0, len(lnotab), 2):
-                byte -= lnotab[i]
-                if byte <= 0:
-                    return line
-                line += lnotab[i+1]
-            return line
-
-        lineNumber = byteOffsetToLineno(primaryFrame.f_code, primaryFrame.f_lasti)
-        #frame = inspect.stack()[frameCount][0]
-        #lineno = byteOffsetToLineno(frame.f_code, frame.f_lasti)
-
-        codeInfo = (moduleName, className, lineNumber)
-        #self.codeInfoCache[frameInfo] = codeInfo
-        return codeInfo
-    
-    #actually deliver the user a prompt
-    def set_trace(self, bp, frameCount=1):
-        #find useful frame
-        self.currFrame = sys._getframe()
-        interactFrame = self.currFrame
-        while frameCount > 0:
-            interactFrame = interactFrame.f_back
-            frameCount -= 1
-
-        #cache this as the latest bp
-        self.lastBp = bp.getParts()
-        #set up and start debuggger
-        import pdb
-        self.pdb = pdb.Pdb()
-        #self.pdb.do_alias('aa bpdb.addPdbAliases()')        
-        self.addPdbAliases()
-        self.pdb.set_trace(interactFrame);
-        
-    #bp invoke methods
-    def bp(self, id=None, grp=None, cfg=None, iff=True, enabled=True, test=None, frameCount=1):
-        if not (self.enabled and self.verifyEnabled()):
-            return
-        if not (enabled and iff):
-            return
-            
-        bpi = bp(id=id, grp=grp, cfg=cfg, frameCount=frameCount+1)
-        bpi.maybeBreak(test=test, frameCount=frameCount+1)
-
-    def bpCall(self,id=None,grp=None,cfg=None,iff=True,enabled=True,test=None,frameCount=1,onEnter=1,onExit=0):
-        def decorator(f):
-            return f
-
-        if not (self.enabled and self.verifyEnabled()):
-            return decorator
-        if not (enabled and iff):
-            return decorator
-        
-        bpi = bp(id=id, grp=grp, cfg=cfg, frameCount=frameCount+1)
-        if bpi.disabled:
-            return decorator
-
-        def decorator(f):
-            def wrap(*args, **kwds):
-                #create our bp object
-                dbp = bp(id=id or f.__name__, grp=bpi.grp, cfg=bpi.cfg, frameCount=frameCount+1)
-                if onEnter:
-                    dbp.maybeBreak(test=test,frameCount=frameCount+1,displayPrefix='Calling ')
-                f_result = f(*args, **kwds)
-                if onExit:
-                    dbp.maybeBreak(test=test,frameCount=frameCount+1,displayPrefix='Exited ')
-                return f_result
-                
-            wrap.func_name = f.func_name
-            wrap.func_dict = f.func_dict
-            wrap.func_doc = f.func_doc
-            wrap.__module__ = f.__module__
-            return wrap
-            
-        return decorator
-        
-    def bpPreset(self, *args, **kArgs):
-        def functor(*cArgs, **ckArgs):
-            return
-        if kArgs.get('call', None):
-            def functor(*cArgs, **ckArgs):
-                def decorator(f):
-                    return f
-                return decorator
-
-        if self.enabled and self.verifyEnabled():
-            argsCopy = args[:]
-            def functor(*cArgs, **ckArgs):
-                kwArgs = {}
-                kwArgs.update(kArgs)
-                kwArgs.update(ckArgs)
-                kwArgs.pop('static', None)
-                kwArgs['frameCount'] = ckArgs.get('frameCount',1)+1
-                if kwArgs.pop('call', None):
-                    return self.bpCall(*(cArgs), **kwArgs)
-                else:
-                    return self.bp(*(cArgs), **kwArgs)
-
-        if kArgs.get('static', None):
-            return staticmethod(functor)
-        else:
-            return functor
-
-    #deprecated:
-    @staticmethod
-    def bpGroup(*args, **kArgs):
-        print "BpDb.bpGroup is deprecated, use bpdb.bpPreset instead"
-        kwArgs = {}
-        kwArgs.update(kArgs)
-        kwArgs['frameCount'] = kArgs.get('frameCount', 1) + 1
-        return bpdb.bpPreset(*(args), **(kwArgs))
-
-
-class bp:
-    def __init__(self, id=None, grp=None, cfg=None, frameCount=1):
-        #check early out conditions 
-        self.disabled = False
-        if not bpdb.enabled:
-            self.disabled = True
-            return
-        
-        #default cfg, grp, id from calling code info
-        moduleName, className, lineNumber = bpdb.getFrameCodeInfo(frameCount=frameCount+1)
-        if moduleName:  #use only leaf module name
-            moduleName = moduleName.split('.')[-1]
-        self.grp = grp or className or moduleName
-        self.id = id or lineNumber
-
-        #default cfg to stripped module name
-        if cfg is None and moduleName:
-            cfg = moduleName.lower()
-            if cfg.find("distributed") == 0:        #prune leading 'Distributed'
-                cfg = cfg[len("distributed"):]
-
-        # check cfgs
-        self.cfg = cfg
-        if not bpdb.verifyConfig(self.cfg):
-            self.disabled = True
-            return
-
-    def getParts(self):
-        return {'id':self.id,'grp':self.grp,'cfg':self.cfg}
-        
-    def displayContextHint(self, displayPrefix=''):
-        contextString = displayPrefix + bpdb.bpMan.partsToPath({'id':self.id,'grp':self.grp,'cfg':self.cfg})
-        dashes = '-'*max(0, (80 - len(contextString) - 4) / 2)
-        print '<%s %s %s>'%(dashes,contextString,dashes)
-    
-    def maybeBreak(self, test=None, frameCount=1, displayPrefix=''):
-        if self.shouldBreak(test=test):
-            self.doBreak(frameCount=frameCount+1,displayPrefix=displayPrefix)
-    
-    def shouldBreak(self, test=None):
-        #check easy early out
-        if self.disabled:
-            return False
-        if test:
-            if not isinstance(test, (list, tuple)):
-                test = (test,)
-            for atest in test:
-                if not atest():
-                    return False
-
-        #check disabled conditions
-        if not bpdb.bpMan.getEnabled({'grp':self.grp,'id':self.id}):
-            return False
-        if not bpdb.verifyConfig(self.cfg):
-            return False
-
-        #check skip conditions
-        if bpdb.bpMan.getIgnoreCount({'grp':self.grp,'id':self.id},decrement=True):
-            return False
-        if bpdb.bpMan.getLifetime({'grp':self.grp,'id':self.id}) == 0:
-            return False
-
-        #all conditions go
-        return True
-        
-    def doBreak(self, frameCount=1,displayPrefix=''):
-        #accumulate hit count
-        bpdb.bpMan.decLifetime({'grp':self.grp,'id':self.id})
-        bpdb.bpMan.incHitCount({'grp':self.grp,'id':self.id})
-        
-        #setup debugger
-        self.displayContextHint(displayPrefix=displayPrefix)
-        bpdb.set_trace(self, frameCount=frameCount+1)
-
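
BpDb.py, deleted above, layered named, toggleable breakpoints (with ignore counts, lifetimes, and per-config switches) on top of pdb. Code that depended on it can fall back to the plain debugger; a minimal stand-in for an unconditional bpdb.bp()-style break, assuming only the standard library:

    import pdb
    import sys

    def bp(condition=True):
        # Drop into the debugger in the caller's frame, like the removed bpdb.bp().
        if condition:
            pdb.Pdb().set_trace(sys._getframe(1))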

+ 1 - 1
direct/src/showbase/BufferViewer.py

@@ -2,7 +2,7 @@
 
 __all__ = ['BufferViewer']
 
-from pandac.PandaModules import *
+from panda3d.core import *
 from direct.task import Task
 from direct.directnotify.DirectNotifyGlobal import *
 from direct.showbase.DirectObject import DirectObject
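
The one-line change above is part of the broader migration away from the deprecated pandac.PandaModules star import toward the panda3d.core package. Code that prefers explicit imports can name the classes it needs instead; for example (NodePath and Texture are just sample classes):

    from panda3d.core import NodePath, Texture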

+ 0 - 4
direct/src/showbase/DirectObject.py

@@ -5,7 +5,6 @@ __all__ = ['DirectObject']
 
 from direct.directnotify.DirectNotifyGlobal import directNotify
 from MessengerGlobal import messenger
-from direct.showbase.PythonUtil import ClassTree
 
 class DirectObject:
     """
@@ -42,9 +41,6 @@ class DirectObject:
     def isIgnoring(self, event):
         return messenger.isIgnoring(event, self)
 
-    def classTree(self):
-        return ClassTree(self)
-
     #This function must be used if you want a managed task
     def addTask(self, *args, **kwargs):
         if(not hasattr(self,"_taskList")):

+ 0 - 1251
direct/src/showbase/ElementTree.py

@@ -1,1251 +0,0 @@
-#
-# ElementTree
-# $Id$
-#
-# light-weight XML support for Python 1.5.2 and later.
-#
-# history:
-# 2001-10-20 fl   created (from various sources)
-# 2001-11-01 fl   return root from parse method
-# 2002-02-16 fl   sort attributes in lexical order
-# 2002-04-06 fl   TreeBuilder refactoring, added PythonDoc markup
-# 2002-05-01 fl   finished TreeBuilder refactoring
-# 2002-07-14 fl   added basic namespace support to ElementTree.write
-# 2002-07-25 fl   added QName attribute support
-# 2002-10-20 fl   fixed encoding in write
-# 2002-11-24 fl   changed default encoding to ascii; fixed attribute encoding
-# 2002-11-27 fl   accept file objects or file names for parse/write
-# 2002-12-04 fl   moved XMLTreeBuilder back to this module
-# 2003-01-11 fl   fixed entity encoding glitch for us-ascii
-# 2003-02-13 fl   added XML literal factory
-# 2003-02-21 fl   added ProcessingInstruction/PI factory
-# 2003-05-11 fl   added tostring/fromstring helpers
-# 2003-05-26 fl   added ElementPath support
-# 2003-07-05 fl   added makeelement factory method
-# 2003-07-28 fl   added more well-known namespace prefixes
-# 2003-08-15 fl   fixed typo in ElementTree.findtext (Thomas Dartsch)
-# 2003-09-04 fl   fall back on emulator if ElementPath is not installed
-# 2003-10-31 fl   markup updates
-# 2003-11-15 fl   fixed nested namespace bug
-# 2004-03-28 fl   added XMLID helper
-# 2004-06-02 fl   added default support to findtext
-# 2004-06-08 fl   fixed encoding of non-ascii element/attribute names
-# 2004-08-23 fl   take advantage of post-2.1 expat features
-# 2005-02-01 fl   added iterparse implementation
-# 2005-03-02 fl   fixed iterparse support for pre-2.2 versions
-#
-# Copyright (c) 1999-2005 by Fredrik Lundh.  All rights reserved.
-#
-# fredrik@pythonware.com
-# http://www.pythonware.com
-#
-# --------------------------------------------------------------------
-# The ElementTree toolkit is
-#
-# Copyright (c) 1999-2005 by Fredrik Lundh
-#
-# By obtaining, using, and/or copying this software and/or its
-# associated documentation, you agree that you have read, understood,
-# and will comply with the following terms and conditions:
-#
-# Permission to use, copy, modify, and distribute this software and
-# its associated documentation for any purpose and without fee is
-# hereby granted, provided that the above copyright notice appears in
-# all copies, and that both that copyright notice and this permission
-# notice appear in supporting documentation, and that the name of
-# Secret Labs AB or the author not be used in advertising or publicity
-# pertaining to distribution of the software without specific, written
-# prior permission.
-#
-# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
-# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
-# ABILITY AND FITNESS.  IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
-# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
-# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
-# OF THIS SOFTWARE.
-# --------------------------------------------------------------------
-
-__all__ = [
-    # public symbols
-    "Comment",
-    "dump",
-    "Element", "ElementTree",
-    "fromstring",
-    "iselement", "iterparse",
-    "parse",
-    "PI", "ProcessingInstruction",
-    "QName",
-    "SubElement",
-    "tostring",
-    "TreeBuilder",
-    "VERSION", "XML",
-    "XMLTreeBuilder",
-    ]
-
-##
-# The <b>Element</b> type is a flexible container object, designed to
-# store hierarchical data structures in memory. The type can be
-# described as a cross between a list and a dictionary.
-# <p>
-# Each element has a number of properties associated with it:
-# <ul>
-# <li>a <i>tag</i>. This is a string identifying what kind of data
-# this element represents (the element type, in other words).</li>
-# <li>a number of <i>attributes</i>, stored in a Python dictionary.</li>
-# <li>a <i>text</i> string.</li>
-# <li>an optional <i>tail</i> string.</li>
-# <li>a number of <i>child elements</i>, stored in a Python sequence</li>
-# </ul>
-#
-# To create an element instance, use the {@link #Element} or {@link
-# #SubElement} factory functions.
-# <p>
-# The {@link #ElementTree} class can be used to wrap an element
-# structure, and convert it from and to XML.
-##
-
-import string, sys, re
-
-class _SimpleElementPath:
-    # emulate pre-1.2 find/findtext/findall behaviour
-    def find(self, element, tag):
-        for elem in element:
-            if elem.tag == tag:
-                return elem
-        return None
-    def findtext(self, element, tag, default=None):
-        for elem in element:
-            if elem.tag == tag:
-                return elem.text or ""
-        return default
-    def findall(self, element, tag):
-        if tag[:3] == ".//":
-            return element.getiterator(tag[3:])
-        result = []
-        for elem in element:
-            if elem.tag == tag:
-                result.append(elem)
-        return result
-
-try:
-    import ElementPath
-except ImportError:
-    # FIXME: issue warning in this case?
-    ElementPath = _SimpleElementPath()
-
-# TODO: add support for custom namespace resolvers/default namespaces
-# TODO: add improved support for incremental parsing
-
-VERSION = "1.2.6"
-
-##
-# Internal element class.  This class defines the Element interface,
-# and provides a reference implementation of this interface.
-# <p>
-# You should not create instances of this class directly.  Use the
-# appropriate factory functions instead, such as {@link #Element}
-# and {@link #SubElement}.
-#
-# @see Element
-# @see SubElement
-# @see Comment
-# @see ProcessingInstruction
-
-class _ElementInterface:
-    # <tag attrib>text<child/>...</tag>tail
-
-    ##
-    # (Attribute) Element tag.
-
-    tag = None
-
-    ##
-    # (Attribute) Element attribute dictionary.  Where possible, use
-    # {@link #_ElementInterface.get},
-    # {@link #_ElementInterface.set},
-    # {@link #_ElementInterface.keys}, and
-    # {@link #_ElementInterface.items} to access
-    # element attributes.
-
-    attrib = None
-
-    ##
-    # (Attribute) Text before first subelement.  This is either a
-    # string or the value None, if there was no text.
-
-    text = None
-
-    ##
-    # (Attribute) Text after this element's end tag, but before the
-    # next sibling element's start tag.  This is either a string or
-    # the value None, if there was no text.
-
-    tail = None # text after end tag, if any
-
-    def __init__(self, tag, attrib):
-        self.tag = tag
-        self.attrib = attrib
-        self._children = []
-
-    def __repr__(self):
-        return "<Element %s at %x>" % (self.tag, id(self))
-
-    ##
-    # Creates a new element object of the same type as this element.
-    #
-    # @param tag Element tag.
-    # @param attrib Element attributes, given as a dictionary.
-    # @return A new element instance.
-
-    def makeelement(self, tag, attrib):
-        return Element(tag, attrib)
-
-    ##
-    # Returns the number of subelements.
-    #
-    # @return The number of subelements.
-
-    def __len__(self):
-        return len(self._children)
-
-    ##
-    # Returns the given subelement.
-    #
-    # @param index What subelement to return.
-    # @return The given subelement.
-    # @exception IndexError If the given element does not exist.
-
-    def __getitem__(self, index):
-        return self._children[index]
-
-    ##
-    # Replaces the given subelement.
-    #
-    # @param index What subelement to replace.
-    # @param element The new element value.
-    # @exception IndexError If the given element does not exist.
-    # @exception AssertionError If element is not a valid object.
-
-    def __setitem__(self, index, element):
-        assert iselement(element)
-        self._children[index] = element
-
-    ##
-    # Deletes the given subelement.
-    #
-    # @param index What subelement to delete.
-    # @exception IndexError If the given element does not exist.
-
-    def __delitem__(self, index):
-        del self._children[index]
-
-    ##
-    # Returns a list containing subelements in the given range.
-    #
-    # @param start The first subelement to return.
-    # @param stop The first subelement that shouldn't be returned.
-    # @return A sequence object containing subelements.
-
-    def __getslice__(self, start, stop):
-        return self._children[start:stop]
-
-    ##
-    # Replaces a number of subelements with elements from a sequence.
-    #
-    # @param start The first subelement to replace.
-    # @param stop The first subelement that shouldn't be replaced.
-    # @param elements A sequence object with zero or more elements.
-    # @exception AssertionError If a sequence member is not a valid object.
-
-    def __setslice__(self, start, stop, elements):
-        for element in elements:
-            assert iselement(element)
-        self._children[start:stop] = list(elements)
-
-    ##
-    # Deletes a number of subelements.
-    #
-    # @param start The first subelement to delete.
-    # @param stop The first subelement to leave in there.
-
-    def __delslice__(self, start, stop):
-        del self._children[start:stop]
-
-    ##
-    # Adds a subelement to the end of this element.
-    #
-    # @param element The element to add.
-    # @exception AssertionError If a sequence member is not a valid object.
-
-    def append(self, element):
-        assert iselement(element)
-        self._children.append(element)
-
-    ##
-    # Inserts a subelement at the given position in this element.
-    #
-    # @param index Where to insert the new subelement.
-    # @exception AssertionError If the element is not a valid object.
-
-    def insert(self, index, element):
-        assert iselement(element)
-        self._children.insert(index, element)
-
-    ##
-    # Removes a matching subelement.  Unlike the <b>find</b> methods,
-    # this method compares elements based on identity, not on tag
-    # value or contents.
-    #
-    # @param element What element to remove.
-    # @exception ValueError If a matching element could not be found.
-    # @exception AssertionError If the element is not a valid object.
-
-    def remove(self, element):
-        assert iselement(element)
-        self._children.remove(element)
-
-    ##
-    # Returns all subelements.  The elements are returned in document
-    # order.
-    #
-    # @return A list of subelements.
-    # @defreturn list of Element instances
-
-    def getchildren(self):
-        return self._children
-
-    ##
-    # Finds the first matching subelement, by tag name or path.
-    #
-    # @param path What element to look for.
-    # @return The first matching element, or None if no element was found.
-    # @defreturn Element or None
-
-    def find(self, path):
-        return ElementPath.find(self, path)
-
-    ##
-    # Finds text for the first matching subelement, by tag name or path.
-    #
-    # @param path What element to look for.
-    # @param default What to return if the element was not found.
-    # @return The text content of the first matching element, or the
-    #     default value if no element was found.  Note that if the element
-    #     is found, but has no text content, this method returns an
-    #     empty string.
-    # @defreturn string
-
-    def findtext(self, path, default=None):
-        return ElementPath.findtext(self, path, default)
-
-    ##
-    # Finds all matching subelements, by tag name or path.
-    #
-    # @param path What element to look for.
-    # @return A list or iterator containing all matching elements,
-    #    in document order.
-    # @defreturn list of Element instances
-
-    def findall(self, path):
-        return ElementPath.findall(self, path)
-
-    ##
-    # Resets an element.  This function removes all subelements, clears
-    # all attributes, and sets the text and tail attributes to None.
-
-    def clear(self):
-        self.attrib.clear()
-        self._children = []
-        self.text = self.tail = None
-
-    ##
-    # Gets an element attribute.
-    #
-    # @param key What attribute to look for.
-    # @param default What to return if the attribute was not found.
-    # @return The attribute value, or the default value, if the
-    #     attribute was not found.
-    # @defreturn string or None
-
-    def get(self, key, default=None):
-        return self.attrib.get(key, default)
-
-    ##
-    # Sets an element attribute.
-    #
-    # @param key What attribute to set.
-    # @param value The attribute value.
-
-    def set(self, key, value):
-        self.attrib[key] = value
-
-    ##
-    # Gets a list of attribute names.  The names are returned in an
-    # arbitrary order (just like for an ordinary Python dictionary).
-    #
-    # @return A list of element attribute names.
-    # @defreturn list of strings
-
-    def keys(self):
-        return self.attrib.keys()
-
-    ##
-    # Gets element attributes, as a sequence.  The attributes are
-    # returned in an arbitrary order.
-    #
-    # @return A list of (name, value) tuples for all attributes.
-    # @defreturn list of (string, string) tuples
-
-    def items(self):
-        return self.attrib.items()
-
-    ##
-    # Creates a tree iterator.  The iterator loops over this element
-    # and all subelements, in document order, and returns all elements
-    # with a matching tag.
-    # <p>
-    # If the tree structure is modified during iteration, the result
-    # is undefined.
-    #
-    # @param tag What tags to look for (default is to return all elements).
-    # @return A list or iterator containing all the matching elements.
-    # @defreturn list or iterator
-
-    def getiterator(self, tag=None):
-        nodes = []
-        if tag == "*":
-            tag = None
-        if tag is None or self.tag == tag:
-            nodes.append(self)
-        for node in self._children:
-            nodes.extend(node.getiterator(tag))
-        return nodes
-
-# compatibility
-_Element = _ElementInterface
-
-##
-# Element factory.  This function returns an object implementing the
-# standard Element interface.  The exact class or type of that object
-# is implementation dependent, but it will always be compatible with
-# the {@link #_ElementInterface} class in this module.
-# <p>
-# The element name, attribute names, and attribute values can be
-# either 8-bit ASCII strings or Unicode strings.
-#
-# @param tag The element name.
-# @param attrib An optional dictionary, containing element attributes.
-# @param **extra Additional attributes, given as keyword arguments.
-# @return An element instance.
-# @defreturn Element
-
-def Element(tag, attrib={}, **extra):
-    attrib = attrib.copy()
-    attrib.update(extra)
-    return _ElementInterface(tag, attrib)
-
-##
-# Subelement factory.  This function creates an element instance, and
-# appends it to an existing element.
-# <p>
-# The element name, attribute names, and attribute values can be
-# either 8-bit ASCII strings or Unicode strings.
-#
-# @param parent The parent element.
-# @param tag The subelement name.
-# @param attrib An optional dictionary, containing element attributes.
-# @param **extra Additional attributes, given as keyword arguments.
-# @return An element instance.
-# @defreturn Element
-
-def SubElement(parent, tag, attrib={}, **extra):
-    attrib = attrib.copy()
-    attrib.update(extra)
-    element = parent.makeelement(tag, attrib)
-    parent.append(element)
-    return element
-
-##
-# Comment element factory.  This factory function creates a special
-# element that will be serialized as an XML comment.
-# <p>
-# The comment string can be either an 8-bit ASCII string or a Unicode
-# string.
-#
-# @param text A string containing the comment string.
-# @return An element instance, representing a comment.
-# @defreturn Element
-
-def Comment(text=None):
-    element = Element(Comment)
-    element.text = text
-    return element
-
-##
-# PI element factory.  This factory function creates a special element
-# that will be serialized as an XML processing instruction.
-#
-# @param target A string containing the PI target.
-# @param text A string containing the PI contents, if any.
-# @return An element instance, representing a PI.
-# @defreturn Element
-
-def ProcessingInstruction(target, text=None):
-    element = Element(ProcessingInstruction)
-    element.text = target
-    if text:
-        element.text = element.text + " " + text
-    return element
-
-PI = ProcessingInstruction
-
-##
-# QName wrapper.  This can be used to wrap a QName attribute value, in
-# order to get proper namespace handling on output.
-#
-# @param text A string containing the QName value, in the form {uri}local,
-#     or, if the tag argument is given, the URI part of a QName.
-# @param tag Optional tag.  If given, the first argument is interpreted as
-    #     a URI, and this argument is interpreted as a local name.
-# @return An opaque object, representing the QName.
-
-class QName:
-    def __init__(self, text_or_uri, tag=None):
-        if tag:
-            text_or_uri = "{%s}%s" % (text_or_uri, tag)
-        self.text = text_or_uri
-    def __str__(self):
-        return self.text
-    def __hash__(self):
-        return hash(self.text)
-    def __cmp__(self, other):
-        if isinstance(other, QName):
-            return cmp(self.text, other.text)
-        return cmp(self.text, other)
-
-##
-# ElementTree wrapper class.  This class represents an entire element
-# hierarchy, and adds some extra support for serialization to and from
-# standard XML.
-#
-# @param element Optional root element.
-# @keyparam file Optional file handle or name.  If given, the
-#     tree is initialized with the contents of this XML file.
-
-class ElementTree:
-
-    def __init__(self, element=None, file=None):
-        assert element is None or iselement(element)
-        self._root = element # first node
-        if file:
-            self.parse(file)
-
-    ##
-    # Gets the root element for this tree.
-    #
-    # @return An element instance.
-    # @defreturn Element
-
-    def getroot(self):
-        return self._root
-
-    ##
-    # Replaces the root element for this tree.  This discards the
-    # current contents of the tree, and replaces it with the given
-    # element.  Use with care.
-    #
-    # @param element An element instance.
-
-    def _setroot(self, element):
-        assert iselement(element)
-        self._root = element
-
-    ##
-    # Loads an external XML document into this element tree.
-    #
-    # @param source A file name or file object.
-    # @param parser An optional parser instance.  If not given, the
-    #     standard {@link XMLTreeBuilder} parser is used.
-    # @return The document root element.
-    # @defreturn Element
-
-    def parse(self, source, parser=None):
-        if not hasattr(source, "read"):
-            source = open(source, "rb")
-        if not parser:
-            parser = XMLTreeBuilder()
-        while 1:
-            data = source.read(32768)
-            if not data:
-                break
-            parser.feed(data)
-        self._root = parser.close()
-        return self._root
-
-    ##
-    # Creates a tree iterator for the root element.  The iterator loops
-    # over all elements in this tree, in document order.
-    #
-    # @param tag What tags to look for (default is to return all elements)
-    # @return An iterator.
-    # @defreturn iterator
-
-    def getiterator(self, tag=None):
-        assert self._root is not None
-        return self._root.getiterator(tag)
-
-    ##
-    # Finds the first toplevel element with given tag.
-    # Same as getroot().find(path).
-    #
-    # @param path What element to look for.
-    # @return The first matching element, or None if no element was found.
-    # @defreturn Element or None
-
-    def find(self, path):
-        assert self._root is not None
-        if path[:1] == "/":
-            path = "." + path
-        return self._root.find(path)
-
-    ##
-    # Finds the element text for the first toplevel element with given
-    # tag.  Same as getroot().findtext(path).
-    #
-    # @param path What toplevel element to look for.
-    # @param default What to return if the element was not found.
-    # @return The text content of the first matching element, or the
-    #     default value if no element was found.  Note that if the element
-    #     is found, but has no text content, this method returns an
-    #     empty string.
-    # @defreturn string
-
-    def findtext(self, path, default=None):
-        assert self._root is not None
-        if path[:1] == "/":
-            path = "." + path
-        return self._root.findtext(path, default)
-
-    ##
-    # Finds all toplevel elements with the given tag.
-    # Same as getroot().findall(path).
-    #
-    # @param path What element to look for.
-    # @return A list or iterator containing all matching elements,
-    #    in document order.
-    # @defreturn list of Element instances
-
-    def findall(self, path):
-        assert self._root is not None
-        if path[:1] == "/":
-            path = "." + path
-        return self._root.findall(path)
-
-    ##
-    # Writes the element tree to a file, as XML.
-    #
-    # @param file A file name, or a file object opened for writing.
-    # @param encoding Optional output encoding (default is US-ASCII).
-
-    def write(self, file, encoding="us-ascii"):
-        assert self._root is not None
-        if not hasattr(file, "write"):
-            file = open(file, "wb")
-        if not encoding:
-            encoding = "us-ascii"
-        elif encoding != "utf-8" and encoding != "us-ascii":
-            file.write("<?xml version='1.0' encoding='%s'?>\n" % encoding)
-        self._write(file, self._root, encoding, {})
-
-    def _write(self, file, node, encoding, namespaces):
-        # write XML to file
-        tag = node.tag
-        if tag is Comment:
-            file.write("<!-- %s -->" % _escape_cdata(node.text, encoding))
-        elif tag is ProcessingInstruction:
-            file.write("<?%s?>" % _escape_cdata(node.text, encoding))
-        else:
-            items = node.items()
-            xmlns_items = [] # new namespaces in this scope
-            try:
-                if isinstance(tag, QName) or tag[:1] == "{":
-                    tag, xmlns = fixtag(tag, namespaces)
-                    if xmlns: xmlns_items.append(xmlns)
-            except TypeError:
-                _raise_serialization_error(tag)
-            file.write("<" + _encode(tag, encoding))
-            if items or xmlns_items:
-                items.sort() # lexical order
-                for k, v in items:
-                    try:
-                        if isinstance(k, QName) or k[:1] == "{":
-                            k, xmlns = fixtag(k, namespaces)
-                            if xmlns: xmlns_items.append(xmlns)
-                    except TypeError:
-                        _raise_serialization_error(k)
-                    try:
-                        if isinstance(v, QName):
-                            v, xmlns = fixtag(v, namespaces)
-                            if xmlns: xmlns_items.append(xmlns)
-                    except TypeError:
-                        _raise_serialization_error(v)
-                    file.write(" %s=\"%s\"" % (_encode(k, encoding),
-                                               _escape_attrib(v, encoding)))
-                for k, v in xmlns_items:
-                    file.write(" %s=\"%s\"" % (_encode(k, encoding),
-                                               _escape_attrib(v, encoding)))
-            if node.text or len(node):
-                file.write(">")
-                if node.text:
-                    file.write(_escape_cdata(node.text, encoding))
-                for n in node:
-                    self._write(file, n, encoding, namespaces)
-                file.write("</" + _encode(tag, encoding) + ">")
-            else:
-                file.write(" />")
-            for k, v in xmlns_items:
-                del namespaces[v]
-        if node.tail:
-            file.write(_escape_cdata(node.tail, encoding))
-
-# --------------------------------------------------------------------
-# helpers
-
-##
-# Checks if an object appears to be a valid element object.
-#
-# @param element An element instance.
-# @return A true value if this is an element object.
-# @defreturn flag
-
-def iselement(element):
-    # FIXME: not sure about this; might be a better idea to look
-    # for tag/attrib/text attributes
-    return isinstance(element, _ElementInterface) or hasattr(element, "tag")
-
-##
-# Writes an element tree or element structure to sys.stdout.  This
-# function should be used for debugging only.
-# <p>
-# The exact output format is implementation dependent.  In this
-# version, it's written as an ordinary XML file.
-#
-# @param elem An element tree or an individual element.
-
-def dump(elem):
-    # debugging
-    if not isinstance(elem, ElementTree):
-        elem = ElementTree(elem)
-    elem.write(sys.stdout)
-    tail = elem.getroot().tail
-    if not tail or tail[-1] != "\n":
-        sys.stdout.write("\n")
-
-def _encode(s, encoding):
-    try:
-        return s.encode(encoding)
-    except AttributeError:
-        return s # 1.5.2: assume the string uses the right encoding
-
-_escape = re.compile(u"[&<>\"\u0080-\uffff]+")
-
-_escape_map = {
-    "&": "&amp;",
-    "<": "&lt;",
-    ">": "&gt;",
-    '"': "&quot;",
-}
-
-_namespace_map = {
-    # "well-known" namespace prefixes
-    "http://www.w3.org/XML/1998/namespace": "xml",
-    "http://www.w3.org/1999/xhtml": "html",
-    "http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf",
-    "http://schemas.xmlsoap.org/wsdl/": "wsdl",
-}
-
-def _raise_serialization_error(text):
-    raise TypeError(
-        "cannot serialize %r (type %s)" % (text, type(text).__name__)
-        )
-
-def _encode_entity(text, pattern=_escape):
-    # map reserved and non-ascii characters to numerical entities
-    def escape_entities(m, map=_escape_map):
-        out = []
-        append = out.append
-        for char in m.group():
-            text = map.get(char)
-            if text is None:
-                text = "&#%d;" % ord(char)
-            append(text)
-        return string.join(out, "")
-    try:
-        return _encode(pattern.sub(escape_entities, text), "ascii")
-    except TypeError:
-        _raise_serialization_error(text)
-
-#
-# the following functions assume an ascii-compatible encoding
-# (or "utf-16")
-
-def _escape_cdata(text, encoding=None):
-    # escape character data
-    try:
-        if encoding:
-            try:
-                text = _encode(text, encoding)
-            except UnicodeError:
-                return _encode_entity(text)
-        text = text.replace("&", "&amp;")
-        text = text.replace("<", "&lt;")
-        text = text.replace( ">", "&gt;")
-        return text
-    except (TypeError, AttributeError):
-        _raise_serialization_error(text)
-
-def _escape_attrib(text, encoding=None):
-    # escape attribute value
-    try:
-        if encoding:
-            try:
-                text = _encode(text, encoding)
-            except UnicodeError:
-                return _encode_entity(text)
-        text = text.replace("&", "&amp;")
-        text = text.replace("'", "&apos;") # FIXME: overkill
-        text = text.replace("\"", "&quot;")
-        text = text.replace("<", "&lt;")
-        text = text.replace(">", "&gt;")
-        return text
-    except (TypeError, AttributeError):
-        _raise_serialization_error(text)
-
-def fixtag(tag, namespaces):
-    # given a decorated tag (of the form {uri}tag), return prefixed
-    # tag and namespace declaration, if any
-    if isinstance(tag, QName):
-        tag = tag.text
-    namespace_uri, tag = string.split(tag[1:], "}", 1)
-    prefix = namespaces.get(namespace_uri)
-    if prefix is None:
-        prefix = _namespace_map.get(namespace_uri)
-        if prefix is None:
-            prefix = "ns%d" % len(namespaces)
-        namespaces[namespace_uri] = prefix
-        if prefix == "xml":
-            xmlns = None
-        else:
-            xmlns = ("xmlns:%s" % prefix, namespace_uri)
-    else:
-        xmlns = None
-    return "%s:%s" % (prefix, tag), xmlns
-
-##
-# Parses an XML document into an element tree.
-#
-# @param source A filename or file object containing XML data.
-# @param parser An optional parser instance.  If not given, the
-#     standard {@link XMLTreeBuilder} parser is used.
-# @return An ElementTree instance
-
-def parse(source, parser=None):
-    tree = ElementTree()
-    tree.parse(source, parser)
-    return tree
-
-##
-# Parses an XML document into an element tree incrementally, and reports
-# what's going on to the user.
-#
-# @param source A filename or file object containing XML data.
-# @param events A list of events to report back.  If omitted, only "end"
-#     events are reported.
-# @return An (event, elem) iterator.
-
-class iterparse:
-
-    def __init__(self, source, events=None):
-        if not hasattr(source, "read"):
-            source = open(source, "rb")
-        self._file = source
-        self._events = []
-        self._index = 0
-        self.root = self._root = None
-        self._parser = XMLTreeBuilder()
-        # wire up the parser for event reporting
-        parser = self._parser._parser
-        append = self._events.append
-        if events is None:
-            events = ["end"]
-        for event in events:
-            if event == "start":
-                try:
-                    parser.ordered_attributes = 1
-                    parser.specified_attributes = 1
-                    def handler(tag, attrib_in, event=event, append=append,
-                                start=self._parser._start_list):
-                        append((event, start(tag, attrib_in)))
-                    parser.StartElementHandler = handler
-                except AttributeError:
-                    def handler(tag, attrib_in, event=event, append=append,
-                                start=self._parser._start):
-                        append((event, start(tag, attrib_in)))
-                    parser.StartElementHandler = handler
-            elif event == "end":
-                def handler(tag, event=event, append=append,
-                            end=self._parser._end):
-                    append((event, end(tag)))
-                parser.EndElementHandler = handler
-            elif event == "start-ns":
-                def handler(prefix, uri, event=event, append=append):
-                    try:
-                        uri = _encode(uri, "ascii")
-                    except UnicodeError:
-                        pass
-                    append((event, (prefix or "", uri)))
-                parser.StartNamespaceDeclHandler = handler
-            elif event == "end-ns":
-                def handler(prefix, event=event, append=append):
-                    append((event, None))
-                parser.EndNamespaceDeclHandler = handler
-
-    def next(self):
-        while 1:
-            try:
-                item = self._events[self._index]
-            except IndexError:
-                if self._parser is None:
-                    self.root = self._root
-                    try:
-                        raise StopIteration
-                    except NameError:
-                        raise IndexError
-                # load event buffer
-                del self._events[:]
-                self._index = 0
-                data = self._file.read(16384)
-                if data:
-                    self._parser.feed(data)
-                else:
-                    self._root = self._parser.close()
-                    self._parser = None
-            else:
-                self._index = self._index + 1
-                return item
-
-    try:
-        iter
-        def __iter__(self):
-            return self
-    except NameError:
-        def __getitem__(self, index):
-            return self.next()
-
-##
-# Parses an XML document from a string constant.  This function can
-# be used to embed "XML literals" in Python code.
-#
-# @param source A string containing XML data.
-# @return An Element instance.
-# @defreturn Element
-
-def XML(text):
-    parser = XMLTreeBuilder()
-    parser.feed(text)
-    return parser.close()
-
-##
-# Parses an XML document from a string constant, and also returns
-# a dictionary which maps from element id:s to elements.
-#
-# @param source A string containing XML data.
-# @return A tuple containing an Element instance and a dictionary.
-# @defreturn (Element, dictionary)
-
-def XMLID(text):
-    parser = XMLTreeBuilder()
-    parser.feed(text)
-    tree = parser.close()
-    ids = {}
-    for elem in tree.getiterator():
-        id = elem.get("id")
-        if id:
-            ids[id] = elem
-    return tree, ids
-
-##
-# Parses an XML document from a string constant.  Same as {@link #XML}.
-#
-# @def fromstring(text)
-# @param source A string containing XML data.
-# @return An Element instance.
-# @defreturn Element
-
-fromstring = XML
-
-##
-# Generates a string representation of an XML element, including all
-# subelements.
-#
-# @param element An Element instance.
-# @return An encoded string containing the XML data.
-# @defreturn string
-
-def tostring(element, encoding=None):
-    class dummy:
-        pass
-    data = []
-    file = dummy()
-    file.write = data.append
-    ElementTree(element).write(file, encoding)
-    return string.join(data, "")
-
-##
-# Generic element structure builder.  This builder converts a sequence
-# of {@link #TreeBuilder.start}, {@link #TreeBuilder.data}, and {@link
-# #TreeBuilder.end} method calls to a well-formed element structure.
-# <p>
-# You can use this class to build an element structure using a custom XML
-# parser, or a parser for some other XML-like format.
-#
-# @param element_factory Optional element factory.  This factory
-#    is called to create new Element instances, as necessary.
-
-class TreeBuilder:
-
-    def __init__(self, element_factory=None):
-        self._data = [] # data collector
-        self._elem = [] # element stack
-        self._last = None # last element
-        self._tail = None # true if we're after an end tag
-        if element_factory is None:
-            element_factory = _ElementInterface
-        self._factory = element_factory
-
-    ##
-    # Flushes the parser buffers, and returns the toplevel document
-    # element.
-    #
-    # @return An Element instance.
-    # @defreturn Element
-
-    def close(self):
-        assert len(self._elem) == 0, "missing end tags"
-        assert self._last != None, "missing toplevel element"
-        return self._last
-
-    def _flush(self):
-        if self._data:
-            if self._last is not None:
-                text = string.join(self._data, "")
-                if self._tail:
-                    assert self._last.tail is None, "internal error (tail)"
-                    self._last.tail = text
-                else:
-                    assert self._last.text is None, "internal error (text)"
-                    self._last.text = text
-            self._data = []
-
-    ##
-    # Adds text to the current element.
-    #
-    # @param data A string.  This should be either an 8-bit string
-    #    containing ASCII text, or a Unicode string.
-
-    def data(self, data):
-        self._data.append(data)
-
-    ##
-    # Opens a new element.
-    #
-    # @param tag The element name.
-    # @param attrib A dictionary containing element attributes.
-    # @return The opened element.
-    # @defreturn Element
-
-    def start(self, tag, attrs):
-        self._flush()
-        self._last = elem = self._factory(tag, attrs)
-        if self._elem:
-            self._elem[-1].append(elem)
-        self._elem.append(elem)
-        self._tail = 0
-        return elem
-
-    ##
-    # Closes the current element.
-    #
-    # @param tag The element name.
-    # @return The closed element.
-    # @defreturn Element
-
-    def end(self, tag):
-        self._flush()
-        self._last = self._elem.pop()
-        assert self._last.tag == tag,\
-               "end tag mismatch (expected %s, got %s)" % (
-                   self._last.tag, tag)
-        self._tail = 1
-        return self._last
-
-##
-# Element structure builder for XML source data, based on the
-# <b>expat</b> parser.
-#
-# @keyparam target Target object.  If omitted, the builder uses an
-#     instance of the standard {@link #TreeBuilder} class.
-# @keyparam html Predefine HTML entities.  This flag is not supported
-#     by the current implementation.
-# @see #ElementTree
-# @see #TreeBuilder
-
-class XMLTreeBuilder:
-
-    def __init__(self, html=0, target=None):
-        try:
-            from xml.parsers import expat
-        except ImportError:
-            raise ImportError(
-                "No module named expat; use SimpleXMLTreeBuilder instead"
-                )
-        self._parser = parser = expat.ParserCreate(None, "}")
-        if target is None:
-            target = TreeBuilder()
-        self._target = target
-        self._names = {} # name memo cache
-        # callbacks
-        parser.DefaultHandlerExpand = self._default
-        parser.StartElementHandler = self._start
-        parser.EndElementHandler = self._end
-        parser.CharacterDataHandler = self._data
-        # let expat do the buffering, if supported
-        try:
-            self._parser.buffer_text = 1
-        except AttributeError:
-            pass
-        # use new-style attribute handling, if supported
-        try:
-            self._parser.ordered_attributes = 1
-            self._parser.specified_attributes = 1
-            parser.StartElementHandler = self._start_list
-        except AttributeError:
-            pass
-        encoding = None
-        if not parser.returns_unicode:
-            encoding = "utf-8"
-        # target.xml(encoding, None)
-        self._doctype = None
-        self.entity = {}
-
-    def _fixtext(self, text):
-        # convert text string to ascii, if possible
-        try:
-            return _encode(text, "ascii")
-        except UnicodeError:
-            return text
-
-    def _fixname(self, key):
-        # expand qname, and convert name string to ascii, if possible
-        try:
-            name = self._names[key]
-        except KeyError:
-            name = key
-            if "}" in name:
-                name = "{" + name
-            self._names[key] = name = self._fixtext(name)
-        return name
-
-    def _start(self, tag, attrib_in):
-        fixname = self._fixname
-        tag = fixname(tag)
-        attrib = {}
-        for key, value in attrib_in.items():
-            attrib[fixname(key)] = self._fixtext(value)
-        return self._target.start(tag, attrib)
-
-    def _start_list(self, tag, attrib_in):
-        fixname = self._fixname
-        tag = fixname(tag)
-        attrib = {}
-        if attrib_in:
-            for i in range(0, len(attrib_in), 2):
-                attrib[fixname(attrib_in[i])] = self._fixtext(attrib_in[i+1])
-        return self._target.start(tag, attrib)
-
-    def _data(self, text):
-        return self._target.data(self._fixtext(text))
-
-    def _end(self, tag):
-        return self._target.end(self._fixname(tag))
-
-    def _default(self, text):
-        prefix = text[:1]
-        if prefix == "&":
-            # deal with undefined entities
-            try:
-                self._target.data(self.entity[text[1:-1]])
-            except KeyError:
-                from xml.parsers import expat
-                raise expat.error(
-                    "undefined entity %s: line %d, column %d" %
-                    (text, self._parser.ErrorLineNumber,
-                    self._parser.ErrorColumnNumber)
-                    )
-        elif prefix == "<" and text[:9] == "<!DOCTYPE":
-            self._doctype = [] # inside a doctype declaration
-        elif self._doctype is not None:
-            # parse doctype contents
-            if prefix == ">":
-                self._doctype = None
-                return
-            text = string.strip(text)
-            if not text:
-                return
-            self._doctype.append(text)
-            n = len(self._doctype)
-            if n > 2:
-                type = self._doctype[1]
-                if type == "PUBLIC" and n == 4:
-                    name, type, pubid, system = self._doctype
-                elif type == "SYSTEM" and n == 3:
-                    name, type, system = self._doctype
-                    pubid = None
-                else:
-                    return
-                if pubid:
-                    pubid = pubid[1:-1]
-                self.doctype(name, pubid, system[1:-1])
-                self._doctype = None
-
-    ##
-    # Handles a doctype declaration.
-    #
-    # @param name Doctype name.
-    # @param pubid Public identifier.
-    # @param system System identifier.
-
-    def doctype(self, name, pubid, system):
-        pass
-
-    ##
-    # Feeds data to the parser.
-    #
-    # @param data Encoded data.
-
-    def feed(self, data):
-        self._parser.Parse(data, 0)
-
-    ##
-    # Finishes feeding data to the parser.
-    #
-    # @return An element structure.
-    # @defreturn Element
-
-    def close(self):
-        self._parser.Parse("", 1) # end of data
-        tree = self._target.close()
-        del self._target, self._parser # get rid of circular references
-        return tree
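
The removal above drops the bundled copy of ElementTree from direct/src/showbase. As a point of reference only, the same Element/SubElement/find/write API documented in the removed code is available from the standard library; the sketch below assumes callers switch to xml.etree.ElementTree (the diff does not state this explicitly), and the tag names and attribute values are invented.

```python
# Hedged sketch (not part of the commit): the removed module mirrors the
# standard library's xml.etree.ElementTree, so equivalent usage of the API
# documented above might look like this.
import xml.etree.ElementTree as ET

root = ET.Element("scene", attrib={"name": "demo"})     # Element factory
model = ET.SubElement(root, "model", path="panda.egg")  # create and append a child
model.text = "smiley"

# find/findtext/findall accept a tag name or simple path, as documented above.
assert root.find("model") is model
assert root.findtext("model") == "smiley"
assert len(root.findall("model")) == 1

# ElementTree wraps the whole hierarchy and serializes it.
print(ET.tostring(root))
```

The removed copy is written against Python 1.5/2.x idioms (string.join, __cmp__, __getslice__), which is presumably part of the motivation for dropping it in favor of an external implementation.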

+ 8 - 36
direct/src/showbase/EventManager.py

@@ -5,11 +5,8 @@ __all__ = ['EventManager']
 
 from MessengerGlobal import *
 from direct.directnotify.DirectNotifyGlobal import *
-
-# This module may not import pandac.PandaModules, since it is imported
-# by the Toontown Launcher before the complete PandaModules have been
-# downloaded.
-#from pandac.PandaModules import *
+from direct.task.TaskManagerGlobal import taskMgr
+from panda3d.core import PStatCollector, EventQueue, EventHandler
 
 class EventManager:
 
@@ -18,11 +15,6 @@ class EventManager:
     # delayed import, since this is imported by the Toontown Launcher
     # before the complete PandaModules have been downloaded.
     PStatCollector = None
-
-    # for efficiency, only call import once per module
-    EventStorePandaNode = None
-    EventQueue = None
-    EventHandler = None
     
     def __init__(self, eventQueue = None):
         """
@@ -43,7 +35,6 @@ class EventManager:
         """
         if self._wantPstats is None:
             self._wantPstats = config.GetBool('pstats-eventmanager', 0)
-            from pandac.PandaModules import PStatCollector
             EventManager.PStatCollector = PStatCollector
         # use different methods for handling events with and without pstats tracking
         # for efficiency
@@ -80,18 +71,8 @@ class EventManager:
             return None
         else:
             # Must be some user defined type, return the ptr
-            # which will be downcast to that type
-            ptr = eventParameter.getPtr()
-
-            if EventManager.EventStorePandaNode is None:
-                from pandac.PandaModules import EventStorePandaNode
-                EventManager.EventStorePandaNode = EventStorePandaNode
-            if isinstance(ptr, EventManager.EventStorePandaNode):
-                # Actually, it's a kludgey wrapper around a PandaNode
-                # pointer.  Return the node.
-                ptr = ptr.getValue()
-
-            return ptr
+            # which will be downcast to that type.
+            return eventParameter.getPtr()
         
     def processEvent(self, event):
         """
@@ -190,28 +171,19 @@ class EventManager:
 
 
     def restart(self):
-        if None in (EventManager.EventQueue, EventManager.EventHandler):
-            from pandac.PandaModules import EventQueue, EventHandler
-            EventManager.EventQueue = EventQueue
-            EventManager.EventHandler = EventHandler
-        
         if self.eventQueue == None:
-            self.eventQueue = EventManager.EventQueue.getGlobalEventQueue()
+            self.eventQueue = EventQueue.getGlobalEventQueue()
 
         if self.eventHandler == None:
-            if self.eventQueue == EventManager.EventQueue.getGlobalEventQueue():
+            if self.eventQueue == EventQueue.getGlobalEventQueue():
                 # If we are using the global event queue, then we also
                 # want to use the global event handler.
-                self.eventHandler = EventManager.EventHandler.getGlobalEventHandler()
+                self.eventHandler = EventHandler.getGlobalEventHandler()
             else:
                 # Otherwise, we need our own event handler.
-                self.eventHandler = EventManager.EventHandler(self.eventQueue)
+                self.eventHandler = EventHandler(self.eventQueue)
 
-        # Should be safe to import the global taskMgr by now.
-        from direct.task.TaskManagerGlobal import taskMgr
         taskMgr.add(self.eventLoopTask, 'eventManager')
 
     def shutdown(self):
-        # Should be safe to import the global taskMgr by now.
-        from direct.task.TaskManagerGlobal import taskMgr
         taskMgr.remove('eventManager')
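
The EventManager.py change above replaces the delayed pandac.PandaModules imports (a workaround for the old Toontown launcher) with ordinary top-level imports from panda3d.core and direct.task.TaskManagerGlobal. Below is a minimal stand-alone sketch of the simplified queue/handler selection that restart() now performs; the function name is invented and a Panda3D installation is assumed.

```python
# Minimal sketch of the queue/handler pairing logic; mirrors the restart()
# hunk above but is not the EventManager module itself.
from panda3d.core import EventQueue, EventHandler

def make_queue_and_handler(event_queue=None):
    # Fall back to the global queue when none is supplied.
    queue = event_queue or EventQueue.getGlobalEventQueue()
    if queue == EventQueue.getGlobalEventQueue():
        # The global queue pairs with the global event handler.
        handler = EventHandler.getGlobalEventHandler()
    else:
        # A private queue needs its own handler.
        handler = EventHandler(queue)
    return queue, handler
```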

+ 3 - 3
direct/src/showbase/ExceptionVarDump.py

@@ -1,4 +1,4 @@
-from pandac.PandaModules import getConfigShowbase
+from panda3d.direct import get_config_showbase
 from direct.directnotify.DirectNotifyGlobal import directNotify
 from direct.showbase.PythonUtil import fastRepr
 import sys
@@ -6,7 +6,7 @@ import types
 import traceback
 
 notify = directNotify.newCategory("ExceptionVarDump")
-config = getConfigShowbase()
+config = get_config_showbase()
 
 reentry = 0
 
@@ -154,7 +154,7 @@ def _excepthookDumpVars(eType, eValue, tb):
                     for attrName in attrNames:
                         obj = attrName2obj[attrName]
                         stateStack.push(['%s.%s' % (name, attrName), obj, ids])
-                
+
         tb = tb.tb_next
 
     if foundRun:

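In ExceptionVarDump.py the only functional change is the switch from pandac.PandaModules.getConfigShowbase to panda3d.direct.get_config_showbase; the rest is trailing-whitespace cleanup. A minimal sketch of the new accessor follows, assuming a build that exposes the snake_case name as in this branch; the 'pstats-eventmanager' variable is borrowed from the EventManager hunk purely as an example.

```python
# Minimal sketch, assuming panda3d.direct exposes get_config_showbase as in this branch.
from panda3d.direct import get_config_showbase

config = get_config_showbase()
want_pstats = config.GetBool('pstats-eventmanager', 0)  # same DConfig-style API as before
```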
+ 1 - 1
direct/src/showbase/Job.py

@@ -1,7 +1,7 @@
 from direct.showbase.DirectObject import DirectObject
 
 if __debug__:
-    from pandac.PandaModules import PStatCollector
+    from panda3d.core import PStatCollector
 
 class Job(DirectObject):
     # Base class for cpu-intensive or non-time-critical operations that

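Job.py now pulls PStatCollector from panda3d.core, still guarded by `if __debug__:` so optimized runs skip the import entirely. Below is a small hedged sketch of that guarded-collector pattern; the collector name and the run_step function are invented for illustration and are not part of Job.py.

```python
# Sketch of the __debug__-guarded PStat collector pattern, assuming a Panda3D install.
if __debug__:
    from panda3d.core import PStatCollector

def run_step():
    if __debug__:
        collector = PStatCollector("App:Jobs:Example")  # invented collector name
        collector.start()
    try:
        pass  # one slice of the job's work would go here
    finally:
        if __debug__:
            collector.stop()
```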
+ 5 - 75
direct/src/showbase/Messenger.py

@@ -7,78 +7,8 @@ from PythonUtil import *
 from direct.directnotify import DirectNotifyGlobal
 import types
 
-from panda3d.core import ConfigVariableBool
-
-# If using the Toontown ActiveX launcher, this must be set true.
-# Also, Panda must be compiled with SIMPLE_THREADS or no HAVE_THREADS
-# at all.  In the normal Panda case, this should be set false.
-if ConfigVariableBool('delay-messenger-lock', False).getValue():
-    class Lock:
-        """ This is a cheesy delayed implementation of Lock, designed to
-        support the Toontown ActiveX launch, which must import Messenger
-        before it has downloaded the rest of Panda.  Note that this
-        cheesy lock isn't thread-safe if the application starts any
-        threads before acquiring the Messenger lock the first time.
-        (However, it's mostly thread-safe if Panda is compiled with
-        SIMPLE_THREADS.) """
-
-        notify = DirectNotifyGlobal.directNotify.newCategory("Messenger.Lock")
-
-        def __init__(self):
-            self.locked = 0
-
-        def acquire(self):
-            # Before we download Panda, we can't use any threading
-            # interfaces.  So don't, until we observe that we have some
-            # actual contention on the lock.
-
-            if self.locked:
-                # We have contention.
-                return self.__getLock()
-
-            # This relies on the fact that any individual Python statement
-            # is atomic.
-            self.locked += 1
-            if self.locked > 1:
-                # Whoops, we have contention.
-                self.locked -= 1
-                return self.__getLock()
-
-        def release(self):
-            if self.locked:
-                # Still using the old, cheesy lock.
-                self.locked -= 1
-                return
-
-            # The new lock must have been put in place.
-            self.release = self.lock.release
-            return self.lock.release()
-
-        def __getLock(self):
-            # Now that we've started Panda, it's safe to import the Mutex
-            # class, which becomes our actual lock.
-            # From now on, this lock will be used.
-
-            self.notify.info("Acquiring Panda lock for the first time.")
-
-            from pandac.PandaModules import Thread, Mutex
-            self.__dict__.setdefault('lock', Mutex('Messenger'))
-            self.lock.acquire()
-
-            self.acquire = self.lock.acquire
-
-            # Wait for the cheesy lock to be released before we return.
-            self.notify.info("Waiting for cheesy lock to be released.")
-            while self.locked:
-                Thread.forceYield()
-            self.notify.info("Got cheesy lock.")
-
-            # We return with the lock acquired.
-else:            
-    # In the normal case, there's no reason not to import all of
-    # libpanda right away, and so we can just use Lock directly.  This
-    # is perfectly thread-safe.
-    from direct.stdpy.threading import Lock
+from panda3d.core import ConfigVariableBool, Thread, Mutex
+from direct.stdpy.threading import Lock
 
 class Messenger:
 
@@ -215,7 +145,7 @@ class Messenger:
             # on this particular object.
             if id in acceptorDict:
                 # TODO: we're replacing the existing callback. should this be an error?
-                if notifyDebug:        
+                if notifyDebug:
                     oldMethod = acceptorDict[id][0]
                     if oldMethod == method:
                         self.notify.warning(
@@ -420,7 +350,7 @@ class Messenger:
                 if not eventTuple:
                     # No event; we're done.
                     return task.done
-                
+
                 self.__dispatch(*eventTuple)
             finally:
                 self.lock.release()
@@ -450,7 +380,7 @@ class Messenger:
                         if (len(eventDict) == 0):
                             del self.__objectEvents[id]
                         self._releaseObject(self._getObject(id))
-                        
+
                     del acceptorDict[id]
                     # If the dictionary at this event is now empty, remove
                     # the event entry from the Messenger altogether
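
The Messenger.py change deletes the "cheesy" delay-loaded Lock that existed only for the Toontown ActiveX launcher and always uses direct.stdpy.threading.Lock instead. The sketch below shows the resulting straightforward acquire/release pattern in a toy class; TinyMessenger and its methods are illustrative stand-ins, not the real Messenger.

```python
# Illustrative sketch of plain Lock usage now that the delayed shim is gone.
from direct.stdpy.threading import Lock

class TinyMessenger:
    """Stand-in showing the locking pattern; not the real Messenger."""

    def __init__(self):
        self.lock = Lock()
        self._hooks = {}

    def accept(self, event, method):
        self.lock.acquire()
        try:
            self._hooks.setdefault(event, []).append(method)
        finally:
            self.lock.release()

    def send(self, event, *args):
        self.lock.acquire()
        try:
            hooks = list(self._hooks.get(event, []))
        finally:
            self.lock.release()
        for method in hooks:
            method(*args)
```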

+ 0 - 12
direct/src/showbase/PandaObject.py

@@ -1,12 +0,0 @@
-"""Undocumented Module"""
-
-__all__ = []
-
-## from DirectObject import *
-## from pandac.PandaModules import *
-## 
-## class PandaObject(DirectObject):
-##     """
-##     This is the class that all Panda/Show classes should inherit from
-##     """
-##     pass

+ 1 - 1
direct/src/showbase/PhysicsManagerGlobal.py

@@ -2,6 +2,6 @@
 
 __all__ = ['physicsMgr']
 
-from pandac.PandaModules import PhysicsManager
+from panda3d.physics import PhysicsManager
 
 physicsMgr = PhysicsManager()

File diff suppressed because it is too large
+ 18 - 967
direct/src/showbase/PythonUtil.py


+ 71 - 64
direct/src/showbase/ShowBase.py

@@ -8,11 +8,11 @@ __all__ = ['ShowBase', 'WindowControls']
 #import VerboseImport
 
 from panda3d.core import *
-from panda3d.direct import *
+from panda3d.direct import get_config_showbase, throw_new_frame, init_app_for_gui
 
 # This needs to be available early for DirectGUI imports
 import __builtin__
-__builtin__.config = getConfigShowbase()
+__builtin__.config = get_config_showbase()
 
 from direct.directnotify.DirectNotifyGlobal import *
 from MessengerGlobal import *
@@ -46,7 +46,7 @@ import AppRunnerGlobal
 
 __builtin__.FADE_SORT_INDEX = 1000
 __builtin__.NO_FADE_SORT_INDEX = 2000
-    
+
 
 # Now ShowBase is a DirectObject.  We need this so ShowBase can hang
 # hooks on messages, particularly on window-event.  This doesn't
@@ -58,7 +58,7 @@ class ShowBase(DirectObject.DirectObject):
     def __init__(self, fStartDirect = True, windowType = None):
         self.__dev__ = config.GetBool('want-dev', __debug__)
         __builtin__.__dev__ = self.__dev__
-        
+
         logStackDump = (config.GetBool('log-stack-dump', False) or
                         config.GetBool('client-log-stack-dump', False))
         uploadStackDump = config.GetBool('upload-stack-dump', False)
@@ -74,7 +74,7 @@ class ShowBase(DirectObject.DirectObject):
 
         #debug running multiplier
         self.debugRunningMultiplier = 4
-        
+
         # Get the dconfig object
         self.config = config
         # Setup wantVerifyPdb as soon as reasonable:
@@ -91,7 +91,7 @@ class ShowBase(DirectObject.DirectObject):
         self.nextWindowIndex = 1
         self.__directStarted = False
         self.__deadInputs = 0
-        
+
         # Store dconfig variables
         self.sfxActive = self.config.GetBool('audio-sfx-active', 1)
         self.musicActive = self.config.GetBool('audio-music-active', 1)
@@ -253,7 +253,7 @@ class ShowBase(DirectObject.DirectObject):
 
         self.loader = Loader.Loader(self)
         self.graphicsEngine.setDefaultLoader(self.loader.loader)
-            
+
         self.eventMgr = eventMgr
         self.messenger = messenger
         self.bboard = bulletinBoard
@@ -315,7 +315,7 @@ class ShowBase(DirectObject.DirectObject):
         # Make sure we're not making more than one ShowBase.
         if 'base' in __builtin__.__dict__:
             raise StandardError, "Attempt to spawn multiple ShowBase instances!"
-        
+
         __builtin__.base = self
         __builtin__.render2d = self.render2d
         __builtin__.aspect2d = self.aspect2d
@@ -357,7 +357,7 @@ class ShowBase(DirectObject.DirectObject):
 
         # set up recording of Functor creation stacks in __dev__
         PythonUtil.recordFunctorCreationStacks()
-        
+
         if __dev__ or self.config.GetBool('want-e3-hacks', False):
             if self.config.GetBool('track-gui-items', True):
                 # dict of guiId to gui item, for tracking down leaks
@@ -372,6 +372,7 @@ class ShowBase(DirectObject.DirectObject):
         # Now hang a hook on the window-event from Panda.  This allows
         # us to detect when the user resizes, minimizes, or closes the
         # main window.
+        self.__prevWindowProperties = None
         self.accept('window-event', self.windowEvent)
 
         # Transition effects (fade, iris, etc)
@@ -386,7 +387,7 @@ class ShowBase(DirectObject.DirectObject):
         sleepTime = self.config.GetFloat('client-sleep', 0.0)
         self.clientSleep = 0.0
         self.setSleep(sleepTime)
-        
+
         # Extra sleep for running 4+ clients on a single machine
         # adds a sleep right after the main render in igloop
         # tends to even out the frame rate and keeps it from going
@@ -414,7 +415,7 @@ class ShowBase(DirectObject.DirectObject):
 
         # Start IGLOOP
         self.restart()
-        
+
     # add a collision traverser via pushCTrav and remove it via popCTrav
     # that way the owner of the new cTrav doesn't need to hold onto the
     # previous one in order to put it back
@@ -427,7 +428,7 @@ class ShowBase(DirectObject.DirectObject):
     def __setupProfile(self):
         """ Sets up the Python profiler, if avaialable, according to
         some Panda config settings. """
-        
+
         try:
             import profile, pstats
         except ImportError:
@@ -499,7 +500,7 @@ class ShowBase(DirectObject.DirectObject):
 
         vfs = VirtualFileSystem.getGlobalPtr()
         vfs.unmountAll()
-        
+
 
     def exitfunc(self):
         """
@@ -609,7 +610,7 @@ class ShowBase(DirectObject.DirectObject):
 
         If requireWindow is true, it means that the function should
         raise an exception if the window fails to open correctly.
-        
+
         """
 
         # Save this lambda here for convenience; we'll use it to call
@@ -622,19 +623,19 @@ class ShowBase(DirectObject.DirectObject):
             keepCamera = keepCamera, scene = scene, stereo = stereo,
             unexposedDraw = unexposedDraw,
             callbackWindowDict = callbackWindowDict)
-        
+
         if self.win:
             # If we've already opened a window before, this is just a
             # pass-through to _doOpenWindow().
             win = func()
             self.graphicsEngine.openWindows()
             return win
-        
+
         if type is None:
             type = self.windowType
         if requireWindow is None:
             requireWindow = self.requireWindow
-            
+
         win = func()
 
         # Give the window a chance to truly open.
@@ -699,7 +700,7 @@ class ShowBase(DirectObject.DirectObject):
             # GSG from that buffer.
             host = gsg
             gsg = gsg.getGsg()
-            
+
         # If we are using DirectX, force a new GSG to be created,
         # since at the moment DirectX seems to misbehave if we do
         # not do this.  This will cause a delay while all textures
@@ -764,7 +765,7 @@ class ShowBase(DirectObject.DirectObject):
                 func = callbackWindowDict.get(callbackName, None)
                 if not func:
                     continue
-                
+
                 setCallbackName = 'set%sCallback' % (callbackName)
                 setCallback = getattr(win, setCallbackName)
                 setCallback(PythonCallbackObject(func))
@@ -804,10 +805,10 @@ class ShowBase(DirectObject.DirectObject):
         pointer to None.
         """
         win.setActive(False)
-        
+
         # First, remove all of the cameras associated with display
         # regions on the window.
-        numRegions = win.getNumDisplayRegions()        
+        numRegions = win.getNumDisplayRegions()
         for i in range(numRegions):
             dr = win.getDisplayRegion(i)
             # [gjeon] remove drc in base.direct.drList
@@ -816,9 +817,9 @@ class ShowBase(DirectObject.DirectObject):
                     if drc.cam == dr.getCamera():
                         base.direct.drList.displayRegionList.remove(drc)
                         break
-                    
+
             cam = NodePath(dr.getCamera())
-            
+
             dr.setCamera(NodePath())
 
             if not cam.isEmpty() and \
@@ -905,7 +906,7 @@ class ShowBase(DirectObject.DirectObject):
         closed window).
         """
         keepCamera = kw.get('keepCamera', False)
-        
+
         success = 1
         oldWin = self.win
         oldLens = self.camLens
@@ -1066,7 +1067,7 @@ class ShowBase(DirectObject.DirectObject):
         self.aspect2d = self.render2d.attachNewNode(PGTop("aspect2d"))
         self.aspect2d.setScale(1.0 / aspectRatio, 1.0, 1.0)
 
-        self.a2dBackground = self.aspect2d.attachNewNode("a2dBackground") 
+        self.a2dBackground = self.aspect2d.attachNewNode("a2dBackground")
 
         # It's important to know the bounds of the aspect2d screen.
         self.a2dTop = 1.0
@@ -1110,7 +1111,7 @@ class ShowBase(DirectObject.DirectObject):
         self.a2dBottomLeftNs.setPos(self.a2dLeft, 0, self.a2dBottom)
         self.a2dBottomRight.setPos(self.a2dRight, 0, self.a2dBottom)
         self.a2dBottomRightNs.setPos(self.a2dRight, 0, self.a2dBottom)
-        
+
         # This special root, pixel2d, uses units in pixels that are relative
         # to the window. The upperleft corner of the window is (0, 0),
         # the lowerleft corner is (xsize, -ysize), in this coordinate system.
@@ -1182,7 +1183,7 @@ class ShowBase(DirectObject.DirectObject):
         self.a2dpTopRight.setPos(self.a2dpRight, 0, self.a2dpTop)
         self.a2dpBottomLeft.setPos(self.a2dpLeft, 0, self.a2dpBottom)
         self.a2dpBottomRight.setPos(self.a2dpRight, 0, self.a2dpBottom)
-        
+
         # This special root, pixel2d, uses units in pixels that are relative
         # to the window. The upperleft corner of the window is (0, 0),
         # the lowerleft corner is (xsize, -ysize), in this coordinate system.
@@ -1229,7 +1230,7 @@ class ShowBase(DirectObject.DirectObject):
 
         if aspectRatio == 0:
             return 1
-        
+
         return aspectRatio
 
     def getSize(self, win = None):
@@ -1337,7 +1338,7 @@ class ShowBase(DirectObject.DirectObject):
         # we will if clearDepth is specified.
         if clearDepth:
             dr.setClearDepthActive(1)
-            
+
         if clearColor:
             dr.setClearColorActive(1)
             dr.setClearColor(clearColor)
@@ -1370,7 +1371,7 @@ class ShowBase(DirectObject.DirectObject):
             cam2dNode = Camera('cam2d_' + cameraName)
         else:
             cam2dNode = Camera('cam2d')
-            
+
         if lens == None:
             lens = OrthographicLens()
             lens.setFilmSize(right - left, top - bottom)
@@ -1480,12 +1481,12 @@ class ShowBase(DirectObject.DirectObject):
 
         if fMultiWin:
             return bts[0]
-        
+
         self.buttonThrowers = bts[:]
         self.pointerWatcherNodes = pws[:]
 
         self.mouseWatcher = self.buttonThrowers[0].getParent()
-        self.mouseWatcherNode = self.mouseWatcher.node()  
+        self.mouseWatcherNode = self.mouseWatcher.node()
 
         if self.mouseInterface:
             self.mouseInterface.reparentTo(self.mouseWatcher)
@@ -1553,7 +1554,7 @@ class ShowBase(DirectObject.DirectObject):
                 # track the left and right halves of the screen
                 # individually.
                 mw.node().setDisplayRegion(win.getOverlayDisplayRegion())
-                
+
             mb = mw.node().getModifierButtons()
             mb.addButton(KeyboardButton.shift())
             mb.addButton(KeyboardButton.control())
@@ -1675,7 +1676,7 @@ class ShowBase(DirectObject.DirectObject):
             port = -1
         PStatClient.connect(hostname, port)
         return PStatClient.isConnected()
-                
+
 
     def addSfxManager(self, extraSfxManager):
         # keep a list of sfx manager objects to apply settings to,
@@ -1841,11 +1842,11 @@ class ShowBase(DirectObject.DirectObject):
         case we're responsible for taking out Panda's garbage from
         time to time.  This is not to be confused with Python's
         garbage collection.  """
-        
+
         TransformState.garbageCollect()
         RenderState.garbageCollect()
         return Task.cont
-        
+
     def __igLoop(self, state):
         # We render the watch variables for the onScreenDebug as soon
         # as we reasonably can before the renderFrame().
@@ -1881,7 +1882,7 @@ class ShowBase(DirectObject.DirectObject):
 
         # Lerp stuff needs this event, and it must be generated in
         # C++, not in Python.
-        throwNewFrame()
+        throw_new_frame()
         return Task.cont
 
 
@@ -1927,8 +1928,8 @@ class ShowBase(DirectObject.DirectObject):
 
         # Lerp stuff needs this event, and it must be generated in
         # C++, not in Python.
-        throwNewFrame()
-        return Task.cont    
+        throw_new_frame()
+        return Task.cont
 
     def restart(self,clusterSync=False,cluster=None):
         self.shutdown()
@@ -2196,7 +2197,7 @@ class ShowBase(DirectObject.DirectObject):
 
         self.showVertices = self.cam.attachNewNode(cam)
         dr.setCamera(self.showVertices)
-        
+
 
     def oobe(self, cam = None):
         """
@@ -2221,7 +2222,7 @@ class ShowBase(DirectObject.DirectObject):
         """
         if cam is None:
             cam = self.cam
-            
+
         # If oobeMode was never set, set it to false and create the
         # structures we need to implement OOBE.
         if not hasattr(self, 'oobeMode'):
@@ -2614,9 +2615,15 @@ class ShowBase(DirectObject.DirectObject):
             return Task.cont
 
     def windowEvent(self, win):
-        if win == self.win:
-            properties = win.getProperties()
-            self.notify.info("Got window event: %s" % (repr(properties)))
+        if win != self.win:
+            # This event isn't about our window.
+            return
+
+        properties = win.getProperties()
+        if properties != self.__prevWindowProperties:
+            self.__prevWindowProperties = properties
+
+            self.notify.debug("Got window event: %s" % (repr(properties)))
             if not properties.getOpen():
                 # If the user closes the main window, we should exit.
                 self.notify.info("User closed main window.")
@@ -2645,7 +2652,7 @@ class ShowBase(DirectObject.DirectObject):
             # If we have not forced the aspect ratio, let's see if it has
             # changed and update the camera lenses and aspect2d parameters
             self.adjustWindowAspectRatio(self.getAspectRatio())
-            
+
             # Temporary hasattr for old Pandas
             if not hasattr(win, 'getSbsLeftXSize'):
                 self.pixel2d.setScale(2.0 / win.getXSize(), 1.0, 2.0 / win.getYSize())
@@ -2659,7 +2666,7 @@ class ShowBase(DirectObject.DirectObject):
         windowEvent(), but it may also be called to explicitly adjust
         the aspect ratio of the render/render2d DisplayRegion, by a
         class that has redefined these. """
-        
+
         if self.__configAspectRatio:
             aspectRatio = self.__configAspectRatio
 
@@ -2694,13 +2701,13 @@ class ShowBase(DirectObject.DirectObject):
                 self.a2dpTop = 1.0
                 self.a2dpBottom = -1.0
                 self.a2dpLeft = -aspectRatio
-                self.a2dpRight = aspectRatio                        
+                self.a2dpRight = aspectRatio
 
             # Reposition the aspect2d marker nodes
             self.a2dTopCenter.setPos(0, self.a2dTop, self.a2dTop)
             self.a2dBottomCenter.setPos(0, self.a2dBottom, self.a2dBottom)
             self.a2dLeftCenter.setPos(self.a2dLeft, 0, 0)
-            self.a2dRightCenter.setPos(self.a2dRight, 0, 0)                    
+            self.a2dRightCenter.setPos(self.a2dRight, 0, 0)
             self.a2dTopLeft.setPos(self.a2dLeft, self.a2dTop, self.a2dTop)
             self.a2dTopRight.setPos(self.a2dRight, self.a2dTop, self.a2dTop)
             self.a2dBottomLeft.setPos(self.a2dLeft, self.a2dBottom, self.a2dBottom)
@@ -2710,17 +2717,17 @@ class ShowBase(DirectObject.DirectObject):
             self.a2dTopCenterNs.setPos(0, self.a2dTop, self.a2dTop)
             self.a2dBottomCenterNs.setPos(0, self.a2dBottom, self.a2dBottom)
             self.a2dLeftCenterNs.setPos(self.a2dLeft, 0, 0)
-            self.a2dRightCenterNs.setPos(self.a2dRight, 0, 0)                    
+            self.a2dRightCenterNs.setPos(self.a2dRight, 0, 0)
             self.a2dTopLeftNs.setPos(self.a2dLeft, self.a2dTop, self.a2dTop)
             self.a2dTopRightNs.setPos(self.a2dRight, self.a2dTop, self.a2dTop)
             self.a2dBottomLeftNs.setPos(self.a2dLeft, self.a2dBottom, self.a2dBottom)
-            self.a2dBottomRightNs.setPos(self.a2dRight, self.a2dBottom, self.a2dBottom)                    
+            self.a2dBottomRightNs.setPos(self.a2dRight, self.a2dBottom, self.a2dBottom)
 
             # Reposition the aspect2dp marker nodes
             self.a2dpTopCenter.setPos(0, self.a2dpTop, self.a2dpTop)
             self.a2dpBottomCenter.setPos(0, self.a2dpBottom, self.a2dpBottom)
             self.a2dpLeftCenter.setPos(self.a2dpLeft, 0, 0)
-            self.a2dpRightCenter.setPos(self.a2dpRight, 0, 0)                  
+            self.a2dpRightCenter.setPos(self.a2dpRight, 0, 0)
             self.a2dpTopLeft.setPos(self.a2dpLeft, self.a2dpTop, self.a2dpTop)
             self.a2dpTopRight.setPos(self.a2dpRight, self.a2dpTop, self.a2dpTop)
             self.a2dpBottomLeft.setPos(self.a2dpLeft, self.a2dpBottom, self.a2dpBottom)
@@ -2728,7 +2735,7 @@ class ShowBase(DirectObject.DirectObject):
 
             # If anybody needs to update their GUI, put a callback on this event
             messenger.send("aspectRatioChanged")
-        
+
     def userExit(self):
         # The user has requested we exit the program.  Deal with this.
         if self.exitFunc:
@@ -2752,12 +2759,12 @@ class ShowBase(DirectObject.DirectObject):
         This sets up a wxTimer callback so that Panda still gets
         updated, but wxPython owns the main loop (which seems to make
         it happier than the other way around). """
-        
+
         if self.wxApp:
             # Don't do this twice.
             return
 
-        initAppForGui()
+        init_app_for_gui()
 
         import wx
         # Create a new base.wxApp.
@@ -2767,7 +2774,7 @@ class ShowBase(DirectObject.DirectObject):
             # Put wxPython in charge of the main loop.  It really
             # seems to like this better; some features of wx don't
             # work properly unless this is true.
-            
+
             # Set a timer to run the Panda frame 60 times per second.
             wxFrameRate = ConfigVariableDouble('wx-frame-rate', 60.0)
             self.wxTimer = wx.Timer(self.wxApp)
@@ -2802,19 +2809,19 @@ class ShowBase(DirectObject.DirectObject):
             # This happens when the wxTimer expires while igLoop is
             # rendering.  Ignore it.
             return
-        
+
         self.taskMgr.step()
 
     def wxRun(self):
         """ This method replaces base.run() after we have called
         spawnWxLoop().  Since at this point wxPython now owns the main
         loop, this method is a call to wxApp.MainLoop(). """
-        
+
         if Thread.getCurrentThread().getCurrentTask():
             # This happens in the p3d environment during startup.
             # Ignore it.
             return
-        
+
         self.wxApp.MainLoop()
 
     def startTk(self, fWantTk = True):
@@ -2829,7 +2836,7 @@ class ShowBase(DirectObject.DirectObject):
         This sets up a timer callback so that Panda still gets
         updated, but Tkinter owns the main loop (which seems to make
         it happier than the other way around). """
-        
+
         if self.tkRoot:
             # Don't do this twice.
             return
@@ -2841,13 +2848,13 @@ class ShowBase(DirectObject.DirectObject):
         self.tkRoot = Pmw.initialise()
         __builtin__.tkroot = self.tkRoot
 
-        initAppForGui()
+        init_app_for_gui()
 
         if ConfigVariableBool('tk-main-loop', True):
             # Put Tkinter in charge of the main loop.  It really
             # seems to like this better; the GUI otherwise becomes
             # largely unresponsive on Mac OS X unless this is true.
-            
+
             # Set a timer to run the Panda frame 60 times per second.
             tkFrameRate = ConfigVariableDouble('tk-frame-rate', 60.0)
             self.tkDelay = int(1000.0 / tkFrameRate.getValue())
@@ -2885,12 +2892,12 @@ class ShowBase(DirectObject.DirectObject):
         """ This method replaces base.run() after we have called
         spawnTkLoop().  Since at this point Tkinter now owns the main
         loop, this method is a call to tkRoot.mainloop(). """
-        
+
         if Thread.getCurrentThread().getCurrentTask():
             # This happens in the p3d environment during startup.
             # Ignore it.
             return
-        
+
         self.tkRoot.mainloop()
 
     def startDirect(self, fWantDirect = 1, fWantTk = 1, fWantWx = 0):
@@ -2913,7 +2920,7 @@ class ShowBase(DirectObject.DirectObject):
         if self.__directStarted:
             return
         self.__directStarted = False
-        
+
         # Start Tk, Wx and DIRECT if specified by Config.prc
         fTk = self.config.GetBool('want-tk', 0)
         fWx = self.config.GetBool('want-wx', 0)
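
For illustration only, here is a minimal sketch of the wxPython main-loop handoff that spawnWxLoop()/wxRun() above provide; it assumes wxPython is installed and uses nothing beyond the default ShowBase window.

    from direct.showbase.ShowBase import ShowBase

    base = ShowBase()

    # Hand the main loop to wxPython; Panda frames are then pumped by a
    # wx.Timer at the 'wx-frame-rate' config value (60 per second by default,
    # as in the code above).
    base.spawnWxLoop()

    # wxRun() stands in for base.run() once wxPython owns the main loop.
    base.wxRun()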

+ 24 - 23
direct/src/showbase/VFSImporter.py

@@ -1,14 +1,13 @@
+__all__ = ['register', 'sharedPackages',
+           'reloadSharedPackage', 'reloadSharedPackages']
+
 from panda3d.core import Filename, VirtualFileSystem, VirtualFileMountSystem, OFileStream, copyStream
 import sys
-import os
 import marshal
 import imp
 import types
 import __builtin__
 
-__all__ = ['register', 'sharedPackages',
-           'reloadSharedPackage', 'reloadSharedPackages']
-
 # The sharedPackages dictionary lists all of the "shared packages",
 # special Python packages that automatically span multiple directories
 # via magic in the VFSImporter.  You can make a package "shared"
@@ -76,14 +75,13 @@ class VFSImporter:
         for desc in imp.get_suffixes():
             if desc[2] != imp.C_EXTENSION:
                 continue
-            
+
             filename = Filename(path)
             filename.setExtension(desc[0][1:])
             vfile = vfs.getFile(filename, True)
             if vfile:
                 return VFSLoader(dir_path, vfile, filename, FTExtensionModule,
                                  desc = desc)
-        
 
         # Finally, consider a package, i.e. a directory containing
         # __init__.py.
@@ -105,7 +103,7 @@ class VFSImporter:
 class VFSLoader:
     """ The second part of VFSImporter, this is created for a
     particular .py file or directory. """
-    
+
     def __init__(self, dir_path, vfile, filename, fileType,
                  desc = None, packagePath = None):
         self.dir_path = dir_path
@@ -116,7 +114,7 @@ class VFSLoader:
         self.fileType = fileType
         self.desc = desc
         self.packagePath = packagePath
-    
+
     def load_module(self, fullname, loadingShared = False):
         #print >>sys.stderr, "load_module(%s), dir_path = %s, filename = %s" % (fullname, self.dir_path, self.filename)
         if self.fileType == FTFrozenModule:
@@ -136,11 +134,11 @@ class VFSLoader:
                 loader = importer.find_module(fullname, path = path)
                 assert loader
                 return loader.load_module(fullname)
-        
+
         code = self._read_code()
         if not code:
             raise ImportError, 'No Python code in %s' % (fullname)
-        
+
         mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
         mod.__file__ = self.filename.toOsSpecific()
         mod.__loader__ = self
@@ -169,15 +167,15 @@ class VFSLoader:
 
     def get_filename(self, fullname):
         return self.filename.toOsSpecific()
-        
+
     def _read_source(self):
         """ Returns the Python source for this file, if it is
         available, or None if it is not.  May raise IOError. """
-        
+
         if self.fileType == FTPythonCompiled or \
            self.fileType == FTExtensionModule:
             return None
-        
+
         filename = Filename(self.filename)
         filename.setExtension('py')
         filename.setText()
@@ -281,7 +279,7 @@ class VFSLoader:
     def _loadPyc(self, vfile, timestamp):
         """ Reads and returns the marshal data from a .pyc file.
         Raises ValueError if there is a problem. """
-        
+
         code = None
         data = vfile.readFile(True)
         if data[:4] == imp.get_magic():
@@ -294,13 +292,13 @@ class VFSLoader:
         else:
             raise ValueError, 'Bad magic number in %s' % (vfile)
         return code
-        
+
 
     def _compile(self, filename, source):
         """ Compiles the Python source code to a code object and
         attempts to write it to an appropriate .pyc file.  May raise
         SyntaxError or other errors generated by the compiler. """
-        
+
         if source and source[-1] != '\n':
             source = source + '\n'
         code = __builtin__.compile(source, filename.toOsSpecific(), 'exec')
@@ -335,7 +333,7 @@ class VFSSharedImporter:
 
     def __init__(self):
         pass
-    
+
     def find_module(self, fullname, path = None, reload = False):
         #print >>sys.stderr, "shared find_module(%s), path = %s" % (fullname, path)
 
@@ -364,14 +362,14 @@ class VFSSharedImporter:
         for dir in path:
             if dir in excludePaths:
                 continue
-            
+
             importer = sys.path_importer_cache.get(dir, None)
             if importer is None:
                 try:
                     importer = VFSImporter(dir)
                 except ImportError:
                     continue
-                
+
                 sys.path_importer_cache[dir] = importer
 
             try:
@@ -391,6 +389,9 @@ class VFSSharedImporter:
         """ Returns the directory name that the indicated
         conventionally-loaded module must have been loaded from. """
 
+        if not hasattr(mod, '__file__') or mod.__file__ is None:
+            return None
+
         fullname = mod.__name__
         dirname = Filename.fromOsSpecific(mod.__file__).getDirname()
 
@@ -414,15 +415,15 @@ class VFSSharedImporter:
 
         # Couldn't figure it out.
         return None
-        
+
 class VFSSharedLoader:
     """ The second part of VFSSharedImporter, this imports a list of
     packages and combines them. """
-    
+
     def __init__(self, loaders, reload):
         self.loaders = loaders
         self.reload = reload
-    
+
     def load_module(self, fullname):
         #print >>sys.stderr, "shared load_module(%s), loaders = %s" % (fullname, map(lambda l: l.dir_path, self.loaders))
 
@@ -475,7 +476,7 @@ def register():
         _registered = True
         sys.path_hooks.insert(0, VFSImporter)
         sys.meta_path.insert(0, VFSSharedImporter())
-        
+
         # Blow away the importer cache, so we'll come back through the
         # VFSImporter for every folder in the future, even those
         # folders that previously were loaded directly.
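
As a sketch of how the register() call above fits into an application: mount a multifile, put the mount point on sys.path, and let the VFS importer find the code. The multifile name, mount point, and package name are placeholders, and marking a package "shared" by keying the sharedPackages dict is an assumption based on the module docstring.

    import sys
    from panda3d.core import VirtualFileSystem, Filename
    from direct.showbase import VFSImporter

    VFSImporter.register()                 # install the path hook and meta importer

    vfs = VirtualFileSystem.getGlobalPtr()
    vfs.mount(Filename('assets.mf'), '/mf', VirtualFileSystem.MFReadOnly)

    sys.path.insert(0, '/mf')              # modules inside the multifile are now importable
    VFSImporter.sharedPackages['mygame'] = True   # assumed convention for a shared package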

+ 11 - 22
direct/src/showutil/FreezeTool.py

@@ -17,9 +17,8 @@ except ImportError:
     p3extend_frozen = None
 
 import direct
-from pandac.PandaModules import *
+from panda3d.core import *
 from pandac.extension_native_helpers import dll_suffix, dll_ext
-import panda3d
 
 # Check to see if we are running python_d, which implies we have a
 # debug build, and we have to build the module with debug options.
@@ -578,6 +577,8 @@ class Freezer:
         if self.platform.startswith('win'):
             self.objectExtension = '.obj'
 
+        self.keepTemporaryFiles = True
+
         # Change any of these to change the generated startup and glue
         # code.
         self.frozenMainCode = frozenMainCode
@@ -912,7 +913,7 @@ class Freezer:
                 continue
             if origName in self.modules:
                 continue
-                
+
             # This module is missing.  Let it be missing in the
             # runtime also.
             self.modules[origName] = self.ModuleDef(origName, exclude = True,
@@ -1110,7 +1111,7 @@ class Freezer:
             # The "module" may end in __init__, but that really means
             # the parent directory.
             dirnames = dirnames[:-1]
-            
+
         self.__addPythonDirs(multifile, moduleDirs, dirnames[:-1], compressionLevel)
 
         filename = '/'.join(dirnames)
@@ -1328,10 +1329,11 @@ class Freezer:
         try:
             compileFunc(filename, basename)
         finally:
-            if (os.path.exists(filename)):
-                os.unlink(filename)
-            if (os.path.exists(basename + self.objectExtension)):
-                os.unlink(basename + self.objectExtension)
+            if not self.keepTemporaryFiles:
+                if os.path.exists(filename):
+                    os.unlink(filename)
+                if os.path.exists(basename + self.objectExtension):
+                    os.unlink(basename + self.objectExtension)
 
         return target
 
@@ -1378,19 +1380,6 @@ class PandaModuleFinder(modulefinder.ModuleFinder):
     def __init__(self, *args, **kw):
         modulefinder.ModuleFinder.__init__(self, *args, **kw)
 
-    def import_module(self, partname, fqname, parent):
-        if parent and parent.__name__ == 'panda3d':
-            # A special case: map a reference to the "panda3d.blah"
-            # module into the appropriate Panda3D dll.
-            m = getattr(panda3d, partname, None)
-            if m and hasattr(m, '__libraries__'):
-                libname = m.__libraries__[-1]
-                partname = libname
-                fqname = libname
-                parent = None
-
-        return modulefinder.ModuleFinder.import_module(self, partname, fqname, parent)
-
     def find_module(self, name, path, parent=None):
         try:
             return modulefinder.ModuleFinder.find_module(self, name, path, parent = parent)
@@ -1425,7 +1414,7 @@ class PandaModuleFinder(modulefinder.ModuleFinder):
             m = self.add_module(fqname)
             m.__file__ = '<frozen>'
             if isPackage:
-                m.__path__ = pathname
+                m.__path__ = [pathname]
             co = marshal.loads(co)
             if self.replace_paths:
                 co = self.replace_paths_in_code(co)
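
Regarding the new keepTemporaryFiles switch above, a hedged sketch of toggling it; the module name and output basename are placeholders, and the addModule()/done()/generateCode() calls are assumed from the rest of FreezeTool.py rather than shown in this hunk.

    from direct.showutil.FreezeTool import Freezer

    freezer = Freezer()
    # With the change above, compile intermediates are now kept by default;
    # set this back to False to restore the old clean-up behaviour.
    freezer.keepTemporaryFiles = False

    # Assumed Freezer workflow (defined outside this hunk): collect a module
    # and write out frozen startup code.
    freezer.addModule('mygame')
    freezer.done(addStartupModules=True)
    freezer.generateCode('mygame_frozen', compileToExe=True)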

+ 28 - 28
direct/src/stdpy/file.py

@@ -10,10 +10,10 @@ __all__ = [
     'execfile',
     ]
 
-from pandac import PandaModules as pm
+from panda3d import core
 import types
 
-_vfs = pm.VirtualFileSystem.getGlobalPtr()
+_vfs = core.VirtualFileSystem.getGlobalPtr()
 
 class file:
     def __init__(self, filename, mode = 'r', bufsize = None,
@@ -36,14 +36,14 @@ class file:
         readMode = False
         writeMode = False
 
-        if isinstance(filename, pm.Istream) or isinstance(filename, pm.Ostream):
+        if isinstance(filename, core.Istream) or isinstance(filename, core.Ostream):
             # If we were given a stream instead of a filename, assign
             # it directly.
             self.__stream = filename
-            readMode = isinstance(filename, pm.Istream)
-            writeMode = isinstance(filename, pm.Ostream)
+            readMode = isinstance(filename, core.Istream)
+            writeMode = isinstance(filename, core.Ostream)
 
-        elif isinstance(filename, pm.VirtualFile):
+        elif isinstance(filename, core.VirtualFile):
             # We can also "open" a VirtualFile object for reading.
             self.__stream = filename.openReadFile(autoUnwrap)
             if not self.__stream:
@@ -57,10 +57,10 @@ class file:
             if isinstance(filename, types.StringTypes):
                 # If a raw string is given, assume it's an os-specific
                 # filename.
-                filename = pm.Filename.fromOsSpecific(filename)
+                filename = core.Filename.fromOsSpecific(filename)
             else:
                 # If a Filename is given, make a writable copy anyway.
-                filename = pm.Filename(filename)
+                filename = core.Filename(filename)
 
             self.filename = filename
             self.name = filename.toOsSpecific()
@@ -97,7 +97,7 @@ class file:
                     message = 'Could not open %s for writing' % (filename)
                     raise IOError, message
                 writeMode = True
-                
+
             elif mode == 'a':
                 self.__stream = _vfs.openAppendFile(filename)
                 if not self.__stream:
@@ -142,14 +142,14 @@ class file:
             self.__needsVfsClose = True
 
         if readMode:
-            self.__reader = pm.StreamReader(self.__stream, False)
+            self.__reader = core.StreamReader(self.__stream, False)
         if writeMode:
-            self.__writer = pm.StreamWriter(self.__stream, False)
+            self.__writer = core.StreamWriter(self.__stream, False)
             self.__lastWrite = True
 
     def __del__(self):
         self.close()
-        
+
     def close(self):
         if self.__needsVfsClose:
             if self.__reader and self.__writer:
@@ -158,13 +158,13 @@ class file:
                 _vfs.closeReadFile(self.__stream)
             else:  # self.__writer:
                 _vfs.closeWriteFile(self.__stream)
-                
+
             self.__needsVfsClose = False
         self.__stream = None
         self.__needsVfsClose = False
         self.__reader = None
         self.__writer = None
-        
+
     def flush(self):
         if self.__stream:
             self.__stream.clear()  # clear eof flag
@@ -188,7 +188,7 @@ class file:
             # The stream is open only in write mode.
             message = 'Attempt to read from write-only stream'
             raise IOError, message
-        
+
         self.__stream.clear()  # clear eof flag
         self.__lastWrite = False
         if size >= 0:
@@ -199,7 +199,7 @@ class file:
             while not self.__stream.eof():
                 result += self.__reader.extractBytes(1024)
         return result
-        
+
     def readline(self, size = -1):
         if not self.__reader:
             if not self.__writer:
@@ -213,7 +213,7 @@ class file:
         self.__stream.clear()  # clear eof flag
         self.__lastWrite = False
         return self.__reader.readline()
-        
+
     def readlines(self, sizehint = -1):
         lines = []
         line = self.readline()
@@ -241,7 +241,7 @@ class file:
                 return self.__stream.tellg()
         message = 'I/O operation on closed file'
         raise ValueError, message
-    
+
     def truncate(self):
         """ Sorry, this isn't supported by Panda's low-level I/O,
         because it isn't supported by the standard C++ library. """
@@ -287,7 +287,7 @@ open = file
 def listdir(path):
     """ Implements os.listdir over vfs. """
     files = []
-    dirlist = _vfs.scanDirectory(pm.Filename.fromOsSpecific(path))
+    dirlist = _vfs.scanDirectory(core.Filename.fromOsSpecific(path))
     if dirlist is None:
         message = 'No such file or directory: %s' % (path)
         raise OSError, message
@@ -338,31 +338,31 @@ def join(path, *args):
     return path
 
 def isfile(path):
-    return _vfs.isRegularFile(pm.Filename.fromOsSpecific(path))
+    return _vfs.isRegularFile(core.Filename.fromOsSpecific(path))
 
 def isdir(path):
-    return _vfs.isDirectory(pm.Filename.fromOsSpecific(path))
+    return _vfs.isDirectory(core.Filename.fromOsSpecific(path))
 
 def exists(path):
-    return _vfs.exists(pm.Filename.fromOsSpecific(path))
+    return _vfs.exists(core.Filename.fromOsSpecific(path))
 
 def lexists(path):
-    return _vfs.exists(pm.Filename.fromOsSpecific(path))
-    
+    return _vfs.exists(core.Filename.fromOsSpecific(path))
+
 def getmtime(path):
-    file = _vfs.getFile(pm.Filename.fromOsSpecific(path), True)
+    file = _vfs.getFile(core.Filename.fromOsSpecific(path), True)
     if not file:
         raise os.error
     return file.getTimestamp()
-    
+
 def getsize(path):
-    file = _vfs.getFile(pm.Filename.fromOsSpecific(path), True)
+    file = _vfs.getFile(core.Filename.fromOsSpecific(path), True)
     if not file:
         raise os.error
     return file.getFileSize()
 
 def execfile(path, globals=None, locals=None):
-    file = _vfs.getFile(pm.Filename.fromOsSpecific(path), True)
+    file = _vfs.getFile(core.Filename.fromOsSpecific(path), True)
     if not file:
         raise os.error
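
To illustrate the VFS-backed replacements above, a short sketch of listing and reading files through direct.stdpy.file; the '/mf' mount point and file name are placeholders and would need to be mounted first.

    from direct.stdpy.file import open, listdir, exists, isfile, join, getsize

    if exists('/mf'):
        for name in listdir('/mf'):
            path = join('/mf', name)
            if isfile(path):
                print path, getsize(path)   # metadata comes from the VFS, not the OS

    # open() here returns the VFS-backed file class defined above.
    f = open('/mf/settings.prc', 'r')       # illustrative path
    data = f.read()
    f.close()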
 

+ 3 - 3
direct/src/stdpy/pickle.py

@@ -23,7 +23,7 @@ support extensions of this nature. """
 
 from types import *
 from copy_reg import dispatch_table
-from pandac.PandaModules import BamWriter, BamReader
+from panda3d.core import BamWriter, BamReader
 
 # A funny replacement for "import pickle" so we don't get confused
 # with the local pickle.py.
@@ -37,7 +37,7 @@ class Pickler(pickle.Pickler):
 
     # We have to duplicate most of the save() method, so we can add
     # support for __reduce_persist__().
-    
+
     def save(self, obj):
         # Check for persistent id (defined by a subclass)
         pid = self.persistent_id(obj)
@@ -116,7 +116,7 @@ class Unpickler(pickle.Unpickler):
 
     # Duplicate the load_reduce() function, to provide a special case
     # for the reduction function.
-    
+
     def load_reduce(self):
         stack = self.stack
         args = stack.pop()
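
As an aside on the Pickler/Unpickler specializations above, a sketch of round-tripping a Panda object in memory; the assumption is that the object (here a NodePath) supports the __reduce_persist__() protocol these classes add.

    from StringIO import StringIO
    from direct.stdpy import pickle
    from panda3d.core import NodePath

    np = NodePath('example')

    buf = StringIO()
    pickle.Pickler(buf).dump(np)            # uses __reduce_persist__ where available

    restored = pickle.Unpickler(StringIO(buf.getvalue())).load()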

+ 30 - 31
direct/src/stdpy/thread.py

@@ -14,13 +14,12 @@ __all__ = [
     'forceYield', 'considerYield',
     ]
 
-# Import PandaModules as pm, so we don't have any namespace collisions.
-from pandac import PandaModules as pm
+from panda3d import core
 
 # These methods are defined in Panda, and are particularly useful if
 # you may be running in Panda's SIMPLE_THREADS compilation mode.
-forceYield = pm.Thread.forceYield
-considerYield = pm.Thread.considerYield
+forceYield = core.Thread.forceYield
+considerYield = core.Thread.considerYield
 
 class error(StandardError):
     pass
@@ -31,10 +30,10 @@ class LockType:
     provide the described Python lock semantics.  In particular, this
     allows a different thread to release the lock than the one that
     acquired it. """
-    
+
     def __init__(self):
-        self.__lock = pm.Mutex('PythonLock')
-        self.__cvar = pm.ConditionVar(self.__lock)
+        self.__lock = core.Mutex('PythonLock')
+        self.__cvar = core.ConditionVar(self.__lock)
         self.__locked = False
 
     def acquire(self, waitflag = 1):
@@ -44,7 +43,7 @@ class LockType:
                 return False
             while self.__locked:
                 self.__cvar.wait()
-                
+
             self.__locked = True
             return True
 
@@ -56,16 +55,16 @@ class LockType:
         try:
             if not self.__locked:
                 raise error, 'Releasing unheld lock.'
-                
+
             self.__locked = False
             self.__cvar.notify()
 
         finally:
             self.__lock.release()
-        
+
     def locked(self):
         return self.__locked
-    
+
     __enter__ = acquire
 
     def __exit__(self, t, v, tb):
@@ -73,7 +72,7 @@ class LockType:
 
 _threads = {}
 _nextThreadId = 0
-_threadsLock = pm.Mutex('thread._threadsLock')
+_threadsLock = core.Mutex('thread._threadsLock')
 
 def start_new_thread(function, args, kwargs = {}, name = None):
     def threadFunc(threadId, function = function, args = args, kwargs = kwargs):
@@ -94,31 +93,31 @@ def start_new_thread(function, args, kwargs = {}, name = None):
 
         if name is None:
             name = 'PythonThread-%s' % (threadId)
-            
-        thread = pm.PythonThread(threadFunc, [threadId], name, name)
+
+        thread = core.PythonThread(threadFunc, [threadId], name, name)
         thread.setPythonData(threadId)
         _threads[threadId] = (thread, {}, None)
-        
-        thread.start(pm.TPNormal, False)
+
+        thread.start(core.TPNormal, False)
         return threadId
 
     finally:
         _threadsLock.release()
 
 def _add_thread(thread, wrapper):
-    """ Adds the indicated pm.Thread object, with the indicated Python
+    """ Adds the indicated core.Thread object, with the indicated Python
     wrapper, to the thread list.  Returns the new thread ID. """
-    
+
     global _nextThreadId
     _threadsLock.acquire()
     try:
         threadId = _nextThreadId
         _nextThreadId += 1
-            
+
         thread.setPythonData(threadId)
         _threads[threadId] = (thread, {}, wrapper)
         return threadId
-        
+
     finally:
         _threadsLock.release()
 
@@ -130,7 +129,7 @@ def _get_thread_wrapper(thread, wrapperClass):
     threadId = thread.getPythonData()
     if threadId is None:
         # The thread has never been assigned a threadId.  Go assign one.
-        
+
         global _nextThreadId
         _threadsLock.acquire()
         try:
@@ -166,7 +165,7 @@ def _get_thread_locals(thread, i):
     threadId = thread.getPythonData()
     if threadId is None:
         # The thread has never been assigned a threadId.  Go assign one.
-        
+
         global _nextThreadId
         _threadsLock.acquire()
         try:
@@ -195,14 +194,14 @@ def _get_thread_locals(thread, i):
 
 def _remove_thread_id(threadId):
     """ Removes the thread with the indicated ID from the thread list. """
-    
+
     _threadsLock.acquire()
     try:
         thread, locals, wrapper = _threads[threadId]
         assert thread.getPythonData() == threadId
         del _threads[threadId]
         thread.setPythonData(None)
-        
+
     finally:
         _threadsLock.release()
 
@@ -218,7 +217,7 @@ def allocate_lock():
     return LockType()
 
 def get_ident():
-    return pm.Thread.getCurrentThread().this
+    return core.Thread.getCurrentThread().this
 
 def stack_size(size = 0):
     raise error
@@ -244,18 +243,18 @@ class _local(object):
             _threadsLock.release()
 
     def __setattr__(self, key, value):
-        d = _get_thread_locals(pm.Thread.getCurrentThread(), id(self))
+        d = _get_thread_locals(core.Thread.getCurrentThread(), id(self))
         d[key] = value
-        
+
 ##     def __getattr__(self, key):
-##         d = _get_thread_locals(pm.Thread.getCurrentThread(), id(self))
+##         d = _get_thread_locals(core.Thread.getCurrentThread(), id(self))
 ##         try:
 ##             return d[key]
 ##         except KeyError:
 ##             raise AttributeError
 
     def __getattribute__(self, key):
-        d = _get_thread_locals(pm.Thread.getCurrentThread(), id(self))
+        d = _get_thread_locals(core.Thread.getCurrentThread(), id(self))
         if key == '__dict__':
             return d
         try:
@@ -263,5 +262,5 @@ class _local(object):
         except KeyError:
             return object.__getattribute__(self, key)
 
-        
-        
+
+
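
A minimal sketch of the thread-module emulation above; the worker function and thread names are placeholders.

    from direct.stdpy import thread

    lock = thread.allocate_lock()
    results = []

    def worker(n):
        # LockType supports the context-manager protocol via the
        # __enter__/__exit__ definitions above.
        with lock:
            results.append(n * n)

    for i in range(4):
        thread.start_new_thread(worker, (i,), name='worker-%d' % i)

    # In SIMPLE_THREADS builds the workers only run when we yield.
    thread.forceYield()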

+ 48 - 51
direct/src/stdpy/threading.py

@@ -21,11 +21,8 @@ easier to use and understand.
 It is permissible to mix-and-match both threading and threading2
 within the same application. """
 
-# We import PandaModules as the name pm, so we can avoid namespace
-# collisions between native Panda objects, and our own class
-# definitions in this module. """
 import direct
-from pandac import PandaModules as pm
+from panda3d import core
 from direct.stdpy import thread as _thread
 import sys as _sys
 
@@ -55,10 +52,10 @@ class ThreadBase:
 
     def getName(self):
         return self.name
-    
+
     def is_alive(self):
         return self.__thread.isStarted()
-    
+
     def isAlive(self):
         return self.__thread.isStarted()
 
@@ -68,9 +65,9 @@ class ThreadBase:
     def setDaemon(self, daemon):
         if self.is_alive():
             raise RuntimeError
-        
+
         self.__dict__['daemon'] = daemon
-        
+
     def __setattr__(self, key, value):
         if key == 'name':
             self.setName(value)
@@ -84,8 +81,8 @@ class ThreadBase:
 # Copy these static methods from Panda's Thread object.  These are
 # useful if you may be running in Panda's SIMPLE_THREADS compilation
 # mode.
-ThreadBase.forceYield = pm.Thread.forceYield
-ThreadBase.considerYield = pm.Thread.considerYield
+ThreadBase.forceYield = core.Thread.forceYield
+ThreadBase.considerYield = core.Thread.considerYield
 
 class Thread(ThreadBase):
     """ This class provides a wrapper around Panda's PythonThread
@@ -108,21 +105,21 @@ class Thread(ThreadBase):
         self.__dict__['daemon'] = current.daemon
         self.__dict__['name'] = name
 
-        self.__thread = pm.PythonThread(self.run, None, name, name)
+        self.__thread = core.PythonThread(self.run, None, name, name)
         threadId = _thread._add_thread(self.__thread, weakref.proxy(self))
         self.__dict__['ident'] = threadId
 
     def __del__(self):
         # On interpreter shutdown, the _thread module might have
         # already been cleaned up.
-        if _thread and _thread._remove_thread_id: 
+        if _thread and _thread._remove_thread_id:
             _thread._remove_thread_id(self.ident)
 
     def start(self):
         if self.__thread.isStarted():
             raise RuntimeError
-        
-        if not self.__thread.start(pm.TPNormal, True):
+
+        if not self.__thread.start(core.TPNormal, True):
             raise RuntimeError
 
     def run(self):
@@ -132,7 +129,7 @@ class Thread(ThreadBase):
             _sys.setprofile(_setprofile_func)
 
         self.__target(*self.__args, **self.__kwargs)
-        
+
     def join(self, timeout = None):
         # We don't support a timed join here, sorry.
         assert timeout is None
@@ -146,10 +143,10 @@ class Thread(ThreadBase):
 class ExternalThread(ThreadBase):
     """ Returned for a Thread object that wasn't created by this
     interface. """
-    
+
     def __init__(self, extThread, threadId):
         ThreadBase.__init__(self)
-        
+
         self.__thread = extThread
         self.__dict__['daemon'] = True
         self.__dict__['name'] = self.__thread.getName()
@@ -157,10 +154,10 @@ class ExternalThread(ThreadBase):
 
     def start(self):
         raise RuntimeError
-    
+
     def run(self):
         raise RuntimeError
-        
+
     def join(self, timeout = None):
         raise RuntimeError
 
@@ -169,53 +166,53 @@ class ExternalThread(ThreadBase):
 
 class MainThread(ExternalThread):
     """ Returned for the MainThread object. """
-    
+
     def __init__(self, extThread, threadId):
         ExternalThread.__init__(self, extThread, threadId)
         self.__dict__['daemon'] = False
 
-class Lock(pm.Mutex):
+class Lock(core.Mutex):
     """ This class provides a wrapper around Panda's Mutex object.
     The wrapper is designed to emulate Python's own threading.Lock
     object. """
 
     def __init__(self, name = "PythonLock"):
-        pm.Mutex.__init__(self, name)
+        core.Mutex.__init__(self, name)
 
     def acquire(self, blocking = True):
         if blocking:
-            pm.Mutex.acquire(self)
+            core.Mutex.acquire(self)
             return True
         else:
-            return pm.Mutex.tryAcquire(self)
-    
+            return core.Mutex.tryAcquire(self)
+
     __enter__ = acquire
 
     def __exit__(self, t, v, tb):
         self.release()
 
-class RLock(pm.ReMutex):
+class RLock(core.ReMutex):
     """ This class provides a wrapper around Panda's ReMutex object.
     The wrapper is designed to emulate Python's own threading.RLock
     object. """
 
     def __init__(self, name = "PythonRLock"):
-        pm.ReMutex.__init__(self, name)
+        core.ReMutex.__init__(self, name)
 
     def acquire(self, blocking = True):
         if blocking:
-            pm.ReMutex.acquire(self)
+            core.ReMutex.acquire(self)
             return True
         else:
-            return pm.ReMutex.tryAcquire(self)
-    
+            return core.ReMutex.tryAcquire(self)
+
     __enter__ = acquire
 
     def __exit__(self, t, v, tb):
         self.release()
 
 
-class Condition(pm.ConditionVarFull):
+class Condition(core.ConditionVarFull):
     """ This class provides a wrapper around Panda's ConditionVarFull
     object.  The wrapper is designed to emulate Python's own
     threading.Condition object. """
@@ -229,7 +226,7 @@ class Condition(pm.ConditionVarFull):
         assert isinstance(lock, Lock)
 
         self.__lock = lock
-        pm.ConditionVarFull.__init__(self, self.__lock)
+        core.ConditionVarFull.__init__(self, self.__lock)
 
     def acquire(self, *args, **kw):
         return self.__lock.acquire(*args, **kw)
@@ -239,35 +236,35 @@ class Condition(pm.ConditionVarFull):
 
     def wait(self, timeout = None):
         if timeout is None:
-            pm.ConditionVarFull.wait(self)
+            core.ConditionVarFull.wait(self)
         else:
-            pm.ConditionVarFull.wait(self, timeout)
+            core.ConditionVarFull.wait(self, timeout)
 
     def notifyAll(self):
-        pm.ConditionVarFull.notifyAll(self)
+        core.ConditionVarFull.notifyAll(self)
 
     notify_all = notifyAll
-    
+
     __enter__ = acquire
 
     def __exit__(self, t, v, tb):
         self.release()
 
-class Semaphore(pm.Semaphore):
+class Semaphore(core.Semaphore):
     """ This class provides a wrapper around Panda's Semaphore
     object.  The wrapper is designed to emulate Python's own
     threading.Semaphore object. """
 
     def __init__(self, value = 1):
-        pm.Semaphore.__init__(self, value)
+        core.Semaphore.__init__(self, value)
 
     def acquire(self, blocking = True):
         if blocking:
-            pm.Semaphore.acquire(self)
+            core.Semaphore.acquire(self)
             return True
         else:
-            return pm.Semaphore.tryAcquire(self)
-    
+            return core.Semaphore.tryAcquire(self)
+
     __enter__ = acquire
 
     def __exit__(self, t, v, tb):
@@ -293,8 +290,8 @@ class Event:
     object. """
 
     def __init__(self):
-        self.__lock = pm.Lock("Python Event")
-        self.__cvar = pm.ConditionVarFull(self.__lock)
+        self.__lock = core.Lock("Python Event")
+        self.__cvar = core.ConditionVarFull(self.__lock)
         self.__flag = False
 
     def is_set(self):
@@ -310,7 +307,7 @@ class Event:
 
         finally:
             self.__lock.release()
-            
+
     def clear(self):
         self.__lock.acquire()
         try:
@@ -318,7 +315,7 @@ class Event:
 
         finally:
             self.__lock.release()
-            
+
     def wait(self, timeout = None):
         self.__lock.acquire()
         try:
@@ -326,15 +323,15 @@ class Event:
                 while not self.__flag:
                     self.__cvar.wait()
             else:
-                clock = pm.TrueClock.getGlobalPtr()
+                clock = core.TrueClock.getGlobalPtr()
                 expires = clock.getShortTime() + timeout
                 while not self.__flag:
                     wait = expires - clock.getShortTime()
                     if wait < 0:
                         return
-                    
+
                     self.__cvar.wait(wait)
-                
+
         finally:
             self.__lock.release()
 
@@ -366,7 +363,7 @@ class Timer(Thread):
 
 def _create_thread_wrapper(t, threadId):
     """ Creates a thread wrapper for the indicated external thread. """
-    if isinstance(t, pm.MainThread):
+    if isinstance(t, core.MainThread):
         pyt = MainThread(t, threadId)
     else:
         pyt = ExternalThread(t, threadId)
@@ -374,7 +371,7 @@ def _create_thread_wrapper(t, threadId):
     return pyt
 
 def current_thread():
-    t = pm.Thread.getCurrentThread()
+    t = core.Thread.getCurrentThread()
     return _thread._get_thread_wrapper(t, _create_thread_wrapper)
 
 currentThread = current_thread
@@ -410,7 +407,7 @@ def stack_size(size = None):
 def _test():
 
     from collections import deque
-    _sleep = pm.Thread.sleep
+    _sleep = core.Thread.sleep
 
     _VERBOSE = False
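
A short sketch of the threading emulation above, mirroring the standard library's API; the target function and thread name are placeholders, and the Thread constructor keywords are assumed to match Python's threading.Thread since the full signature is not shown in this hunk.

    from direct.stdpy import threading

    done = threading.Event()

    def worker():
        print 'running in', threading.current_thread().getName()
        done.set()

    t = threading.Thread(target=worker, name='demo-thread')
    t.start()
    done.wait()      # Event.wait() blocks on the underlying ConditionVarFull
    t.join()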
 

+ 2 - 2
direct/src/stdpy/threading2.py

@@ -17,8 +17,8 @@ import sys as _sys
 
 from direct.stdpy import thread
 from direct.stdpy.thread import stack_size, _local as local
-from pandac import PandaModules as pm
-_sleep = pm.Thread.sleep
+from panda3d import core
+_sleep = core.Thread.sleep
 
 from time import time as _time
 from traceback import format_exc as _format_exc

+ 15 - 15
direct/src/task/Task.py

@@ -16,7 +16,7 @@ import time
 import random
 import string
 
-from pandac.PandaModules import *
+from panda3d.core import *
 
 def print_exc_plus():
     """
@@ -114,7 +114,7 @@ class TaskManager:
 
         self._frameProfileQueue = Queue()
 
-        # this will be set when it's safe to import StateVar 
+        # this will be set when it's safe to import StateVar
         self._profileFrames = None
         self._frameProfiler = None
         self._profileTasks = None
@@ -231,7 +231,7 @@ class TaskManager:
         in proportion to their time used and to their priority value.
         See AsyncTaskManager.setTimeslicePriority() for more.
         """
-        
+
         chain = self.mgr.makeTaskChain(chainName)
         if numThreads is not None:
             chain.setNumThreads(numThreads)
@@ -249,7 +249,7 @@ class TaskManager:
     def hasTaskNamed(self, taskName):
         """Returns true if there is at least one task, active or
         sleeping, with the indicated name. """
-        
+
         return bool(self.mgr.findTask(taskName))
 
     def getTasksNamed(self, taskName):
@@ -261,7 +261,7 @@ class TaskManager:
         """Returns a list of all tasks, active or sleeping, with a
         name that matches the pattern, which can include standard
         shell globbing characters like *, ?, and []. """
-        
+
         return self.__makeTaskList(self.mgr.findTasksMatching(GlobPattern(taskPattern)))
 
     def getAllTasks(self):
@@ -298,7 +298,7 @@ class TaskManager:
         wish to specify a task that will run in the next frame, use a
         delayTime of 0.
         """
-        
+
         if delayTime < 0:
             assert self.notify.warning('doMethodLater: added task: %s with negative delay: %s' % (name, delayTime))
 
@@ -310,7 +310,7 @@ class TaskManager:
     def add(self, funcOrTask, name = None, sort = None, extraArgs = None,
             priority = None, uponDeath = None, appendTask = False,
             taskChain = None, owner = None):
-        
+
         """
         Add a new task to the taskMgr.  The task will begin executing
         immediately, or next frame if its sort value has already
@@ -363,7 +363,7 @@ class TaskManager:
         added, or the original Task object that was passed in.
 
         """
-        
+
         task = self.__setupTask(funcOrTask, name, priority, sort, extraArgs, taskChain, appendTask, owner, uponDeath)
         self.mgr.add(task)
         return task
@@ -412,7 +412,7 @@ class TaskManager:
             task.setUponDeath(uponDeath)
 
         return task
-        
+
     def remove(self, taskOrName):
         """Removes a task from the task manager.  The task is stopped,
         almost as if it had returned task.done.  (But if the task is
@@ -421,7 +421,7 @@ class TaskManager:
         Task object, or the name of a task.  If you specify a name,
         all tasks with the indicated name are removed.  Returns the
         number of tasks removed. """
-        
+
         if isinstance(taskOrName, types.StringTypes):
             tasks = self.mgr.findTasks(taskOrName)
             return self.mgr.remove(tasks)
@@ -463,7 +463,7 @@ class TaskManager:
         # This is the spot for an internal yield function
         nextTaskTime = self.mgr.getNextWakeTime()
         self.doYield(startFrameTime, nextTaskTime)
-        
+
         # Restore default interrupt handler
         signal.signal(signal.SIGINT, signal.default_int_handler)
         if self.fKeyboardInterrupt:
@@ -472,7 +472,7 @@ class TaskManager:
     def run(self):
         """Starts the task manager running.  Does not return until an
         exception is encountered (including KeyboardInterrupt). """
-        
+
         # Set the clock to have last frame's time in case we were
         # Paused at the prompt for a long time
         t = self.globalClock.getFrameTime()
@@ -554,7 +554,7 @@ class TaskManager:
     def __tryReplaceTaskMethod(self, task, oldMethod, newFunction):
         if not isinstance(task, PythonTask):
             return 0
-        
+
         method = task.getFunction()
         if (type(method) == types.MethodType):
             function = method.im_func
@@ -730,10 +730,10 @@ class TaskManager:
         delta = minFinTime - self.globalClock.getRealTime()
         while(delta > 0.002):
             print ' sleep %s'% (delta)
-            time.sleep(delta)           
+            time.sleep(delta)
             delta = minFinTime - self.globalClock.getRealTime()
     """
-    
+
     if __debug__:
         # to catch memory leaks during the tests at the bottom of the file
         def _startTrackingMemLeaks(self):
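
To round out the task-manager docstrings above, a brief sketch of add(), doMethodLater(), and remove() from application code; the task names and the 2.5-second delay are illustrative.

    from direct.showbase.ShowBase import ShowBase
    from direct.task import Task

    base = ShowBase()

    def spin(task):
        # Rotate the default camera a little each frame; task.time is the
        # number of seconds since the task started.
        base.camera.setH(task.time * 30.0)
        return Task.cont

    base.taskMgr.add(spin, 'spin-camera')

    def stopSpinning(task):
        base.taskMgr.remove('spin-camera')  # removal by name, per remove() above
        return Task.done

    # doMethodLater() schedules a one-shot task 2.5 seconds from now.
    base.taskMgr.doMethodLater(2.5, stopSpinning, 'stop-spinning')

    base.run()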

+ 0 - 71
direct/src/test/ModelScreenShot.py

@@ -1,71 +0,0 @@
-import direct
-from pandac.PandaModules import loadPrcFileData
-
-from direct.showbase.DirectObject import DirectObject
-from direct.directbase.DirectStart import *
-from pandac.PandaModules import *
-import direct.gui.DirectGuiGlobals as DGG
-from direct.gui.DirectGui import *
-from direct.task import Task
-
-from direct.directnotify import DirectNotifyGlobal
-import math
-from operator import *
-
-import ModelScreenShotGlobals
-
-class ModelScreenShot(DirectObject):
-    notify = DirectNotifyGlobal.directNotify.newCategory("ModelScreenShot")
-
-    def __init__(self):
-
-        # Grab a list of models to capture screenshots of from an array in
-        # the globals file
-        self.modelsToView = ModelScreenShotGlobals.models
-        self.models = []
-
-        # Attach all the models listed to render and save a pointer to them
-        # in an array.  Then hide the model.
-        for model in self.modelsToView:
-            m = loader.loadModel(model)
-            m.reparentTo(render)
-            self.models.append(m)
-            m.hide()
-
-        # Set a nice farplane far, far away
-        self.lens = base.camera.getChild(0).node().getLens()
-        self.lens.setFar(10000)
-
-        # Hide the cursor
-        self.props = WindowProperties()
-        self.props.setCursorHidden(0)
-        base.win.requestProperties(self.props)
-
-        # Method for getting the distance to an object from the camera
-        def getDist(obj, lens):
-            rad = obj.getBounds().getRadius()
-            fov = lens.getFov()
-            dist = rad / math.tan(deg2Rad(min(fov[0], fov[1]/2.0)))
-            return dist
-
-        # Determin the optimal camera position
-        def getOptCamPos(obj, dist):
-            cen = obj.getBounds().getCenter()
-            camPos = VBase3(cen.getX(), -dist, cen.getZ())
-            return camPos
-
-        # Generate screenshots
-        def generatePics():
-            for model in self.models:
-                model.show()
-                base.camera.setPos(getOptCamPos(model, getDist(model, self.lens)))
-                uFilename = model.getName().replace('.egg','.jpg')
-                self.notify.info("screenshot %s   camera pos: %s" % (uFilename, base.camera.getPos()))
-                base.graphicsEngine.renderFrame()
-                base.screenshot(namePrefix = uFilename, defaultFilename = 0)
-                model.hide()
-
-        generatePics()
-        
-mss = ModelScreenShot()
-run()

+ 0 - 6
direct/src/test/ModelScreenShotGlobals.py

@@ -1,6 +0,0 @@
-# Replace these with the models you want to screenshot
-models = [
-    'models/misc/smiley',
-    'models/misc/sphere',
-    'models/misc/xyzAxis'
-]

+ 0 - 0
direct/src/test/Sources.pp


+ 0 - 0
direct/src/test/__init__.py


+ 2 - 5
doc/Config.pp.sample

@@ -32,13 +32,10 @@
 
 // Note the use of the Panda filename convention, with forward slashes
 // instead of backslashes, and /c/ instead of c:/ .
-#define DX8_IPATH /c/DXSDK-OCT2004/include
-#define DX8_LPATH /c/DXSDK-OCT2004/lib
 #define DX9_IPATH /c/DXSDK-DEC2006/include
 #define DX9_LPATH /c/DXSDK-DEC2006/lib
 
-// If for any reason you need to turn off either the DX8 or DX9 builds,
-// you can uncomment one of the following lines.  (Defining a
+// If for any reason you need to turn off the DX9 build,
+// you can uncomment the following line.  (Defining a
 // variable to an empty string means setting it false.)
-//#define HAVE_DX8
 //#define HAVE_DX9

+ 0 - 1
doc/INSTALL-MK

@@ -141,7 +141,6 @@ it will show you the available command-line options:
   --use-max9        --no-max9      (enable/disable use of MAX9)
   --use-max2009     --no-max2009   (enable/disable use of MAX2009)
   --use-max2010     --no-max2010   (enable/disable use of MAX2010)
-  --use-dx8         --no-dx8       (enable/disable use of DX8)
   --use-dx9         --no-dx9       (enable/disable use of DX9)
   --use-python      --no-python    (enable/disable use of PYTHON)
   --use-zlib        --no-zlib      (enable/disable use of ZLIB)

+ 3 - 8
doc/INSTALL-PP

@@ -251,13 +251,13 @@ indicate a variable is true by defining it to some nonempty string
 (e.g. "yes" or "1"), and false by defining it to nothing.  For
 example:
 
-  #define HAVE_DX8 1
+  #define HAVE_DX9 1
 
 Indicates you have the DirectX SDK installed, while
 
-  #define HAVE_DX8
+  #define HAVE_DX9
 
-Indicates you do not.  Do not be tempted to define HAVE_DX8 to no or 0;
+Indicates you do not.  Do not be tempted to define HAVE_DX9 to no or 0;
 since these are both nonempty strings, they are considered to
 represent true!  Also, don't try to use a pair of quotation marks to
 represent the empty string, since the quotation marks become part of
@@ -312,11 +312,6 @@ find useful are:
     header and library files, and the name of the VRPN libraries, if
     VRPN is installed on your system.
 
-  DX8_IPATH / DX8_LPATH / DX8_LIBS - the full pathname to the
-    DirectX 8.1 SDK header and library files, if you have installed
-    this SDK. (You must currently install this SDK in order to
-    build DirectX8 support for Panda.)
-
   DX9_IPATH / DX9_LPATH / DX9_LIBS - the full pathname to the
     DirectX 9 SDK header and library files, if you have installed
     this SDK. (You must currently install this SDK in order to

+ 0 - 33
doc/README

@@ -1,33 +0,0 @@
-Panda3D is an open source 3D Engine originally developed, and still
-actively maintained, by the Walt Disney VR Studio.  Additional
-development and support for the open source community is provided by
-the Entertainment Technology Center of Carnegie Mellon University.
-
-At the present, we are providing two completely unrelated systems for
-building Panda.  The original build system, ppremake, is still in
-active use by the VR Studio, and is useful if you want advanced build
-control. The other build system, makepanda, is designed to build
-quickly and painlessly, and is used to generate the official releases.
-
-The ppremake system is a makefile generator, and allows you to
-configure your build environment to a high degree of customization.
-It is a fairly complex build system, and it requires some comfort with
-using the command-line make utilities.
-
-The makepanda system is a Python script that directly invokes the
-compiler to build the Panda sources.  Its emphasis is on providing a
-hands-off, simple approach to building Panda.
-
-Both systems may require you to first install a number of third-party
-tools if you would like to make them available for Panda, such as
-FreeType or OpenSSL.  You may also download a zip file that contains
-precompiled versions of these third-party libraries from the Panda
-website, which is especially useful when used in conjunction with the
-makepanda system.
-
-If you are interested in compiling Panda for yourself, you are welcome
-to use either build system.  Please refer to the documents INSTALL-PP
-or INSTALL-MK, in this directory, for build instructions for ppremake
-and makepanda, respectively.  You may also be interested in
-downloading the prebuilt Panda3D binaries from the Panda website at
-http://www.panda3d.org/ .

+ 21 - 0
doc/man/bam-info.1

@@ -0,0 +1,21 @@
+.\" Automatically generated by bam-info -write-bam
+.TH BAM-INFO 1 "27 December 2014" "1.9.0" Panda3D
+.SH NAME
+bam-info \- describe the contents of .bam files
+.SH SYNOPSIS
+\fBbam-info\fR [opts] input.bam [input.bam ... ]
+.SH DESCRIPTION
+This program scans one or more Bam files\-\-Panda's Binary Animation and Models native binary format\-\-and describes their contents.
+.SH OPTIONS
+.TP
+.B \-ls
+List the scene graph hierarchy in the bam file.
+.TP
+.B \-t
+List explicitly each transition in the hierarchy.
+.TP
+.B \-g
+Output verbose information about each Geom in the Bam file.
+.TP
+.B \-h
+Display this help page.

+ 31 - 0
doc/man/bam2egg.1

@@ -0,0 +1,31 @@
+.\" Automatically generated by bam2egg -write-bam
+.TH BAM2EGG 1 "27 December 2014" "1.9.0" Panda3D
+.SH NAME
+bam2egg \- convert a native Panda .bam file to an .egg file
+.SH SYNOPSIS
+\fBbam2egg\fR [opts] input.bam output.egg
+.br
+\fBbam2egg\fR [opts] -o output.egg input.bam
+.br
+\fBbam2egg\fR [opts] input.bam >output.egg
+.SH DESCRIPTION
+This program converts native Panda bam files to egg.  The conversion is somewhat incomplete; running egg2bam followed by bam2egg should not be expected to yield the same egg file you started with.
+.SH OPTIONS
+.TP
+.B \-noabs
+Don't allow the input bam file to have absolute pathnames.  If it does, abort with an error.  This option is designed to help detect errors when populating or building a standalone model tree, which should be self-contained and include only relative pathnames.
+.TP
+.B \-noexist
+Don't treat it as an error if the input file references pathnames (e.g. textures) that don't exist.  Normally, this will be flagged as an error and the command aborted; with this option, an egg file will be generated anyway, referencing pathnames that do not exist.
+.TP
+.B \-ignore
+Ignore non-fatal errors and generate an egg file anyway.
+.TP
+.BI "\-o " "filename"
+Specify the filename to which the resulting egg file will be written.  If this option is omitted, the last parameter name is taken to be the name of the output file, or standard output is used if there are no other parameters.
+.TP
+.BI "\-cs " "coordinate-system"
+Specify the coordinate system of the input bam file.  By default, this is taken from the Config.prc file, which is currently zup_right.
+.TP
+.B \-h
+Display this help page.

+ 75 - 0
doc/man/dae2egg.1

@@ -0,0 +1,75 @@
+.\" Automatically generated by dae2egg -write-bam
+.TH DAE2EGG 1 "27 December 2014" "1.9.0" Panda3D
+.SH NAME
+dae2egg \- convert COLLADA assets into .egg files
+.SH SYNOPSIS
+\fBdae2egg\fR [opts] input.dae output.egg
+.br
+\fBdae2egg\fR [opts] -o output.egg input.dae
+.br
+\fBdae2egg\fR [opts] input.dae >output.egg
+.SH DESCRIPTION
+This program converts .dae files (COLLADA Digital Asset Exchange) to .egg.
+.SH OPTIONS
+.TP
+.B \-noabs
+Don't allow the input COLLADA file to have absolute pathnames.  If it does, abort with an error.  This option is designed to help detect errors when populating or building a standalone model tree, which should be self-contained and include only relative pathnames.
+.TP
+.B \-noexist
+Don't treat it as an error if the input file references pathnames (e.g. textures) that don't exist.  Normally, this will be flagged as an error and the command aborted; with this option, an egg file will be generated anyway, referencing pathnames that do not exist.
+.TP
+.B \-ignore
+Ignore non-fatal errors and generate an egg file anyway.
+.TP
+.B \-invtrans
+Import the .dae file using inverted transparency. This is useful when importing COLLADA files from some authoring tools that export models with inverted transparency, such as Google SketchUp.
+.TP
+.BI "\-ui " "units"
+Specify the units of the input COLLADA file.  Normally, this can be inferred from the file itself.
+.TP
+.BI "\-uo " "units"
+Specify the units of the resulting egg file.  If this is specified, the vertices in the egg file will be scaled as necessary to make the appropriate units conversion; otherwise, the vertices will be left as they are.
+.TP
+.B \-no
+Strip all normals.
+.TP
+.B \-np
+Strip existing normals and redefine polygon normals.
+.TP
+.BI "\-nv " "threshold"
+Strip existing normals and redefine vertex normals.  Consider an edge between adjacent polygons to be smooth if the angle between them is less than threshold degrees.
+.TP
+.B \-nn
+Preserve normals exactly as they are.  This is the default.
+.TP
+.BI "\-tbn " "name"
+Compute tangent and binormal for the named texture coordinate set(s).  The name may include wildcard characters such as * and ?.  The normal must already exist or have been computed via one of the above options.  The tangent and binormal are used to implement bump mapping and related texture-based lighting effects.  This option may be repeated as necessary to name multiple texture coordinate sets.
+.TP
+.B \-tbnall
+Compute tangent and binormal for all texture coordinate sets.  This is equivalent to -tbn "*".
+.TP
+.B \-tbnauto
+Compute tangent and binormal for all normal maps. 
+.TP
+.BI "\-TS " "sx[,sy,sz]"
+Scale the model uniformly by the given factor (if only one number is given) or in each axis by sx, sy, sz (if three numbers are given).
+.TP
+.BI "\-TR " "x,y,z"
+Rotate the model x degrees about the x axis, then y degrees about the y axis, and then z degrees about the z axis.
+.TP
+.BI "\-TA " "angle,x,y,z"
+Rotate the model angle degrees counterclockwise about the given axis.
+.TP
+.BI "\-TT " "x,y,z"
+Translate the model by the indicated amount.
+
+All transformation options (-TS, -TR, -TA, -TT) are cumulative and are applied in the order they are encountered on the command line.
+.TP
+.BI "\-o " "filename"
+Specify the filename to which the resulting egg file will be written.  If this option is omitted, the last parameter name is taken to be the name of the output file, or standard output is used if there are no other parameters.
+.TP
+.BI "\-cs " "coordinate-system"
+Specify the coordinate system of the input COLLADA file.  Normally, this can be inferred from the file itself.
+.TP
+.B \-h
+Display this help page.

+ 16 - 0
doc/man/dxf-points.1

@@ -0,0 +1,16 @@
+.\" Automatically generated by dxf-points -write-bam
+.TH DXF-POINTS 1 "27 December 2014" "1.9.0" Panda3D
+.SH NAME
+dxf-points \- extract points from AutoCAD .dxf files
+.SH SYNOPSIS
+\fBdxf-points\fR [opts] input.dxf > output.txt
+.br
+\fBdxf-points\fR [opts] -o output.txt input.dxf
+.br
+\fBdxf-points\fR [opts] input.dxf output.txt
+.SH DESCRIPTION
+This program reads an AutoCAD .dxf file and generates a simple list of all the points contained within it, one per line, to a text file, or to standard output.
+.SH OPTIONS
+.TP
+.B \-h
+Display this help page.

+ 72 - 0
doc/man/dxf2egg.1

@@ -0,0 +1,72 @@
+.\" Automatically generated by dxf2egg -write-bam
+.TH DXF2EGG 1 "27 December 2014" "1.9.0" Panda3D
+.SH NAME
+dxf2egg \- convert AutoCAD .dxf files to .egg files
+.SH SYNOPSIS
+\fBdxf2egg\fR [opts] input.dxf output.egg
+.br
+\fBdxf2egg\fR [opts] -o output.egg input.dxf
+.br
+\fBdxf2egg\fR [opts] input.dxf >output.egg
+.SH DESCRIPTION
+This program converts DXF (AutoCAD interchange format) to egg.  It only converts polygon data, with no fancy tricks.  DXF does not support hierarchical databases, so dxf2egg creates a single group at the root level for each layer in the DXF file.
+.SH OPTIONS
+.TP
+.B \-noabs
+Don't allow the input DXF file to have absolute pathnames.  If it does, abort with an error.  This option is designed to help detect errors when populating or building a standalone model tree, which should be self-contained and include only relative pathnames.
+.TP
+.B \-noexist
+Don't treat it as an error if the input file references pathnames (e.g. textures) that don't exist.  Normally, this will be flagged as an error and the command aborted; with this option, an egg file will be generated anyway, referencing pathnames that do not exist.
+.TP
+.B \-ignore
+Ignore non-fatal errors and generate an egg file anyway.
+.TP
+.BI "\-ui " "units"
+Specify the units of the input DXF file.  Normally, this can be inferred from the file itself.
+.TP
+.BI "\-uo " "units"
+Specify the units of the resulting egg file.  If this is specified, the vertices in the egg file will be scaled as necessary to make the appropriate units conversion; otherwise, the vertices will be left as they are.
+.TP
+.B \-no
+Strip all normals.
+.TP
+.B \-np
+Strip existing normals and redefine polygon normals.
+.TP
+.BI "\-nv " "threshold"
+Strip existing normals and redefine vertex normals.  Consider an edge between adjacent polygons to be smooth if the angle between them is less than threshold degrees.
+.TP
+.B \-nn
+Preserve normals exactly as they are.  This is the default.
+.TP
+.BI "\-tbn " "name"
+Compute tangent and binormal for the named texture coordinate set(s).  The name may include wildcard characters such as * and ?.  The normal must already exist or have been computed via one of the above options.  The tangent and binormal are used to implement bump mapping and related texture-based lighting effects.  This option may be repeated as necessary to name multiple texture coordinate sets.
+.TP
+.B \-tbnall
+Compute tangent and binormal for all texture coordinate sets.  This is equivalent to -tbn "*".
+.TP
+.B \-tbnauto
+Compute tangent and binormal for all normal maps. 
+.TP
+.BI "\-TS " "sx[,sy,sz]"
+Scale the model uniformly by the given factor (if only one number is given) or in each axis by sx, sy, sz (if three numbers are given).
+.TP
+.BI "\-TR " "x,y,z"
+Rotate the model x degrees about the x axis, then y degrees about the y axis, and then z degrees about the z axis.
+.TP
+.BI "\-TA " "angle,x,y,z"
+Rotate the model angle degrees counterclockwise about the given axis.
+.TP
+.BI "\-TT " "x,y,z"
+Translate the model by the indicated amount.
+
+All transformation options (-TS, -TR, -TA, -TT) are cumulative and are applied in the order they are encountered on the command line.
+.TP
+.BI "\-o " "filename"
+Specify the filename to which the resulting egg file will be written.  If this option is omitted, the last parameter name is taken to be the name of the output file, or standard output is used if there are no other parameters.
+.TP
+.BI "\-cs " "coordinate-system"
+Specify the coordinate system of the input DXF file.  Normally, this is z-up.
+.TP
+.B \-h
+Display this help page.

Some files were not shown because too many files changed in this diff