
Remove ppremake, genPyCode, and all hacks created to support them

rdb 10 years ago
commit db3ab953e4
100 changed files with 10 additions and 11623 deletions
  1. contrib/Package.pp (+0 -60)
  2. contrib/Sources.pp (+0 -10)
  3. contrib/src/Sources.pp (+0 -4)
  4. contrib/src/ai/Sources.pp (+0 -82)
  5. contrib/src/contribbase/Sources.pp (+0 -13)
  6. contrib/src/sceneeditor/Sources.pp (+0 -2)
  7. direct/Package.pp (+0 -60)
  8. direct/Sources.pp (+0 -11)
  9. direct/metalibs/Sources.pp (+0 -7)
  10. direct/metalibs/direct/Sources.pp (+0 -32)
  11. direct/src/Sources.pp (+0 -4)
  12. direct/src/actor/Sources.pp (+0 -3)
  13. direct/src/cluster/Sources.pp (+0 -0)
  14. direct/src/configfiles/Sources.pp (+0 -10)
  15. direct/src/configfiles/direct.prc.pp (+0 -32)
  16. direct/src/controls/Sources.pp (+0 -0)
  17. direct/src/dcparse/Sources.pp (+0 -23)
  18. direct/src/dcparser/Sources.pp (+0 -64)
  19. direct/src/deadrec/Sources.pp (+0 -28)
  20. direct/src/directbase/Sources.pp (+0 -20)
  21. direct/src/directd/Sources.pp (+0 -23)
  22. direct/src/directdServer/Sources.pp (+0 -30)
  23. direct/src/directdevices/Sources.pp (+0 -0)
  24. direct/src/directnotify/Sources.pp (+0 -0)
  25. direct/src/directscripts/Sources.pp (+0 -8)
  26. direct/src/directtools/Sources.pp (+0 -0)
  27. direct/src/directutil/Sources.pp (+0 -0)
  28. direct/src/distributed/Sources.pp (+0 -25)
  29. direct/src/extensions_native/Sources.pp (+0 -20)
  30. direct/src/extensions_native/extension_native_helpers.py (+3 -115)
  31. direct/src/ffi/.gitignore (+0 -3)
  32. direct/src/ffi/DoGenPyCode.py (+0 -344)
  33. direct/src/ffi/FFIConstants.py (+0 -24)
  34. direct/src/ffi/FFIEnvironment.py (+0 -32)
  35. direct/src/ffi/FFIExternalObject.py (+0 -268)
  36. direct/src/ffi/FFIInterrogateDatabase.py (+0 -870)
  37. direct/src/ffi/FFIOverload.py (+0 -472)
  38. direct/src/ffi/FFIRename.py (+0 -151)
  39. direct/src/ffi/FFISpecs.py (+0 -586)
  40. direct/src/ffi/FFITypes.py (+0 -1026)
  41. direct/src/ffi/Sources.pp (+0 -12)
  42. direct/src/ffi/__init__.py (+0 -0)
  43. direct/src/ffi/genPyCode.pp (+0 -220)
  44. direct/src/ffi/jGenPyCode.py (+0 -96)
  45. direct/src/ffi/panda3d.py (+0 -275)
  46. direct/src/filter/Sources.pp (+0 -4)
  47. direct/src/fsm/Sources.pp (+0 -0)
  48. direct/src/gui/Sources.pp (+0 -0)
  49. direct/src/http/Sources.pp (+0 -38)
  50. direct/src/interval/Sources.pp (+0 -54)
  51. direct/src/leveleditor/Sources.pp (+0 -3)
  52. direct/src/motiontrail/Sources.pp (+0 -42)
  53. direct/src/p3d/Sources.pp (+0 -77)
  54. direct/src/particles/Sources.pp (+0 -0)
  55. direct/src/physics/Sources.pp (+0 -0)
  56. direct/src/plugin/Sources.pp (+0 -369)
  57. direct/src/plugin/p3d_plugin_config.h.pp (+0 -38)
  58. direct/src/plugin_activex/P3DActiveX.rc.pp (+0 -175)
  59. direct/src/plugin_activex/Sources.pp (+0 -42)
  60. direct/src/plugin_npapi/Sources.pp (+0 -62)
  61. direct/src/plugin_npapi/nppanda3d.rc.pp (+0 -55)
  62. direct/src/plugin_standalone/Sources.pp (+0 -142)
  63. direct/src/plugin_standalone/panda3d.rc.pp (+0 -53)
  64. direct/src/showbase/PythonUtil.py (+2 -5)
  65. direct/src/showbase/Sources.pp (+0 -41)
  66. direct/src/showutil/Sources.pp (+0 -0)
  67. direct/src/stdpy/Sources.pp (+0 -2)
  68. direct/src/task/Sources.pp (+0 -0)
  69. direct/src/tkpanels/Sources.pp (+0 -3)
  70. direct/src/tkwidgets/Sources.pp (+0 -3)
  71. direct/src/wxwidgets/Sources.pp (+0 -3)
  72. dmodels/Package.pp (+0 -63)
  73. dmodels/Sources.pp (+0 -4)
  74. dmodels/src/Sources.pp (+0 -4)
  75. dmodels/src/audio/Sources.pp (+0 -4)
  76. dmodels/src/audio/sfx/Sources.pp (+0 -9)
  77. dmodels/src/fonts/Sources.pp (+0 -8)
  78. dmodels/src/gui/Sources.pp (+0 -14)
  79. dmodels/src/icons/Sources.pp (+0 -30)
  80. dmodels/src/level_editor/Sources.pp (+0 -13)
  81. dmodels/src/maps/Sources.pp (+0 -3)
  82. dmodels/src/misc/Sources.pp (+0 -22)
  83. doc/Config.pp.sample (+0 -41)
  84. doc/INSTALL (+1 -9)
  85. doc/INSTALL-PP (+0 -885)
  86. dtool/Config.Android.pp (+0 -374)
  87. dtool/Config.Cygwin.pp (+0 -12)
  88. dtool/Config.Cygwin64.pp (+0 -10)
  89. dtool/Config.FreeBSD.pp (+0 -303)
  90. dtool/Config.Irix.pp (+0 -151)
  91. dtool/Config.Linux.pp (+0 -339)
  92. dtool/Config.OSX.pp (+0 -295)
  93. dtool/Config.Win32.pp (+0 -171)
  94. dtool/Config.Win64.pp (+0 -171)
  95. dtool/Config.pp (+0 -1237)
  96. dtool/LocalSetup.pp (+0 -722)
  97. dtool/Package.pp (+0 -439)
  98. dtool/PandaVersion.pp (+4 -0)
  99. dtool/Sources.pp (+0 -7)
  100. dtool/metalibs/Sources.pp (+0 -7)

+ 0 - 60
contrib/Package.pp

@@ -1,60 +0,0 @@
-//
-// Package.pp
-//
-// This file defines certain configuration variables that are to be
-// written into the various make scripts.  It is processed by ppremake
-// (along with the Sources.pp files in each of the various
-// contribories) to generate build scripts appropriate to each
-// environment.
-//
-// This is the package-specific file, which should be at the top of
-// every source hierarchy.  It generally gets the ball rolling, and is
-// responsible for explicitly including all of the relevent Config.pp
-// files.
-
-// What is the name and version of this source tree?
-#if $[eq $[PACKAGE],]
-  #define PACKAGE contrib
-  #define VERSION 0.80
-#endif
-
-
-// Where should we find the PANDA source contribory?
-#if $[PANDA_SOURCE]
-  #define PANDA_SOURCE $[unixfilename $[PANDA_SOURCE]]
-#elif $[or $[CTPROJS],$[PANDA]]
-  // If we are presently attached, use the environment variable.
-  #define PANDA_SOURCE $[unixfilename $[PANDA]]
-  #if $[eq $[PANDA],]
-    #error You seem to be attached to some trees, but not PANDA!
-  #endif
-#else
-  // Otherwise, if we are not attached, we guess that the source is a
-  // sibling contribory to this source root.
-  #define PANDA_SOURCE $[standardize $[TOPDIR]/../panda]
-#endif
-
-// Where should we install CONTRIB?
-#if $[CONTRIB_INSTALL]
-  #define CONTRIB_INSTALL $[unixfilename $[CONTRIB_INSTALL]]
-#elif $[CTPROJS]
-  #set CONTRIB $[unixfilename $[CONTRIB]]
-  #define CONTRIB_INSTALL $[CONTRIB]/built
-  #if $[eq $[CONTRIB],]
-    #error You seem to be attached to some trees, but not CONTRIB!
-  #endif
-#else
-  #defer CONTRIB_INSTALL $[unixfilename $[INSTALL_DIR]]
-#endif
-
-// Also get the PANDA Package file and everything that includes.
-#if $[not $[isfile $[PANDA_SOURCE]/Package.pp]]
-  #printvar PANDA_SOURCE
-  #error PANDA source contribory not found from contrib!  Are you attached properly?
-#endif
-
-#include $[PANDA_SOURCE]/Package.pp
-
-// Define the inter-tree dependencies.
-#define NEEDS_TREES panda $[NEEDS_TREES]
-#define DEPENDABLE_HEADER_DIRS $[DEPENDABLE_HEADER_DIRS] $[PANDA_INSTALL]/include

+ 0 - 10
contrib/Sources.pp

@@ -1,10 +0,0 @@
-// This is the toplevel directory for a package.
-
-#define DIR_TYPE toplevel
-
-#define REQUIRED_TREES dtool panda
-
-#define EXTRA_DIST \
-    Sources.pp Config.pp Package.pp
-
-#define PYTHON_PACKAGE 1

+ 0 - 4
contrib/src/Sources.pp

@@ -1,4 +0,0 @@
-// This is a group directory: a directory level above a number of
-// source subdirectories.
-
-#define DIR_TYPE group

+ 0 - 82
contrib/src/ai/Sources.pp

@@ -1,82 +0,0 @@
-#define LOCAL_LIBS p3contribbase
-#define BUILDING_DLL BUILDING_PANDAAI
-
-#define OTHER_LIBS \
-   panda:c \
-   p3express:c p3putil:c p3pandabase:c pandaexpress:m \
-   p3interrogatedb:c p3prc:c p3dconfig:c p3dtoolconfig:m \
-   p3dtoolutil:c p3dtoolbase:c p3dtool:m
-
-#begin lib_target
-  #define TARGET pandaai
-
-  #define COMBINED_SOURCES p3ai_composite1.cxx
-
-  #define SOURCES \
-    aiBehaviors.h \
-    aiCharacter.h \
-    aiGlobals.h \
-    aiNode.h \
-    aiPathFinder.h \
-    aiWorld.h \
-    arrival.h \
-    config_ai.h \
-    evade.h \
-    flee.h \
-    flock.h \
-    aiGlobals.h \
-    meshNode.h \
-    obstacleAvoidance.h \
-    pathFind.h \
-    pathFollow.h \
-    pursue.h \
-    seek.h \
-    wander.h
-
-  #define INCLUDED_SOURCES \
-    aiBehaviors.cxx \
-    aiCharacter.cxx \
-    aiNode.cxx \
-    aiPathFinder.cxx \
-    aiWorld.cxx \
-    p3ai_composite.cxx \
-    p3ai_composite1.cxx \
-    arrival.cxx \
-    config_ai.cxx \
-    evade.cxx \
-    flee.cxx \
-    flock.cxx \
-    meshNode.cxx \
-    obstacleAvoidance.cxx \
-    pathFind.cxx \
-    pathFollow.cxx \
-    pursue.cxx \
-    seek.cxx \
-    wander.cxx
-
-
-  #define INSTALL_HEADERS \
-    aiBehaviors.h \
-    aiCharacter.h \
-    aiGlobals.h \
-    aiNode.h \
-    aiPathFinder.h \
-    aiWorld.h \
-    arrival.h \
-    config_ai.h \
-    evade.h \
-    flee.h \
-    flock.h \
-    aiGlobals.h \
-    meshNode.h \
-    obstacleAvoidance.h \
-    pathFind.h \
-    pathFollow.h \
-    pursue.h \
-    seek.h \
-    wander.h
-
-  #define IGATESCAN all
-
-#end lib_target
-

+ 0 - 13
contrib/src/contribbase/Sources.pp

@@ -1,13 +0,0 @@
-#define OTHER_LIBS p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-                   p3dtoolutil:c p3dtoolbase:c p3dtool:m  p3prc:c
-
-#begin lib_target
-  #define TARGET p3contribbase
-
-  #define SOURCES \
-    contribbase.cxx contribbase.h contribsymbols.h \
-
-  #define INSTALL_HEADERS \
-    contribbase.h contribbase.h
-
-#end lib_target

+ 0 - 2
contrib/src/sceneeditor/Sources.pp

@@ -1,2 +0,0 @@
-// For now, since we are not installing Python files, this file can
-// remain empty.

+ 0 - 60
direct/Package.pp

@@ -1,60 +0,0 @@
-//
-// Package.pp
-//
-// This file defines certain configuration variables that are to be
-// written into the various make scripts.  It is processed by ppremake
-// (along with the Sources.pp files in each of the various
-// directories) to generate build scripts appropriate to each
-// environment.
-//
-// This is the package-specific file, which should be at the top of
-// every source hierarchy.  It generally gets the ball rolling, and is
-// responsible for explicitly including all of the relevent Config.pp
-// files.
-
-// What is the name and version of this source tree?
-#if $[eq $[PACKAGE],]
-  #define PACKAGE direct
-  #define VERSION 0.80
-#endif
-
-
-// Where should we find the PANDA source directory?
-#if $[PANDA_SOURCE]
-  #define PANDA_SOURCE $[unixfilename $[PANDA_SOURCE]]
-#elif $[or $[CTPROJS],$[PANDA]]
-  // If we are presently attached, use the environment variable.
-  #define PANDA_SOURCE $[unixfilename $[PANDA]]
-  #if $[eq $[PANDA],]
-    #error You seem to be attached to some trees, but not PANDA!
-  #endif
-#else
-  // Otherwise, if we are not attached, we guess that the source is a
-  // sibling directory to this source root.
-  #define PANDA_SOURCE $[standardize $[TOPDIR]/../panda]
-#endif
-
-// Where should we install DIRECT?
-#if $[DIRECT_INSTALL]
-  #define DIRECT_INSTALL $[unixfilename $[DIRECT_INSTALL]]
-#elif $[CTPROJS]
-  #set DIRECT $[unixfilename $[DIRECT]]
-  #define DIRECT_INSTALL $[DIRECT]/built
-  #if $[eq $[DIRECT],]
-    #error You seem to be attached to some trees, but not DIRECT!
-  #endif
-#else
-  #defer DIRECT_INSTALL $[unixfilename $[INSTALL_DIR]]
-#endif
-
-// Also get the PANDA Package file and everything that includes.
-#if $[not $[isfile $[PANDA_SOURCE]/Package.pp]]
-  #printvar PANDA_SOURCE
-  #error PANDA source directory not found from direct!  Are you attached properly?
-#endif
-
-#include $[PANDA_SOURCE]/Package.pp
-
-// Define the inter-tree dependencies.
-#define NEEDS_TREES panda $[NEEDS_TREES]
-#define DEPENDABLE_HEADER_DIRS $[DEPENDABLE_HEADER_DIRS] $[PANDA_INSTALL]/include

+ 0 - 11
direct/Sources.pp

@@ -1,11 +0,0 @@
-// This is the toplevel directory for a package.
-
-#define DIR_TYPE toplevel
-
-#define SAMPLE_SOURCE_FILE src/directbase/directbase.cxx
-#define REQUIRED_TREES dtool panda
-
-#define EXTRA_DIST \
-    Sources.pp Config.pp Package.pp
-
-#define PYTHON_PACKAGE 1

+ 0 - 7
direct/metalibs/Sources.pp

@@ -1,7 +0,0 @@
-// This is a group directory: a directory level above a number of
-// source subdirectories.
-
-#define DIR_TYPE group
-
-// The metalibs directory always depends on the src directory.
-#define DEPENDS src

+ 0 - 32
direct/metalibs/direct/Sources.pp

@@ -1,32 +0,0 @@
-// DIR_TYPE "metalib" indicates we are building a shared library that
-// consists mostly of references to other shared libraries.  Under
-// Windows, this directly produces a DLL (as opposed to the regular
-// src libraries, which don't produce anything but a pile of OBJ files
-// under Windows).
-
-#define DIR_TYPE metalib
-#define BUILDING_DLL BUILDING_DIRECT
-#define USE_PACKAGES native_net
-
-#define COMPONENT_LIBS \
-  p3directbase p3dcparser p3showbase p3deadrec p3directd p3interval p3distributed p3motiontrail p3http
-
-#define OTHER_LIBS \
-  panda:m \
-  pandaexpress:m \
-  p3parametrics:c \
-  p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-  p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-  p3express:c p3pstatclient:c p3prc:c p3pandabase:c p3linmath:c \
-  p3putil:c p3display:c p3event:c p3pgraph:c p3pgraphnodes:c \
-  p3gsgbase:c p3gobj:c p3mathutil:c \
-  p3downloader:c p3pnmimage:c p3chan:c \
-  p3pipeline:c p3cull:c \
-  $[if $[HAVE_NET],p3net:c] $[if $[WANT_NATIVE_NET],p3nativenet:c]
-
-#begin metalib_target
-  #define TARGET p3direct
-
-  #define SOURCES direct.cxx
-#end metalib_target
-

+ 0 - 4
direct/src/Sources.pp

@@ -1,4 +0,0 @@
-// This is a group directory: a directory level above a number of
-// source subdirectories.
-
-#define DIR_TYPE group

+ 0 - 3
direct/src/actor/Sources.pp

@@ -1,3 +0,0 @@
-// For now, since we are not installing Python files, this file can
-// remain empty.
-

+ 0 - 0
direct/src/cluster/Sources.pp


+ 0 - 10
direct/src/configfiles/Sources.pp

@@ -1,10 +0,0 @@
-#define INSTALL_CONFIG \
-  40_direct.prc
-
-#if $[CTPROJS]
-  // These files only matter to ctattach users.
-  #define INSTALL_CONFIG $[INSTALL_CONFIG] direct.init
-#endif
-
-
-#include $[THISDIRPREFIX]direct.prc.pp

+ 0 - 32
direct/src/configfiles/direct.prc.pp

@@ -1,32 +0,0 @@
-//
-// direct.prc.pp
-//
-// This file defines the script to auto-generate direct.prc at
-// ppremake time.  This is intended to fill in some of the default
-// parameters, in particular the default display types.
-//
-
-#output 40_direct.prc notouch
-#### Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]].
-################################# DO NOT EDIT ###########################
-
-#if $[CTPROJS]
-// If we are using the ctattach tools, append "built"--this is a VR
-// Studio convention.
-model-path      $DMODELS/built
-sound-path      $DMODELS/built
-#else
-// Most people outside the VR Studio just see this.
-model-path      $DMODELS
-sound-path      $DMODELS
-#endif
-
-# Define a new egg object type.  See the comments in _panda.prc about this.
-
-egg-object-type-direct-widget   <Scalar> collide-mask { 0x80000000 } <Collide> { Polyset descend }
-
-# Define a new cull bin that will render on top of everything else.
-
-cull-bin gui-popup 60 unsorted
-
-#end 40_direct.prc

+ 0 - 0
direct/src/controls/Sources.pp


+ 0 - 23
direct/src/dcparse/Sources.pp

@@ -1,23 +0,0 @@
-#define LOCAL_LIBS \
-  p3dcparser
-#define OTHER_LIBS \
-  p3express:c pandaexpress:m \
-  p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-  p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-  p3prc:c p3pstatclient:c p3pandabase:c p3linmath:c p3putil:c \
-  p3pipeline:c p3downloader:c \
-  $[if $[HAVE_NET],p3net:c] $[if $[WANT_NATIVE_NET],p3nativenet:c] \
-  panda:m \
-  p3pystub
-
-#define C++FLAGS -DWITHIN_PANDA
-
-#begin bin_target
-  #define TARGET dcparse
-  #define USE_PACKAGES zlib openssl tar
-
-  #define SOURCES \
-    dcparse.cxx
-  #define WIN_SYS_LIBS shell32.lib
-#end bin_target
-

+ 0 - 64
direct/src/dcparser/Sources.pp

@@ -1,64 +0,0 @@
-#define OTHER_LIBS \
-    p3express:c pandaexpress:m \
-    p3pstatclient:c p3pipeline:c panda:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3dtoolutil:c p3dtoolbase:c p3dtool:m p3prc:c p3pandabase:c \
-    p3downloader:c $[if $[HAVE_NET],p3net:c] $[if $[WANT_NATIVE_NET],p3nativenet:c] \
-    p3linmath:c p3putil:c
-
-#define LOCAL_LIBS \
-    p3directbase
-#define YACC_PREFIX dcyy
-#define C++FLAGS -DWITHIN_PANDA
-#define UNIX_SYS_LIBS m
-#define USE_PACKAGES python
-
-#begin lib_target
-  #define TARGET p3dcparser
-
-  #define COMBINED_SOURCES $[TARGET]_composite1.cxx  $[TARGET]_composite2.cxx
-
-  #define SOURCES \
-     dcAtomicField.h dcAtomicField.I dcClass.h dcClass.I \
-     dcDeclaration.h \
-     dcField.h dcField.I \
-     dcFile.h dcFile.I \
-     dcKeyword.h dcKeywordList.h \
-     dcLexer.lxx  \
-     dcLexerDefs.h dcMolecularField.h dcParser.yxx dcParserDefs.h  \
-     dcSubatomicType.h \
-     dcPackData.h dcPackData.I \
-     dcPacker.h dcPacker.I \
-     dcPackerCatalog.h dcPackerCatalog.I \
-     dcPackerInterface.h dcPackerInterface.I \
-     dcParameter.h dcClassParameter.h dcArrayParameter.h \
-     dcSimpleParameter.h dcSwitchParameter.h \
-     dcNumericRange.h dcNumericRange.I \
-     dcSwitch.h \
-     dcTypedef.h \
-     dcPython.h \
-     dcbase.h dcindent.h hashGenerator.h  \
-     primeNumberGenerator.h  
-
-  #define INCLUDED_SOURCES \
-     dcAtomicField.cxx dcClass.cxx \
-     dcDeclaration.cxx \
-     dcField.cxx dcFile.cxx \
-     dcKeyword.cxx dcKeywordList.cxx \
-     dcMolecularField.cxx dcSubatomicType.cxx \
-     dcPackData.cxx \
-     dcPacker.cxx \
-     dcPackerCatalog.cxx \
-     dcPackerInterface.cxx \
-     dcParameter.cxx dcClassParameter.cxx dcArrayParameter.cxx \
-     dcSimpleParameter.cxx dcSwitchParameter.cxx \
-     dcSwitch.cxx \
-     dcTypedef.cxx \
-     dcindent.cxx  \
-     hashGenerator.cxx primeNumberGenerator.cxx 
-
-  //  #define SOURCES $[SOURCES] $[INCLUDED_SOURCES]
-  //  #define COMBINED_SOURCES
-
-  #define IGATESCAN all
-#end lib_target

+ 0 - 28
direct/src/deadrec/Sources.pp

@@ -1,28 +0,0 @@
-#begin lib_target
-  #define TARGET p3deadrec
-  #define LOCAL_LIBS \
-    p3directbase
-  #define OTHER_LIBS \
-    p3express:c p3linmath:c \
-    p3interrogatedb:c p3dconfig:c \
-    p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3prc:c p3pandabase:c p3putil:c \
-    p3pipeline:c
-
-  #define COMBINED_SOURCES $[TARGET]_composite1.cxx
-
-  #define SOURCES \
-    config_deadrec.h \
-    smoothMover.h smoothMover.I
-  
-  #define INCLUDED_SOURCES \  
-    config_deadrec.cxx \
-    smoothMover.cxx
-
-  #define INSTALL_HEADERS \
-    config_deadrec.h \
-    smoothMover.h smoothMover.I
-
-  #define IGATESCAN \
-    all
-#end lib_target

+ 0 - 20
direct/src/directbase/Sources.pp

@@ -1,20 +0,0 @@
-#define USE_PACKAGES cg  // from gobj.
-
-#begin lib_target
-  #define TARGET p3directbase
-  
-  #define SOURCES \
-    directbase.cxx directbase.h directsymbols.h \
-
-  #define INSTALL_HEADERS \
-    directbase.h directsymbols.h
-
-  // These libraries and frameworks are used by dtoolutil; we redefine
-  // them here so they get into the panda build system.
-  #if $[ne $[PLATFORM], FreeBSD]
-    #define UNIX_SYS_LIBS dl
-  #endif
-  #define WIN_SYS_LIBS shell32.lib
-  #define OSX_SYS_FRAMEWORKS Foundation $[if $[not $[BUILD_IPHONE]],AppKit]
-
-#end lib_target

+ 0 - 23
direct/src/directd/Sources.pp

@@ -1,23 +0,0 @@
-
-// This package presently only builds on Windows.
-// We also require the network layer (queuedConnectionManager, etc.)
-#define BUILD_DIRECTORY $[and $[WINDOWS_PLATFORM],$[HAVE_NET],$[HAVE_DIRECTD]]
-
-#define LOCAL_LIBS \
-    p3directbase
-#define OTHER_LIBS \
-    $[if $[HAVE_NET],p3net:c] p3linmath:c p3putil:c p3express:c panda:m pandaexpress:m p3dtoolconfig p3dtool
-#define WIN_SYS_LIBS $[WIN_SYS_LIBS] user32.lib //advapi32.lib
-
-#begin lib_target
-  #define TARGET p3directd
-
-  #define SOURCES \
-    directd.h directd.cxx
-  
-  #define INSTALL_HEADERS \
-    directd.h
-
-  #define IGATESCAN directd.h
-
-#end lib_target

+ 0 - 30
direct/src/directdServer/Sources.pp

@@ -1,30 +0,0 @@
-
-// This package presently only builds on Windows.
-// We also require the network layer (queuedConnectionManager, etc.)
-#define BUILD_DIRECTORY $[and $[WINDOWS_PLATFORM],$[HAVE_NET],$[HAVE_DIRECTD]]
-
-//#define LOCAL_LIBS \
-//    p3directbase
-#define OTHER_LIBS \
-    $[if $[HAVE_NET],p3net:c] p3linmath:c p3putil:c p3express:c panda:m pandaexpress:m p3dtoolconfig p3dtool
-#define WIN_SYS_LIBS $[WIN_SYS_LIBS] user32.lib //advapi32.lib
-
-#begin bin_target
-  #define TARGET p3directdServer
-  #define LOCAL_LIBS p3directd
-  #define OTHER_LIBS $[OTHER_LIBS] p3pystub
-
-  #define SOURCES \
-    directdServer.cxx directdServer.h
-
-#end bin_target
-
-#begin test_bin_target
-  #define TARGET directdClient
-  #define LOCAL_LIBS p3directd
-  #define OTHER_LIBS $[OTHER_LIBS] p3pystub
-
-  #define SOURCES \
-    directdClient.cxx directdClient.h
-
-#end test_bin_target

+ 0 - 0
direct/src/directdevices/Sources.pp


+ 0 - 0
direct/src/directnotify/Sources.pp


+ 0 - 8
direct/src/directscripts/Sources.pp

@@ -1,8 +0,0 @@
-#if $[and $[CTPROJS],$[WINDOWS_PLATFORM]]
-  // This script is only useful if you're using the ctattach script on
-  // Windows; therefore, we only bother to install it if you're using
-  // the cattach scripts.
-  #define INSTALL_SCRIPTS runPythonEmacs
-
-#endif
-

+ 0 - 0
direct/src/directtools/Sources.pp


+ 0 - 0
direct/src/directutil/Sources.pp


+ 0 - 25
direct/src/distributed/Sources.pp

@@ -1,25 +0,0 @@
-#define C++FLAGS -DWITHIN_PANDA
-
-#begin lib_target
-  #define BUILD_TARGET $[HAVE_PYTHON]
-  #define USE_PACKAGES openssl native_net net
-
-  #define TARGET p3distributed
-  #define LOCAL_LIBS \
-    p3directbase p3dcparser
-  #define OTHER_LIBS \
-    p3event:c p3downloader:c panda:m p3express:c pandaexpress:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3prc:c p3pstatclient:c p3pandabase:c p3linmath:c p3putil:c \
-    p3pipeline:c $[if $[HAVE_NET],p3net:c] $[if $[WANT_NATIVE_NET],p3nativenet:c]
-
-  #define SOURCES \
-    config_distributed.cxx config_distributed.h \
-    cConnectionRepository.cxx cConnectionRepository.I \
-    cConnectionRepository.h \
-    cDistributedSmoothNodeBase.cxx cDistributedSmoothNodeBase.I \
-    cDistributedSmoothNodeBase.h
-
-  #define IGATESCAN all
-#end lib_target

+ 0 - 20
direct/src/extensions_native/Sources.pp

@@ -1,20 +0,0 @@
-#if $[OSX_PLATFORM]
-#output extensions_darwin.py
-#### Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]].
-################################# DO NOT EDIT ###########################
-
-# This defines the shared-library filename extension that is built and
-# imported on OSX.  It's normally .dylib, but in certain Python and
-# OSX versions (for instance, Python 2.4 on OSX 10.4), it appears that
-# we need to generate and import .so files instead, since Python won't
-# import the .dylibs directly.  This is controlled via the BUNDLE_EXT
-# variable defined in Config.pp.
-#if $[BUNDLE_EXT]
-dll_ext = "$[BUNDLE_EXT]"
-#else
-dll_ext = ".dylib"
-#endif
-
-#end extensions_darwin.py
-
-#endif  // OSX_PLATFORM

+ 3 - 115
direct/src/extensions_native/extension_native_helpers.py

@@ -1,122 +1,10 @@
 ###  Tools
-__all__ = ["Dtool_ObjectToDict", "Dtool_funcToMethod", "Dtool_PreloadDLL"]
+__all__ = ["Dtool_ObjectToDict", "Dtool_funcToMethod"]
 
 import imp, sys, os
 
-# The following code exists to work around a problem that exists
-# with Python 2.5 or greater.
-
-# Specifically, Python 2.5 is designed to import files named *.pyd
-# only; it will not import files named *.dll (or *.so).  We work
-# around this problem by explicitly preloading all of the dll's we
-# expect to need.
-
-dll_suffix = ''
-if sys.platform == "win32":
-    # On Windows, dynamic libraries end in ".dll".
-    dll_ext = '.dll'
-    module_ext = '.pyd'
-
-    # We allow the caller to preload dll_suffix into the sys module.
-    dll_suffix = getattr(sys, 'dll_suffix', None)
-
-    if dll_suffix is None:
-        # Otherwise, we try to determine it from the executable name:
-        # python_d.exe implies _d across the board.
-        dll_suffix = ''
-        if sys.executable.endswith('_d.exe'):
-            dll_suffix = '_d'
-            
-elif sys.platform == "darwin":
-    # On OSX, the dynamic libraries usually end in .dylib, but
-    # sometimes we need .so.
-    try:
-        from direct.extensions_native.extensions_darwin import dll_ext
-    except ImportError:
-        dll_ext = '.dylib'
-    module_ext = '.so'
-else:
-    # On most other UNIX systems (including linux), .so is used.
-    dll_ext = '.so'
-    module_ext = '.so'
-
-if sys.platform == "win32":
-    # On Windows, we must furthermore ensure that the PATH is modified
-    # to locate all of the DLL files.
-
-    # First, search for the directory that contains all of our compiled
-    # modules.
-    target = None
-    filename = "libpandaexpress%s%s" % (dll_suffix, dll_ext)
-    for dir in sys.path + [sys.prefix]:
-        lib = os.path.join(dir, filename)
-        if (os.path.exists(lib)):
-            target = dir
-    if target == None:
-        message = "Cannot find %s" % (filename)
-        raise ImportError(message)
-
-    # And add that directory to the system path.
-    path = os.environ["PATH"]
-    if not path.startswith(target + ";"):
-        os.environ["PATH"] = target + ";" + path
-
-def Dtool_FindModule(module):
-    # Finds a .pyd module on the Python path.
-    filename = module.replace('.', os.path.sep) + module_ext
-    for dir in sys.path:
-        lib = os.path.join(dir, filename)
-        if (os.path.exists(lib)):
-            return lib
-
-    return None
-
-def Dtool_PreloadDLL(module):
-    if module in sys.modules:
-        return
-
-    # First find it as a .pyd module on the Python path.
-    if Dtool_FindModule(module):
-        # OK, we should have no problem importing it as is.
-        return
-
-    # Nope, we'll need to search for a dynamic lib and preload it.
-    # Search for the appropriate directory.
-    target = None
-    filename = module.replace('.', os.path.sep) + dll_suffix + dll_ext
-    for dir in sys.path + [sys.prefix]:
-        lib = os.path.join(dir, filename)
-        if (os.path.exists(lib)):
-            target = dir
-            break
-
-    if target is None:
-        message = "DLL loader cannot find %s." % (module)
-        raise ImportError(message)
-
-    # Now import the file explicitly.
-    pathname = os.path.join(target, filename)
-    imp.load_dynamic(module, pathname)    
-
-# Nowadays, we can compile libpandaexpress with libpanda into a
-# .pyd file called panda3d/core.pyd which can be imported without
-# any difficulty.  Let's see if this is the case.
-
-# In order to support things like py2exe that play games with the
-# physical python files on disk, we can't entirely rely on
-# Dtool_FindModule to find our panda3d.core module.  However, we
-# should be able to import it.  To differentiate the old-style Panda
-# build (with .dll's) from the new-style Panda build (with .pyd's), we
-# first try to import panda3d.core directly; if it succeeds we're in a
-# new-style build, and if it fails we must be in an old-style build.
-try:
-    from panda3d.core import *
-except ImportError:
-    Dtool_PreloadDLL("libpandaexpress")
-    from libpandaexpress import *
-
 def Dtool_ObjectToDict(cls, name, obj):
-    cls.DtoolClassDict[name] = obj;
+    cls.DtoolClassDict[name] = obj
 
 def Dtool_funcToMethod(func, cls, method_name=None):
     """Adds func to class so it is an accessible method; use method_name to specify the name to be used for calling the method.
@@ -127,4 +15,4 @@ def Dtool_funcToMethod(func, cls, method_name=None):
     func.im_self = None
     if not method_name:
         method_name = func.__name__
-    cls.DtoolClassDict[method_name] = func;
+    cls.DtoolClassDict[method_name] = func
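
The two helpers that survive this diff, Dtool_ObjectToDict and Dtool_funcToMethod, do nothing more than write entries into a wrapper class's DtoolClassDict. The following standalone sketch is not part of the commit; FakeWrapper is a hypothetical stand-in for an interrogate-generated Panda3D wrapper class, used only to illustrate what the helpers do:

    # Minimal sketch: FakeWrapper is hypothetical.  Real Panda3D wrapper
    # classes expose DtoolClassDict through their interrogate-generated
    # machinery, which is what makes the stored functions callable as methods.
    def Dtool_ObjectToDict(cls, name, obj):
        cls.DtoolClassDict[name] = obj

    def Dtool_funcToMethod(func, cls, method_name=None):
        # Store func under method_name (defaults to the function's own name).
        if not method_name:
            method_name = func.__name__
        cls.DtoolClassDict[method_name] = func

    class FakeWrapper(object):
        DtoolClassDict = {}

    def describe(self):
        return "instance of %s" % type(self).__name__

    Dtool_funcToMethod(describe, FakeWrapper)           # registered as "describe"
    Dtool_ObjectToDict(FakeWrapper, "VERSION", "1.0")   # registered as "VERSION"

    assert FakeWrapper.DtoolClassDict["describe"] is describe
    assert FakeWrapper.DtoolClassDict["VERSION"] == "1.0"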

+ 0 - 3
direct/src/ffi/.gitignore

@@ -1,3 +0,0 @@
-/genPyCode
-/genPyCode.bat
-/genPyCode.py

+ 0 - 344
direct/src/ffi/DoGenPyCode.py

@@ -1,344 +0,0 @@
-""" This module implements genPyCode, which is itself a generated
-script with a few default parameters filled in.  This module allows
-the user to specify alternate parameters on the command line. """
-
-import getopt
-import sys
-import os
-import time
-from direct.ffi import FFIConstants
-
-# Define a help string for the user
-helpString ="""
-genPyCode -h
-genPyCode
-genPyCode [opts] -i libdtoolconfig libcode1 libcode2 ...
-
-This script generates Python wrappers to interface with the C++
-libraries that have already been run through interrogate.  It is
-necessary to run this script after building the Panda tools for the
-first time, or after any major change in which some of the interface
-may have changed.
-
-The default options are baked into genPyCode by ppremake and need not
-be specified.  However, it is possible to override these on the
-command line if you need to fine-tune the behavior of genPyCode for
-some reason.  Most often, the only needed change will be to add one or
-more additional libraries to the list of libraries instrumented by
-default.
-
-
-Options:
-  -h          print this message
-  -v          verbose
-  -d          generate HTML documentation too
-  -C dir      directory to write output code
-  -H dir      directory to write output HTML
-  -x dir      directory to pull extension code from
-  -i lib      interrogate library
-  -e dir      directory to search for *.in files (may be repeated)
-  -p dir      directory to search for Python source files (may be repeated)
-  -r          remove the default library list; instrument only named libraries
-  -O          no C++ comments or assertion statements
-  -n          Don't use squeezeTool to squeeze the result into one .pyz file
-  -s          Don't delete source files after squeezing
-
-Any additional names listed on the command line are taken to be names
-of libraries that are to be instrumented.
-
-"""
-
-HTMLHeader = """
-<html>
-<head>
-<title>Panda3D documentation generated %s</title>
-</head>
-<body>
-"""
-
-HTMLFooter = """
-</body>
-</html>
-"""
-
-# Initialize variables
-outputCodeDir = ''
-outputHTMLDir = ''
-directDir = ''
-extensionsDir = ''
-interrogateLib = ''
-codeLibs = []
-etcPath = []
-pythonSourcePath = []
-doSqueeze = True
-deleteSourceAfterSqueeze = True
-doHTML = False
-native = False  # This is set by genPyCode.py
-
-def doGetopts():
-    global outputCodeDir
-    global outputHTMLDir
-    global extensionsDir
-    global interrogateLib
-    global codeLibs
-    global doSqueeze
-    global deleteSourceAfterSqueeze
-    global doHTML
-    global etcPath
-    global pythonSourcePath
-
-    # These options are allowed but are flagged as warnings (they are
-    # deprecated with the new genPyCode script):
-
-    # -g adds libgateway
-    # -t adds libtoontown
-    # -p adds libpirates
-    # -o adds libopt
-
-    FFIConstants.notify.setDebug(0)
-    FFIConstants.notify.setInfo(0)
-
-    # Extract the args the user passed in
-    try:
-        opts, pargs = getopt.getopt(sys.argv[1:], 'hvdOC:H:x:Ni:e:p:rns')
-    except e:
-        # User passed in a bad option, print the error and the help, then exit
-        print(e)
-        print(helpString)
-        sys.exit()
-
-    # Store the option values into our variables
-    for opt in opts:
-        flag, value = opt
-        if (flag == '-h'):
-            print(helpString)
-            sys.exit()
-        elif (flag == '-v'):
-            if not FFIConstants.notify.getInfo():
-                FFIConstants.notify.setInfo(1)
-            else:
-                FFIConstants.notify.setDebug(1)
-        elif (flag == '-d'):
-            doHTML = True
-        elif (flag == '-C'):
-            outputCodeDir = value
-        elif (flag == '-H'):
-            outputHTMLDir = value
-        elif (flag == '-x'):
-            extensionsDir = value
-        elif (flag == '-i'):
-            interrogateLib = value
-        elif (flag == '-e'):
-            etcPath.append(value)
-        elif (flag == '-p'):
-            pythonSourcePath.append(value)
-        elif (flag == '-r'):
-            codeLibs = []
-        elif (flag == '-O'):
-            FFIConstants.wantComments = 0
-            FFIConstants.wantTypeChecking = 0
-        elif (flag == '-n'):
-            doSqueeze = False
-        elif (flag == '-s'):
-            deleteSourceAfterSqueeze = False
-
-        else:
-            FFIConstants.notify.error('illegal option: ' + flag)
-
-    # Check for old, no-longer-used parameter:
-    invalidParameters = [
-        'linux', 'win-debug', 'win-release', 'win-publish',
-        'install', 'release'
-        ]
-    if pargs and pargs[0] in invalidParameters:
-        FFIConstants.notify.warning("parameter is deprecated: %s" % (pargs[0]))
-        del pargs[0]
-
-    # Store the program arguments into the codeLibs
-    for arg in pargs:
-        arg = arg.strip()
-        if arg:
-            codeLibs.append(arg)
-
-    # Make sure each name appears on codeLibs exactly once.
-    newLibs = []
-    for codeLib in codeLibs:
-        if codeLib not in newLibs:
-            newLibs.append(codeLib)
-    codeLibs = newLibs
-
-
-def doErrorCheck():
-    global outputCodeDir
-    global outputHTMLDir
-    global extensionsDir
-    global interrogateLib
-    global codeLibs
-    global doSqueeze
-    global etcPath
-
-    # Now do some error checking and verbose output
-    if (not interrogateLib):
-        FFIConstants.notify.error('You must specify an interrogate library (-i lib)')
-    else:
-        FFIConstants.notify.debug('Setting interrogate library to: ' + interrogateLib)
-        FFIConstants.InterrogateModuleName = interrogateLib
-
-    if (not outputCodeDir):
-        FFIConstants.notify.info('Setting output code directory to current directory')
-        outputCodeDir = '.'
-    elif (not os.path.exists(outputCodeDir)):
-        FFIConstants.notify.info('Directory does not exist, creating: ' + outputCodeDir)
-        os.mkdir(outputCodeDir)
-        FFIConstants.notify.info('Setting output code directory to: ' + outputCodeDir)
-    else:
-        FFIConstants.notify.info('Setting output code directory to: ' + outputCodeDir)
-
-    if doHTML:
-        if (not outputHTMLDir):
-            FFIConstants.notify.info('Setting output HTML directory to current directory')
-            outputHTMLDir = '.'
-        elif (not os.path.exists(outputHTMLDir)):
-            FFIConstants.notify.info('Directory does not exist, creating: ' + outputHTMLDir)
-            os.makedirs(outputHTMLDir)
-            FFIConstants.notify.info('Setting output HTML directory to: ' + outputHTMLDir)
-        else:
-            FFIConstants.notify.info('Setting output HTML directory to: ' + outputHTMLDir)
-
-
-    if (not extensionsDir):
-        FFIConstants.notify.debug('Setting extensions directory to current directory')
-        extensionsDir = '.'
-    elif (not os.path.exists(extensionsDir)):
-        FFIConstants.notify.error('Directory does not exist: ' + extensionsDir)
-    else:
-        FFIConstants.notify.debug('Setting extensions directory to: ' + extensionsDir)
-
-
-    if (not codeLibs):
-        FFIConstants.notify.error('You must specify one or more libraries to generate code from')
-    else:
-        FFIConstants.notify.debug('Generating code for: ' + repr(codeLibs))
-        FFIConstants.CodeModuleNameList = codeLibs
-
-def generateNativeWrappers():
-    from direct.extensions_native.extension_native_helpers import Dtool_PreloadDLL
-
-    # Empty out the output directories of unnecessary crud from
-    # previous runs before we begin.
-    for file in os.listdir(outputCodeDir):
-        pathname = os.path.join(outputCodeDir, file)
-        if not os.path.isdir(pathname):
-            os.unlink(pathname)
-
-    # Generate __init__.py
-    initFilename = os.path.join(outputCodeDir, '__init__.py')
-    init = open(initFilename, 'w')
-
-    # Generate PandaModules.py
-    pandaModulesFilename = os.path.join(outputCodeDir, 'PandaModules.py')
-    pandaModules = open(pandaModulesFilename, 'w')
-
-    # Copy in any helper classes from the extensions_native directory
-    extensionHelperFiles = ['extension_native_helpers.py']
-    for name in extensionHelperFiles:
-        inFilename = os.path.join(extensionsDir, name)
-        outFilename = os.path.join(outputCodeDir, name)
-        if os.path.exists(inFilename):
-            inFile = open(inFilename, 'r')
-            outFile = open(outFilename, 'w')
-            outFile.write(inFile.read())
-
-    # Generate a series of "libpandaModules.py" etc. files, one for
-    # each named module.
-    for moduleName in FFIConstants.CodeModuleNameList:
-        print('Importing code library: ' + moduleName)
-        Dtool_PreloadDLL(moduleName)
-
-        __import__(moduleName)
-        module = sys.modules[moduleName]
-
-        # Make a suitable meta module name
-        metaModuleName = ""
-        nextCap = False
-        for ch in moduleName:
-            if ch == '.':
-                nextCap = True
-            elif nextCap:
-                metaModuleName += ch.upper()
-                nextCap = False
-            else:
-                metaModuleName += ch
-        metaModuleName += "Modules"
-
-        # Wrap the import in a try..except so that we can continue if
-        # the library isn't present.  This is particularly necessary
-        # in the runtime (plugin) environment, where all libraries are
-        # not necessarily downloaded.
-        if sys.version_info >= (3, 0):
-            pandaModules.write('try:\n    from .%s import *\nexcept ImportError as err:\n    if "DLL loader cannot find" not in str(err):\n        raise\n' % (metaModuleName))
-        else:
-            pandaModules.write('try:\n    from %s import *\nexcept ImportError, err:\n    if "DLL loader cannot find" not in str(err):\n        raise\n' % (metaModuleName))
-
-        # Not sure if this message is helpful or annoying.
-        #pandaModules.write('  print("Failed to import %s")\n' % (moduleName))
-        pandaModules.write('\n')
-
-        moduleModulesFilename = os.path.join(outputCodeDir, '%s.py' % (metaModuleName))
-        moduleModules = open(moduleModulesFilename, 'w')
-
-        if sys.version_info >= (3, 0):
-            moduleModules.write('from .extension_native_helpers import *\n')
-        else:
-            moduleModules.write('from extension_native_helpers import *\n')
-        moduleModules.write('Dtool_PreloadDLL("%s")\n' % (moduleName))
-
-        moduleModules.write('from %s import *\n\n' % (moduleName))
-
-        # Now look for extensions
-        for className, classDef in module.__dict__.items():
-            if isinstance(classDef, type):
-                extensionFilename = os.path.join(extensionsDir, '%s_extensions.py' % (className))
-                if os.path.exists(extensionFilename):
-                    print('  Found extensions for class: %s' % (className))
-                    extension = open(extensionFilename, 'r')
-                    moduleModules.write(extension.read())
-                    moduleModules.write('\n')
-
-
-def run():
-    global outputCodeDir
-    global outputHTMLDir
-    global directDir
-    global extensionsDir
-    global interrogateLib
-    global codeLibs
-    global doSqueeze
-    global deleteSourceAfterSqueeze
-    global etcPath
-    global pythonSourcePath
-
-    doGetopts()
-    doErrorCheck()
-
-    # Ok, now we can start generating code
-    if native:
-        generateNativeWrappers()
-
-    else:
-        from direct.ffi import FFIInterrogateDatabase
-        db = FFIInterrogateDatabase.FFIInterrogateDatabase(etcPath = etcPath)
-        db.generateCode(outputCodeDir, extensionsDir)
-
-        if doSqueeze:
-            db.squeezeGeneratedCode(outputCodeDir, deleteSourceAfterSqueeze)
-
-    if doHTML:
-        from direct.directscripts import gendocs
-        from panda3d.core import PandaSystem
-        versionString = '%s %s' % (
-            PandaSystem.getDistributor(), PandaSystem.getVersionString())
-
-        gendocs.generate(versionString, etcPath, pythonSourcePath,
-                         outputHTMLDir, HTMLHeader % time.asctime(),
-                         HTMLFooter, '', '.html')

+ 0 - 24
direct/src/ffi/FFIConstants.py

@@ -1,24 +0,0 @@
-
-# create a DirectNotify category for FFI modules
-from direct.directnotify.DirectNotifyGlobal import *
-notify = directNotify.newCategory("FFI")
-
-# This is the name of the file that the importing code will be stored
-importModuleName = 'PandaModules'
-
-# A header for all the generated files
-generatedHeader = '# This file is automatically generated. It would be unwise to edit.\n\n'
-
-# These modules should come from somewhere outside this program
-# Maybe in an environment variable, or by looking at what you are
-# attached to?
-CodeModuleNameList = []
-
-# This is the module that contains the interrogate functions
-InterrogateModuleName = None
-
-# Should FFI output C++ comments with the source code?
-wantComments = 1
-
-# Should FFI output type assertions?
-wantTypeChecking = 1

+ 0 - 32
direct/src/ffi/FFIEnvironment.py

@@ -1,32 +0,0 @@
-import FFIConstants
-
-class FFIEnvironment:
-    def __init__(self):
-        self.reset()
-
-    def reset(self):
-        self.types = {}
-        self.globalFunctions = []
-        self.downcastFunctions = []
-        self.globalValues = []
-        self.manifests = []
-    
-    def addType(self, typeDescriptor, name):
-        if name in self.types:
-            FFIConstants.notify.info('Redefining type named: ' + name)
-        self.types[name] = typeDescriptor
-    
-    def getTypeNamed(self, name):
-        try:
-            self.types[name]
-        except KeyError:
-            raise 'Type not found in FFIEnvironment'
-    
-    def addGlobalFunction(self, typeDescriptors):
-        self.globalFunctions.extend(typeDescriptors)
-    def addDowncastFunction(self, typeDescriptor):
-        self.downcastFunctions.append(typeDescriptor)
-    def addGlobalValue(self, typeDescriptor):
-        self.globalValues.append(typeDescriptor)
-    def addManifest(self, typeDescriptor):
-        self.manifests.append(typeDescriptor)

+ 0 - 268
direct/src/ffi/FFIExternalObject.py

@@ -1,268 +0,0 @@
-from new import instance
-import FFIConstants
-
-WrapperClassMap = {}
-
-DowncastMap = {}
-
-# For testing, you can turn verbose and debug on
-# FFIConstants.notify.setInfo(1)
-# FFIConstants.notify.setDebug(1)
-
-# Uncomment the notify statements if you need to debug,
-# otherwise leave them commented out to prevent runtime
-# overhead of calling them
-
-
-
-# Register a python class in the type map if it is a typed object
-# The type map is used for upcasting and downcasting through
-# the panda inheritance chain
-def registerInTypeMap(pythonClass):
-    from pandac import TypedObject
-    if issubclass(pythonClass, TypedObject.TypedObject):
-        typeIndex = pythonClass.getClassType().getIndex()
-        WrapperClassMap[typeIndex] = pythonClass
-
-
-def funcToMethod(func, clas, method_name=None):
-    """Adds func to class so it is an accessible method; use method_name to specify the name to be used for calling the method.
-    The new method is accessible to any instance immediately."""
-    func.im_class=clas
-    func.im_func=func
-    func.im_self=None
-    if not method_name:
-        clas.__dict__[method_name]=func
-    else:
-        clas.__dict__[func.__name__]=func
-
-
-def FFIInstance(classdef, this = 0, userManagesMemory = 0):
-    answer = instance(classdef)
-    answer.this = this
-    answer.userManagesMemory = userManagesMemory
-    return answer
-
-class FFIExternalObject:
-    def __init__(self, *_args):
-        # By default, we do not manage our own memory
-        self.userManagesMemory = 0
-        # Start with a null this pointer
-        self.this = 0
-
-    def destructor(self):
-        # Base destructor in case you do not define one
-        pass
-
-    def getLineage(self, thisClass, targetBaseClass):
-        # Recursively determine the path in the heirarchy tree from thisClass
-        # to the targetBaseClass
-        return self.getLineageInternal(thisClass, targetBaseClass, [thisClass])
-
-    def getLineageInternal(self, thisClass, targetBaseClass, chain):
-        # Recursively determine the path in the heirarchy tree from thisClass
-        # to the targetBaseClass
-        #FFIConstants.notify.debug('getLineageInternal: checking %s to %s'
-        #                          % (thisClass.__name__, targetBaseClass.__name__))
-        if (targetBaseClass in thisClass.__bases__):
-            # Found a link
-            return chain + [targetBaseClass]
-        elif (len(thisClass.__bases__) == 0):
-            # No possible links
-            return 0
-        else:
-            # recurse
-            for base in thisClass.__bases__:
-                res = self.getLineageInternal(base, targetBaseClass, chain+[base])
-                if res:
-                    # FFIConstants.notify.debug('getLineageInternal: found path: ' + repr(res))
-                    return res
-            # Not found anywhere
-            return 0
-
-    def getDowncastFunctions(self, thisClass, baseClass):
-        #FFIConstants.notify.debug(
-        #    'getDowncastFunctions: Looking for downcast function from %s to %s'
-        #    % (baseClass.__name__, thisClass.__name__))
-        lineage = self.getLineage(thisClass, baseClass)
-        # Start with an empty list of downcast functions
-        downcastFunctionList = []
-
-        # If it could not find the baseClass anywhere in the lineage,
-        # return empty
-        if not lineage:
-            return []
-
-        # Walk along the lineage looking for downcast functions from
-        # class to class+1.  Start at the top and work downwards.
-        top = len(lineage) - 1
-        for i in range(top):
-            toClass = lineage[top - i - 1]
-            fromClass = lineage[top - i]
-            downcastFuncName = ('downcastTo' + toClass.__name__
-                                + 'From' + fromClass.__name__)
-            # Look over this classes global modules dictionaries
-            # for the downcast function name
-            for globmod in toClass.__CModuleDowncasts__:
-                func = globmod.__dict__.get(downcastFuncName)
-                if func:
-                    #FFIConstants.notify.debug(
-                    #    'getDowncastFunctions: Found downcast function %s in %s'
-                    #    % (downcastFuncName, globmod.__name__))
-                    downcastFunctionList.append(func)
-        return downcastFunctionList
-
-    def lookUpNewType(self, typeHandle, rootType):
-        # We tried to downcast to an unknown type.  Try to figure out
-        # the lowest type we *do* know, so we can downcast to that
-        # type instead.
-        if typeHandle.getNumParentClasses() == 0:
-            # This type has no parents!  That shouldn't happen.
-            FFIConstants.notify.warning("Unknown class type: %s has no parents!" % (typeHandle.getName()))
-            return None
-
-        parentType = typeHandle.getParentTowards(rootType, self)
-        parentIndex = parentType.getIndex()
-        parentWrapperClass = WrapperClassMap.get(parentIndex)
-        if parentWrapperClass == None:
-            parentWrapperClass = self.lookUpNewType(parentType, rootType)
-
-        if parentWrapperClass != None:
-            # If the parent class is known, then record that this
-            # class is a derivation of that parent class.
-            WrapperClassMap[typeHandle.getIndex()] = parentWrapperClass
-
-        return parentWrapperClass
-
-    def setPointer(self):
-        # See what type it really is and downcast to that type (if necessary)
-        # Look up the TypeHandle in the dict. get() returns None if it is not there
-        index = self.getTypeIndex()
-        exactWrapperClass = WrapperClassMap.get(index)
-        if exactWrapperClass == None:
-            # This is an unknown class type.  Perhaps it derives from
-            # a class type we know.
-            exactWrapperClass = self.lookUpNewType(self.getType(), self.getClassType())
-
-        # We do not need to downcast if we already have the same class
-        if (exactWrapperClass and (exactWrapperClass != self.__class__)):
-            # Create a new wrapper class instance
-            #exactObject = exactWrapperClass(None)
-            exactObject = FFIInstance(exactWrapperClass)
-            # Get the downcast pointer that has had all the downcast
-            # funcs called
-            downcastObject = self.downcast(exactWrapperClass)
-            exactObject.this = downcastObject.this
-            exactObject.userManagesMemory = downcastObject.userManagesMemory
-            # Make sure the original downcast object does not get
-            # garbage collected so that the exactObject will not get
-            # gc'd thereby transferring ownership of the object to
-            # this new exactObject
-            downcastObject.userManagesMemory = 0
-            return exactObject
-        else:
-            return self
-
-    def downcast(self, toClass):
-        fromClass = self.__class__
-        #FFIConstants.notify.debug('downcast: downcasting from %s to %s' % \
-        #    (fromClass.__name__, toClass.__name__))
-
-        # Check the cache to see if we have looked this up before
-        downcastChain = DowncastMap.get((fromClass, toClass))
-        if downcastChain == None:
-            downcastChain = self.getDowncastFunctions(toClass, fromClass)
-            #FFIConstants.notify.debug('downcast: computed downcast chain: ' + repr(downcastChain))
-            # Store it for next time
-            DowncastMap[(fromClass, toClass)] = downcastChain
-        newObject = self
-        for downcastFunc in downcastChain:
-            #FFIConstants.notify.debug('downcast: downcasting %s using %s' % \
-            #                         (newObject.__class__.__name__, downcastFunc))
-            newObject = downcastFunc(newObject)
-        return newObject
-
-    def compareTo(self, other):
-        # By default, we compare the C++ pointers
-        # Some classes will override the compareTo operator with their own
-        # logic in C++ (like vectors and matrices for instance)
-        try:
-            if self.this < other.this:
-                return -1
-            if self.this > other.this:
-                return 1
-            else:
-                return 0
-        except:
-            return 1
-
-    def __cmp__(self, other):
-        # Only use the C++ compareTo if they are the same class
-        if isinstance(other, self.__class__):
-            return self.compareTo(other)
-        # Otherwise, they must not be the same
-        # Just do a basic python id compare
-        else:
-            return cmp(id(self), id(other))
-
-    def __repr__(self):
-        # Lots of Panda classes have an output function defined that takes an Ostream
-        # We create a LineStream for the output function to write to, then we extract
-        # the string out of it and return it as our str
-        try:
-            from pandac import LineStream
-            lineStream = LineStream.LineStream()
-            self.output(lineStream)
-            baseRepr = lineStream.getLine()
-        except AssertionError, e:
-            raise AssertionError, e
-        except:
-            baseRepr = ('[' + self.__class__.__name__ + ' at: ' + repr(self.this) + ']')
-        # In any case, return the baseRepr
-        return baseRepr
-
-    def __str__(self):
-        # This is a more complete version of printing which shows the object type
-        # and pointer, plus the output from write() or output() whichever is defined
-        # Print this info for all objects
-        baseRepr = ('[' + self.__class__.__name__ + ' at: ' + repr(self.this) + ']')
-        # Lots of Panda classes have an write or output function defined that takes an Ostream
-        # We create a LineStream for the write or output function to write to, then we extract
-        # the string out of it and return it as our repr
-        from pandac import LineStream
-        lineStream = LineStream.LineStream()
-        try:
-            # First try the write function, that is the better one
-            self.write(lineStream)
-            while lineStream.isTextAvailable():
-                baseRepr = baseRepr + '\n' + lineStream.getLine()
-        except AssertionError, e:
-            raise AssertionError, e
-        except:
-            try:
-                # Sometimes write insists on a seconds parameter.
-                self.write(lineStream, 0)
-                while lineStream.isTextAvailable():
-                    baseRepr = baseRepr + '\n' + lineStream.getLine()
-            except AssertionError, e:
-                raise AssertionError, e
-            except:
-                try:
-                    # Ok, no write function, lets try output then
-                    self.output(lineStream)
-                    while lineStream.isTextAvailable():
-                        baseRepr = baseRepr + '\n' + lineStream.getLine()
-                except AssertionError, e:
-                    raise AssertionError, e
-                except:
-                    pass
-        # In any case, return the baseRepr
-        return baseRepr
-
-    def __hash__(self):
-        return self.this
-
-
-
-
-

+ 0 - 870
direct/src/ffi/FFIInterrogateDatabase.py

@@ -1,870 +0,0 @@
-
-# Note: do not import this file directly, it is meant to be used as part of
-# a Python script (generatePythonCode) that sets up variables that this
-# module depends on
-
-import string
-import os
-import glob
-
-import FFIEnvironment
-import FFITypes
-import FFISpecs
-import FFIRename
-import FFIConstants
-import FFIOverload
-from direct.showbase.PythonUtil import *
-
-FFIConstants.notify.info('Importing interrogate library: ' + FFIConstants.InterrogateModuleName)
-
-# Note: we do a from lib import * here because we do not want
-# to be dependent on the name of the interrogate library in this code
-exec('from ' + FFIConstants.InterrogateModuleName + ' import *')
-
-
-def constructGlobalFile(codeDir, CModuleName):
-    """
-    Open a file that will hold the global values and functions code
-    """
-    file = open(os.path.join(codeDir, CModuleName + 'Globals' + '.py'), 'w')
-    return file
-
-
-def constructDowncastFile(codeDir, CModuleName):
-    """
-    Open a file that will hold the global values and functions code
-    """
-    file = open(os.path.join(codeDir, CModuleName + 'Downcasts' + '.py'), 'w')
-    return file
-
-
-def constructImportFile(codeDir, CModuleName):
-    """
-    Open a file that will hold the global values and functions code
-    """
-    file = open(os.path.join(codeDir, CModuleName + 'Modules' + '.py'), 'w')
-    return file
-
-def outputGlobalFileImports(file, methodList, CModuleName):
-    # Print the standard header
-    file.write(FFIConstants.generatedHeader)
-    file.write('# CMODULE [' + CModuleName + ']\n')
-
-    # Import Python's builtin types
-    file.write('from types import IntType, LongType, FloatType, NoneType, StringType\n')
-    file.write('from direct.ffi import FFIExternalObject\n')
-
-
-    # Import the C modules
-    CModuleList = []
-    for method in methodList:
-        if (not (method.typeDescriptor.moduleName in CModuleList)):
-            CModuleList.append(method.typeDescriptor.moduleName)
-    for CModuleName in CModuleList:
-        if CModuleName:
-            file.write('import ' + CModuleName + '\n')
-
-    moduleList = []
-    for method in methodList:
-        returnType = method.typeDescriptor.returnType.recursiveTypeDescriptor()
-        returnTypeName = returnType.foreignTypeName
-        if (not (returnTypeName in moduleList)):
-            if (returnType.__class__ == FFITypes.ClassTypeDescriptor):
-                moduleList.append(returnTypeName)
-
-        # Look at all the arguments
-        argTypes = method.typeDescriptor.argumentTypes
-        for argType in argTypes:
-            # Get the real return type (not derived)
-            argType = argType.typeDescriptor.recursiveTypeDescriptor()
-            if (not argType.isNested):
-                argTypeName = argType.foreignTypeName
-                # Do not put our own module in the import list
-                # Do not put modules already in the list (like a set)
-                if (not (argTypeName in moduleList)):
-                    # If this is a class (not a primitive), put it on the list
-                    if (argType.__class__ == FFITypes.ClassTypeDescriptor):
-                        moduleList.append(argTypeName)
-
-    
-    for moduleName in moduleList:
-        if moduleName:
-            file.write('import ' + moduleName + '\n')
-            file.write('import ' + moduleName + '1\n')
-    
-    file.write('\n')
-
-
-def outputImportFileImports(file, typeList, CModuleName):
-    """
-    This is the file that we will import to get all the panda modules
-    """
-    
-    # Print the standard header
-    file.write(FFIConstants.generatedHeader)
-    file.write('# CMODULE [' + CModuleName + ']\n')
-    file.write('# Import the interrogate module\n')
-    file.write('import ' + FFIConstants.InterrogateModuleName + '\n')
-    file.write('\n')
-    
-    file.write('# Import the C module\n')
-    file.write('import ' + CModuleName + '\n')
-
-    # Filter out only the class and enum type descriptors (not const, pointers, etc)
-    classTypeList = []
-    enumTypeList = []
-    for type in typeList:
-        if (type.__class__ == FFITypes.ClassTypeDescriptor):
-            if (not type.isNested):
-                classTypeList.append(type)
-        elif (type.__class__ == FFITypes.EnumTypeDescriptor):
-            if (not type.isNested):
-                enumTypeList.append(type)
-            
-    # Sort the types based on inheritance, most generic first
-    classTypeList.sort(FFIOverload.inheritanceLevelSort)
-
-    moduleList = []
-    for type in classTypeList:
-        moduleList.append(type.foreignTypeName)
-
-    file.write('# Import enums into the global name space\n')
-    for type in enumTypeList:
-        file.write('from ' + type.enumName + ' import *\n')
-    file.write('\n')
-
-    file.write('# Import downcast functions\n')
-    file.write('from ' + CModuleName + 'Downcasts import *\n')
-    file.write('\n')
-
-    file.write('# Import classes\n')
-    for moduleName in moduleList:
-        if moduleName:
-            file.write('import ' + moduleName + '\n')    
-    file.write('\n')
-
-    file.write('# Import classes2\n')
-    for moduleName in moduleList:
-        if moduleName:
-            file.write('import ' + moduleName + '1\n')    
-    file.write('\n')
-
-
-    file.write('# Import the global module file into our name space\n')
-    file.write('from ' + CModuleName + 'Globals import *\n')
-    file.write('\n')
-
-    file.write('# Generate the classes\n')
-    #for moduleName in moduleList:
-    #    file.write(moduleName + '.generateClass_' + moduleName + '()\n')
-    file.write('\n')
-        
-    file.write('# Copy the classes into our own namespace\n')
-    for moduleName in moduleList:
-        file.write(moduleName + ' = ' + moduleName + '.' + moduleName + '\n')
-    file.write('\n')
-
-    file.write('# Put the classes in the wrapper class map\n')
-    file.write('from direct.ffi.FFIExternalObject import registerInTypeMap\n')
-    file.write('\n')
-    for moduleName in moduleList:
-        file.write('registerInTypeMap(' + moduleName + ')\n')
-    file.write('\n')
-
-
-
-def getTypeName(typeIndex, scoped=0):
-    """
-    Return a fully specified type name for this type index
-    Return the scoped name if asked for it
-    """
-    nameComponents = []
-    name = ''
-
-    
-    if scoped:
-        typeName = interrogate_type_scoped_name(typeIndex)
-    else:        
-        typeName = interrogate_type_name(typeIndex)
-
-    if typeIndex == 0:
-        FFIConstants.notify.debug('typeIndex 0: ' + typeName)
-        
-    if interrogate_type_is_wrapped(typeIndex):
-        typeName = getTypeName(interrogate_type_wrapped_type(typeIndex))
-    if interrogate_type_is_const(typeIndex):
-        nameComponents.append('const')
-    if interrogate_type_is_pointer(typeIndex):
-        nameComponents.append('ptr')
-    if interrogate_type_is_signed(typeIndex):
-        # signed is now built into the type name
-        #nameComponents.append('signed')
-        pass
-    if interrogate_type_is_unsigned(typeIndex):
-        # unsigned is now built into the type name
-        #nameComponents.append('unsigned')
-        pass
-    if interrogate_type_is_long(typeIndex):
-        nameComponents.append('long')
-    if interrogate_type_is_longlong(typeIndex):
-        nameComponents.append('longLong')
-    if interrogate_type_is_short(typeIndex):
-        nameComponents.append('short')
-    if (len(nameComponents) > 0):
-        typeName = string.capitalize(typeName[0]) + typeName[1:]
-    nameComponents.append(typeName)
-    for i in range(len(nameComponents)):
-        if (i == 0):
-            name = name + nameComponents[i]
-        else:
-            name = name + string.capitalize(nameComponents[i][0]) + nameComponents[i][1:]
-
-    FFIConstants.notify.debug('typeIndex: ' + repr(typeIndex) + ' typeName: ' + typeName + ' has name: ' + name)
-
-    if not name:
-        FFIConstants.notify.warning('typeIndex: ' + repr(typeIndex) + ' typeName: ' + typeName + ' has no name')
-        name = "UnnamedType"
-
-    return name
-
-
-class FFIInterrogateDatabase:
-
-    def __init__(self, etcPath = []):
-        for dir in etcPath:
-            interrogate_add_search_directory(dir)
-        
-        self.typeIndexMap = {}
-        self.environment = FFIEnvironment.FFIEnvironment()
-
-    def isDefinedType(self, typeIndex):
-        return typeIndex in self.typeIndexMap
-    
-    def constructDescriptor(self, typeIndex):
-        if interrogate_type_is_atomic(typeIndex):
-            return self.constructPrimitiveTypeDescriptor(typeIndex)
-
-        elif interrogate_type_is_enum(typeIndex):
-            return self.constructEnumTypeDescriptor(typeIndex)
-        
-        elif interrogate_type_is_wrapped(typeIndex):
-            if interrogate_type_is_pointer(typeIndex):
-                return self.constructPointerTypeDescriptor(typeIndex)
-            elif interrogate_type_is_const(typeIndex):
-                return self.constructConstTypeDescriptor(typeIndex)
-        
-        elif (interrogate_type_is_class(typeIndex) or
-              interrogate_type_is_struct(typeIndex) or
-              interrogate_type_is_union(typeIndex)):
-            return self.constructClassTypeDescriptor(typeIndex)
-
-        elif (not interrogate_type_is_fully_defined(typeIndex)):
-            return  self.constructClassTypeDescriptor(typeIndex)
-        
-        else:
-            raise 'A type in the interrogate database was not recognized: '+ repr(typeIndex)
-    
-    def constructPrimitiveTypeDescriptor(self, typeIndex):
-        if self.isDefinedType(typeIndex):
-            return self.typeIndexMap[typeIndex]
-        else:
-            descriptor = FFITypes.PrimitiveTypeDescriptor()
-            #descriptor.environment = self.environment
-            descriptor.atomicType = interrogate_type_atomic_token(typeIndex)
-            if interrogate_type_has_module_name(typeIndex):
-                descriptor.moduleName = 'lib' + interrogate_type_module_name(typeIndex)
-            descriptor.foreignTypeName = \
-                FFIRename.nonClassNameFromCppName(getTypeName(typeIndex))
-            descriptor.typeIndex = typeIndex
-            self.typeIndexMap[typeIndex] = descriptor
-            return descriptor
-    
-    def constructEnumTypeDescriptor(self, typeIndex):
-        if self.isDefinedType(typeIndex):
-            return self.typeIndexMap[typeIndex]
-        else:
-            descriptor = FFITypes.EnumTypeDescriptor()
-            #descriptor.environment = self.environment
-            descriptor.isNested = interrogate_type_is_nested(typeIndex)
-            if descriptor.isNested:
-                outerTypeIndex = interrogate_type_outer_class(typeIndex)
-                descriptor.outerType = self.constructDescriptor(outerTypeIndex)
-            if interrogate_type_has_module_name(typeIndex):
-                descriptor.moduleName = 'lib' + interrogate_type_module_name(typeIndex)
-
-            # Enums are ints in C++ but we do not want to redefine the int type
-            # So we will just call them enums
-            descriptor.enumName = FFIRename.classNameFromCppName(getTypeName(typeIndex))
-            descriptor.foreignTypeName = '__enum__' + descriptor.enumName
-            numValues = interrogate_type_number_of_enum_values(typeIndex)
-
-            # Store the names and values of the enum in a dictionary
-            for i in range(numValues):
-                value = interrogate_type_enum_value(typeIndex, i)
-                name = FFIRename.classNameFromCppName(
-                    interrogate_type_enum_value_name(typeIndex, i))
-                scopedName = FFIRename.classNameFromCppName(
-                    interrogate_type_enum_value_scoped_name(typeIndex, i))
-                descriptor.values[name] = value
-            
-            descriptor.typeIndex = typeIndex
-            self.typeIndexMap[typeIndex] = descriptor
-            return descriptor
-
-    def constructPointerTypeDescriptor(self, typeIndex):
-        if self.isDefinedType(typeIndex):
-            return self.typeIndexMap[typeIndex]
-        descriptor = FFITypes.PointerTypeDescriptor()
-        #descriptor.environment = self.environment
-        descriptor.isNested = interrogate_type_is_nested(typeIndex)
-        if descriptor.isNested:
-            outerTypeIndex = interrogate_type_outer_class(typeIndex)
-            descriptor.outerType = self.constructDescriptor(outerTypeIndex)
-        if interrogate_type_has_module_name(typeIndex):
-            descriptor.moduleName = 'lib' + interrogate_type_module_name(typeIndex)
-        descriptor.foreignTypeName = \
-             FFIRename.nonClassNameFromCppName(getTypeName(typeIndex))
-        descriptor.typeIndex = typeIndex
-        wrappedTypeIndex = interrogate_type_wrapped_type(typeIndex)
-        wrappedTypeDescriptor = self.constructDescriptor(wrappedTypeIndex)
-        descriptor.typeDescriptor = wrappedTypeDescriptor
-        self.typeIndexMap[typeIndex] = descriptor
-        return descriptor
-    
-    def constructConstTypeDescriptor(self, typeIndex):
-        if self.isDefinedType(typeIndex):
-            return self.typeIndexMap[typeIndex]
-        descriptor = FFITypes.ConstTypeDescriptor()
-        #descriptor.environment = self.environment
-        descriptor.isNested = interrogate_type_is_nested(typeIndex)
-        if descriptor.isNested:
-            outerTypeIndex = interrogate_type_outer_class(typeIndex)
-            descriptor.outerType = self.constructDescriptor(outerTypeIndex)
-        if interrogate_type_has_module_name(typeIndex):
-            descriptor.moduleName = 'lib' + interrogate_type_module_name(typeIndex)
-        descriptor.foreignTypeName = \
-             FFIRename.nonClassNameFromCppName(getTypeName(typeIndex))
-        descriptor.typeIndex = typeIndex
-        wrappedTypeIndex = interrogate_type_wrapped_type(typeIndex)
-        wrappedTypeDescriptor = self.constructDescriptor(wrappedTypeIndex)
-        descriptor.typeDescriptor = wrappedTypeDescriptor
-        self.typeIndexMap[typeIndex] = descriptor
-        return descriptor
-
-    def constructParentTypeDescriptors(self, typeIndex):
-        numParents = interrogate_type_number_of_derivations(typeIndex)
-        descriptors = []
-        for i in range(numParents):
-            parentTypeIndex = interrogate_type_get_derivation(typeIndex, i)
-            if self.isDefinedType(parentTypeIndex):
-                parentTypeDescriptor = self.typeIndexMap[parentTypeIndex]
-            else:
-                parentTypeDescriptor = self.constructDescriptor(parentTypeIndex)
-            descriptors.append(parentTypeDescriptor)
-        return descriptors
-
-    def constructNestedTypeDescriptors(self, typeIndex):
-        nestedTypes = []
-        numNestedTypes = interrogate_type_number_of_nested_types(typeIndex)
-        for i in range(numNestedTypes):
-            nestedTypeIndex = interrogate_type_get_nested_type(typeIndex, i)
-            descriptor = self.constructDescriptor(nestedTypeIndex)
-            nestedTypes.append(descriptor)
-        return nestedTypes
-    
-    def constructClassTypeDescriptor(self, typeIndex):
-        if self.isDefinedType(typeIndex):
-            return self.typeIndexMap[typeIndex]
-        typeName = FFIRename.classNameFromCppName(getTypeName(typeIndex))
-        if typeName == "PyObject":
-            # A special case: the PyObject type is really a native
-            # Python object, not to be molested--it's not really an
-            # FFI class object.
-            descriptor = FFITypes.PyObjectTypeDescriptor()
-            self.typeIndexMap[typeIndex] = descriptor
-            return descriptor
-            
-        descriptor = FFITypes.ClassTypeDescriptor()
-        self.typeIndexMap[typeIndex] = descriptor
-        #descriptor.environment = self.environment
-        descriptor.foreignTypeName = typeName
-
-        if (typeName == "TypedObject"):
-            FFITypes.TypedObjectDescriptor = descriptor
-        
-        descriptor.isNested = interrogate_type_is_nested(typeIndex)
-        if descriptor.isNested:
-            outerTypeIndex = interrogate_type_outer_class(typeIndex)
-            descriptor.outerType = self.constructDescriptor(outerTypeIndex)
-        if interrogate_type_has_module_name(typeIndex):
-            descriptor.moduleName = 'lib' + interrogate_type_module_name(typeIndex)
-        if FFIConstants.wantComments:
-            if interrogate_type_has_comment(typeIndex):
-                descriptor.comment = interrogate_type_comment(typeIndex)
-        descriptor.typeIndex = typeIndex
-        descriptor.instanceMethods = self.constructMemberFunctionSpecifications(typeIndex)
-        descriptor.upcastMethods = self.constructUpcastFunctionSpecifications(typeIndex)
-        # Constructing downcasts does not return the functions, it just puts them in the class
-        # See the comment in that function
-        self.constructDowncastFunctionSpecifications(typeIndex)
-        descriptor.filterOutStaticMethods()
-        descriptor.constructors = self.constructConstructorSpecifications(typeIndex)
-        descriptor.destructor = self.constructDestructorSpecification(typeIndex)
-        descriptor.parentTypes = self.constructParentTypeDescriptors(typeIndex)
-        descriptor.nestedTypes = self.constructNestedTypeDescriptors(typeIndex)
-        return descriptor
-
-    def constructFunctionTypeDescriptors(self, functionIndex):
-
-        # Store these values because they will be the same for all the wrappers
-        isVirtual = interrogate_function_is_virtual(functionIndex)
-        #environment = self.environment
-        foreignTypeName = interrogate_function_name(functionIndex)
-        if FFIConstants.wantComments:
-            prototype = interrogate_function_prototype(functionIndex)
-            if interrogate_function_has_comment(functionIndex):
-                comment = interrogate_function_comment(functionIndex)
-            else:
-                comment = ''
-        # Prepend lib to the module name it reports because that will be the name of
-        # the Python module we import. This apparently stems from a makefile
-        # discrepancy in the way we build the libraries
-        if interrogate_function_has_module_name(functionIndex):
-            moduleName = 'lib' + interrogate_function_module_name(functionIndex)
-        else:
-            moduleName = None
-        typeIndex = functionIndex
-
-        # Look at the Python wrappers for this function
-        numPythonWrappers = interrogate_function_number_of_python_wrappers(functionIndex)
-        
-        if numPythonWrappers == 0:
-            # If there are no Python wrappers, it is because interrogate could not handle
-            # something about the function. Just return an empty list
-            return []
-
-        wrapperDescriptors = []
-
-        # Iterate over the wrappers constructing a FunctionTypeDescriptor for each
-        for i in range(numPythonWrappers):
-            descriptor = FFITypes.FunctionTypeDescriptor()
-            descriptor.isVirtual = isVirtual
-            #descriptor.environment = environment
-            descriptor.foreignTypeName = foreignTypeName
-            if FFIConstants.wantComments:
-                descriptor.comment = comment
-                descriptor.prototype = prototype
-            descriptor.moduleName = moduleName
-            descriptor.typeIndex = typeIndex
-            pythonFunctionIndex = interrogate_function_python_wrapper(functionIndex, i)
-            descriptor.wrapperName = interrogate_wrapper_name(pythonFunctionIndex)
-            # Even if it does not have a return value, it reports void which is better
-            # for generating code, so I will not even ask here
-            # if interrogate_wrapper_has_return_value(pythonFunctionIndex):
-            returnType = interrogate_wrapper_return_type(pythonFunctionIndex)
-            descriptor.returnType = self.constructDescriptor(returnType)
-            descriptor.argumentTypes = self.constructFunctionArgumentTypes(pythonFunctionIndex)
-            descriptor.userManagesMemory = interrogate_wrapper_caller_manages_return_value(pythonFunctionIndex)
-            descriptor.returnValueDestructor = interrogate_wrapper_return_value_destructor(pythonFunctionIndex)
-            wrapperDescriptors.append(descriptor)
-            
-        return wrapperDescriptors
-    
-    def constructFunctionArgumentTypes(self, functionIndex):
-        numArgs = interrogate_wrapper_number_of_parameters(functionIndex)
-        arguments = []
-        for argIndex in range(numArgs):
-            if interrogate_wrapper_parameter_has_name(functionIndex, argIndex):
-                name =  FFIRename.nonClassNameFromCppName(
-                    interrogate_wrapper_parameter_name(functionIndex, argIndex))
-            else:
-                name = ('parameter' + repr(argIndex))
-            descriptor = self.constructDescriptor(
-                interrogate_wrapper_parameter_type(functionIndex, argIndex))
-            
-            argSpec = FFISpecs.MethodArgumentSpecification()
-            if interrogate_wrapper_parameter_is_this(functionIndex, argIndex):
-                argSpec.isThis = 1
-            argSpec.name = name
-            argSpec.typeDescriptor = descriptor
-            arguments.append(argSpec)
-        return arguments
-        
-    def constructMemberFunctionSpecifications(self, typeIndex):
-        funcSpecs = []
-        numFuncs = interrogate_type_number_of_methods(typeIndex)
-        for i in range(numFuncs):
-            funcIndex = interrogate_type_get_method(typeIndex, i)
-            typeDescs = self.constructFunctionTypeDescriptors(funcIndex)
-            for typeDesc in typeDescs:
-                funcSpec = FFISpecs.MethodSpecification()
-                funcSpec.name = FFIRename.methodNameFromCppName(
-                    interrogate_function_name(funcIndex),
-                    getTypeName(typeIndex))
-                funcSpec.typeDescriptor = typeDesc
-                funcSpec.index = funcIndex
-                funcSpecs.append(funcSpec)
-        return funcSpecs
-
-    def constructUpcastFunctionSpecifications(self, typeIndex):
-        funcSpecs = []
-        numFuncs = interrogate_type_number_of_derivations(typeIndex)
-        for i in range(numFuncs):
-            if interrogate_type_derivation_has_upcast(typeIndex, i):
-                funcIndex = interrogate_type_get_upcast(typeIndex, i)
-                typeDescs = self.constructFunctionTypeDescriptors(funcIndex)
-                for typeDesc in typeDescs:
-                    funcSpec = FFISpecs.MethodSpecification()
-                    # We synthesize the upcast function name instead
-                    # of using the name supplied by interrogate, to
-                    # allow for possible renaming of types on this
-                    # side.
-                    funcSpec.name = 'upcastTo' + typeDesc.returnType.typeDescriptor.foreignTypeName
-                    #funcSpec.name = FFIRename.methodNameFromCppName(
-                    #    interrogate_function_name(funcIndex))
-                    funcSpec.typeDescriptor = typeDesc
-                    funcSpec.index = funcIndex
-                    funcSpecs.append(funcSpec)
-        return funcSpecs
-    
-    def constructDowncastFunctionSpecifications(self, typeIndex):
-        """
-        The strange thing about downcast functions is that they appear in the
-        class they are being downcast TO, not downcast FROM. But they should be
-        built into the class they are being downcast from. For instance, a method
-        downcastToNode(ptrBoundedObject) will appear in Node's list of methods
-        but should be compiled into BoundedObject's class
-        UPDATE: These are no longer compiled into the from-class. That was
-        preventing the libraries from being independent since the from class
-        now had knowledge of the to class which is potentially in a library
-        downstream. Now these functions are just global functions
-        """
-        numFuncs = interrogate_type_number_of_derivations(typeIndex)
-        for i in range(numFuncs):
-            # Make sure this downcast is possible
-            if (not interrogate_type_derivation_downcast_is_impossible(typeIndex, i)):
-                if interrogate_type_derivation_has_downcast(typeIndex, i):
-                    funcIndex = interrogate_type_get_downcast(typeIndex, i)
-                    typeDescs = self.constructFunctionTypeDescriptors(funcIndex)
-                    for typeDesc in typeDescs:
-                        funcSpec = FFISpecs.GlobalFunctionSpecification()
-                        funcSpec.name = FFIRename.methodNameFromCppName(
-                            interrogate_function_name(funcIndex),
-                            getTypeName(typeIndex))
-                        funcSpec.typeDescriptor = typeDesc
-                        funcSpec.index = funcIndex
-                        # Here we look for the class in the first argument
-                        fromClass = typeDesc.argumentTypes[0].typeDescriptor.recursiveTypeDescriptor()
-
-                        # Append the from class name on the method to uniquify it now
-                        # that these are global methods
-                        funcSpec.name = funcSpec.name + 'From' + fromClass.foreignTypeName
-                        
-                        # Append this funcSpec to that class's downcast methods
-                        # fromClass.downcastMethods.append(funcSpec)
-                        self.environment.addDowncastFunction(funcSpec)
-    
-    def constructConstructorSpecifications(self, typeIndex):
-        funcSpecs = []
-        numFuncs = interrogate_type_number_of_constructors(typeIndex)
-        for i in range(numFuncs):
-            funcIndex = interrogate_type_get_constructor(typeIndex, i)
-            typeDescs = self.constructFunctionTypeDescriptors(funcIndex)
-            for typeDesc in typeDescs:
-                funcSpec = FFISpecs.MethodSpecification()
-                funcSpec.name = 'constructor'
-                # funcSpec.name = FFIRename.methodNameFromCppName(
-                #    interrogate_function_name(funcIndex))
-                funcSpec.typeDescriptor = typeDesc
-                # Flag this function as being a constructor
-                funcSpec.constructor = 1
-                funcSpec.index = funcIndex            
-                funcSpecs.append(funcSpec)
-        return funcSpecs
-    
-    def constructDestructorSpecification(self, typeIndex):
-        if (not interrogate_type_has_destructor(typeIndex)):
-            return None
-        funcIndex = interrogate_type_get_destructor(typeIndex)
-        typeDescs = self.constructFunctionTypeDescriptors(funcIndex)
-        if (len(typeDescs) == 0):
-            return None
-        for typeDesc in typeDescs:
-            funcSpec = FFISpecs.MethodSpecification()
-            funcSpec.name = 'destructor'
-            # funcSpec.name = FFIRename.methodNameFromCppName(
-            #    interrogate_function_name(funcIndex))
-            funcSpec.typeDescriptor = typeDesc
-            funcSpec.index = funcIndex
-            return funcSpec
-    
-    def addTypes(self, CModuleName):
-        for i in range(interrogate_number_of_global_types()):
-            typeIndex = interrogate_get_global_type(i)
-            if self.typeInCModule(typeIndex, CModuleName):
-                self.constructDescriptor(typeIndex)
-
-    def addEnvironmentTypes(self):
-        for descriptor in self.typeIndexMap.values():
-            self.environment.addType(descriptor, descriptor.foreignTypeName)
-
-    def functionInCModule(self, funcIndex, CModuleName):
-        if interrogate_function_has_module_name(funcIndex):
-            moduleName = 'lib' + interrogate_function_module_name(funcIndex)
-            return (moduleName == CModuleName)
-
-    def typeInCModule(self, typeIndex, CModuleName):
-        if interrogate_type_has_module_name(typeIndex):
-            moduleName = 'lib' + interrogate_type_module_name(typeIndex)
-            return (moduleName == CModuleName)
-
-    
-    def constructGlobal(self, globalIndex, CModuleName):
-        # We really do not need the descriptor for the value, just
-        # the getter and setter
-        # typeIndex = interrogate_element_type(globalIndex)
-        # descriptor = self.constructDescriptor(typeIndex)
-        
-        if interrogate_element_has_getter(globalIndex):
-            getterIndex = interrogate_element_getter(globalIndex)
-            # If this function is not in this Cmodule just return
-            if not self.functionInCModule(getterIndex, CModuleName):
-                return None
-            getter = self.constructGlobalFunction(getterIndex)
-        else:
-            getter = None
-
-        if interrogate_element_has_setter(globalIndex):
-            setterIndex = interrogate_element_setter(globalIndex)
-            # If this function is not in this Cmodule just return
-            if not self.functionInCModule(setterIndex, CModuleName):
-                return None
-            setter = self.constructGlobalFunction(setterIndex)
-        else:
-            setter = None
-        globalSpec = FFISpecs.GlobalValueSpecification()
-        globalSpec.getter = getter
-        globalSpec.setter = setter
-        # globalSpec.typeDescriptor = descriptor
-        cppName = interrogate_element_name(globalIndex)
-        globalSpec.name = FFIRename.classNameFromCppName(cppName)
-        return globalSpec
-
-    def constructGlobalFunction(self, globalIndex):
-        descriptors = self.constructFunctionTypeDescriptors(globalIndex)
-        if (len(descriptors) == 0):
-            return None
-        funcSpecs = []
-        for descriptor in descriptors:
-            funcSpec = FFISpecs.GlobalFunctionSpecification()
-            funcSpec.typeDescriptor = descriptor
-            funcSpec.name = FFIRename.methodNameFromCppName(
-                funcSpec.typeDescriptor.foreignTypeName)
-            funcSpec.index = globalIndex
-            funcSpecs.append(funcSpec)
-        return funcSpecs
-        
-    def addGlobalFunctions(self, CModuleName):
-        numGlobals = interrogate_number_of_global_functions()
-        for i in range(numGlobals):
-            funcIndex = interrogate_get_global_function(i)
-            if self.functionInCModule(funcIndex, CModuleName):
-                newGlob = self.constructGlobalFunction(funcIndex)
-                if newGlob:
-                    self.environment.addGlobalFunction(newGlob)
-                    
-    def addGlobalValues(self, CModuleName):
-        numGlobals = interrogate_number_of_globals()
-        for i in range(numGlobals):
-            globalIndex = interrogate_get_global(i)
-            newGlob = self.constructGlobal(globalIndex, CModuleName)
-            if newGlob:
-                self.environment.addGlobalValue(newGlob)
-
-    def constructManifest(self, manifestIndex):
-        descriptor = None
-        intValue = None
-        getter = None
-
-        if interrogate_manifest_has_type(manifestIndex):
-            typeIndex = interrogate_manifest_get_type(manifestIndex)
-            descriptor = self.constructDescriptor(typeIndex)
-
-        definition = interrogate_manifest_definition(manifestIndex)
-
-        # See if this manifest is an int. There are shortcuts if it is.
-        # If it does have an int value, there will be no getter, we will
-        # just output the value in the generated code
-        if interrogate_manifest_has_int_value(manifestIndex):
-            intValue = interrogate_manifest_get_int_value(manifestIndex)
-        else:
-            # See if this manifest has a getter
-            if interrogate_manifest_has_getter(manifestIndex):
-                getterIndex = interrogate_manifest_getter(manifestIndex)
-                getter = self.constructGlobalFunction(getterIndex)
-
-        manifestSpec = FFISpecs.ManifestSpecification()
-        manifestSpec.typeDescriptor = descriptor
-        manifestSpec.definition = definition
-        manifestSpec.intValue = intValue
-        manifestSpec.getter = getter
-        cppName = interrogate_manifest_name(manifestIndex)
-        manifestSpec.name = FFIRename.classNameFromCppName(cppName)
-        return manifestSpec
-
-    def addManifestSymbols(self):
-        numManifests = interrogate_number_of_manifests()
-        for i in range(numManifests):
-            manifestIndex = interrogate_get_manifest(i)
-            newManifest = self.constructManifest(manifestIndex)
-            self.environment.addManifest(newManifest)
-
-
-    def generateCode(self, codeDir, extensionsDir):
-        # Empty out the codeDir of unnecessary crud from previous runs
-        # before we begin.
-        for file in os.listdir(codeDir):
-            pathname = os.path.join(codeDir, file)
-            if not os.path.isdir(pathname):
-                os.unlink(pathname)
-        
-        # Import all the C++ modules
-        for CModuleName in FFIConstants.CodeModuleNameList:
-            self.generateCodeLib(codeDir, extensionsDir, CModuleName)
-
-        # For convenience, output a file that imports all the c module files
-        file = open(os.path.join(codeDir, FFIConstants.importModuleName + '.py'), 'w')
-        for CModuleName in FFIConstants.CodeModuleNameList:
-            file.write('from ' + CModuleName + 'Modules import *\n')
-        file.close()
-
-        # Generate an empty __init__.py to make the directory a Python
-        # package.
-        init = os.path.join(codeDir, '__init__.py')
-        file = open(init, 'w')
-        file.close()
-
-    def squeezeGeneratedCode(self, outputDir, deleteSource=True):
-
-        # Since we will be squeezing the importModuleName file, rename
-        # the original to something we can import from within the
-        # squeezed version.
-        squeezedName = FFIConstants.importModuleName
-        unsqueezedName = FFIConstants.importModuleName + 'Unsqueezed'
-
-        os.rename(os.path.join(outputDir, squeezedName + '.py'),
-                  os.path.join(outputDir, unsqueezedName + '.py'))
-
-        # Get the list of files to squeeze.  This is all of the .py
-        # files in the output directory except for the __init__.py
-        # file.
-        
-        files = glob.glob(os.path.join(outputDir, '*.py'))
-        init = os.path.join(outputDir, '__init__.py')
-        try:
-            files.remove(init)
-        except:
-            pass
-
-        print "Squeezing %s files." % (len(files))
-
-        from direct.showbase import pandaSqueezeTool
-        
-        pandaSqueezeTool.squeeze(squeezedName, unsqueezedName,
-                                 files, outputDir)
-
-        if deleteSource:
-            # Remove the now-squeezed source files.
-            for file in files:
-                os.remove(file)
-        
-
-    def generateCodeLib(self, codeDir, extensionsDir, CModuleName):
-        # Reset the environment so we are clean from any old modules
-        self.environment.reset()
-
-        FFIConstants.notify.info('='*50)
-        FFIConstants.notify.warning('Importing code library: ' + CModuleName)
-        exec('import ' + CModuleName)
-
-        if interrogate_error_flag():
-            FFIConstants.notify.error("Error reading interrogate database; can't continue.")
-
-        self.updateBindings(CModuleName)
-        
-        FFIConstants.notify.info('Generating type code...')
-        for type in self.environment.types.values():
-            # Do not generate code for nested types at the top level
-            if (not type.isNested):
-                type.generateGlobalCode(codeDir, extensionsDir)
-
-
-        FFIConstants.notify.info('Generating global downcast code...')
-        downcastFile = constructDowncastFile(codeDir, CModuleName)
-        # Output all the imports based on this list of functions
-        outputGlobalFileImports(downcastFile,
-                                self.environment.downcastFunctions,
-                                CModuleName)
-        for type in self.environment.downcastFunctions:
-            type.generateGlobalDowncastCode(downcastFile)
-            
-        FFIConstants.notify.info('Generating global code...')
-        globalFile = constructGlobalFile(codeDir, CModuleName)
-
-        # Make a list of all the global functions. This includes the normal
-        # global functions as well as the getters and setters on all the
-        # global values. This list is used to figure out what files to import
-        # Only include the global functions from the current C module
-        globalFunctions = self.environment.globalFunctions
-        for globalValue in self.environment.globalValues:
-            if globalValue.getter:
-                globalFunctions.append(globalValue.getter)
-            if globalValue.setter:
-                globalFunctions.append(globalValue.setter)
-        # Output all the imports based on this list of functions
-        outputGlobalFileImports(globalFile, globalFunctions, CModuleName)
-
-        # Generate overloading
-        overloadedGlobalFunctions = {}
-        for methodSpec in globalFunctions:
-            methodList = overloadedGlobalFunctions.setdefault(methodSpec.name, [])
-            methodList.append(methodSpec)
-
-        overloadedGlobalFunctions = FFIOverload.cullOverloadedMethods(overloadedGlobalFunctions)
-
-        for methodSpecList in overloadedGlobalFunctions.values():
-            treeColl = FFIOverload.FFIMethodArgumentTreeCollection(None, methodSpecList)
-            treeColl.generateCode(globalFile, -1)
-
-        FFIConstants.notify.info('Generating global values...')
-        for type in self.environment.globalValues:
-            type.generateGlobalCode(globalFile)
-            
-        FFIConstants.notify.info('Generating global functions...')
-        for type in self.environment.globalFunctions:
-            type.generateGlobalCode(globalFile)
-
-        FFIConstants.notify.info('Generating manifests...')
-        for type in self.environment.manifests:
-            type.generateGlobalCode(globalFile)
-
-        globalFile.close()
-
-        FFIConstants.notify.info('Generating import code...')
-        importFile = constructImportFile(codeDir, CModuleName)
-        outputImportFileImports(importFile, self.environment.types.values(), CModuleName)
-
-    def updateBindings(self, CModuleName):
-        FFIConstants.notify.info('Updating Bindings')
-        FFIConstants.notify.info('Adding Types...')
-        self.addTypes(CModuleName)
-        FFIConstants.notify.info('Adding global values...')
-        self.addGlobalValues(CModuleName)
-        FFIConstants.notify.info('Adding global functions...')
-        self.addGlobalFunctions(CModuleName)
-        FFIConstants.notify.info('Adding manifest symbols...')
-        self.addManifestSymbols()
-        FFIConstants.notify.info('Adding environment types...')
-        self.addEnvironmentTypes()
-
-
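
The getTypeName() helper above builds the Python-side name of a C++ type by collecting qualifier components ('const', 'ptr', 'long', ...) from interrogate and camel-casing them onto the reported type name. A minimal stand-alone sketch of that composition rule, with the interrogate_type_* queries replaced by plain boolean flags (hypothetical stand-ins, not the real interrogate API):

    def composeTypeName(typeName, isConst=False, isPointer=False):
        # Collect qualifier components in the order the generator used them.
        components = []
        if isConst:
            components.append('const')
        if isPointer:
            components.append('ptr')
        # Once any qualifier is present, the base type name is capitalized too.
        if components:
            typeName = typeName[0].upper() + typeName[1:]
        components.append(typeName)
        # The first component keeps its case; every later one is capitalized.
        name = components[0]
        for part in components[1:]:
            name = name + part[0].upper() + part[1:]
        return name

    # composeTypeName('float', isConst=True, isPointer=True) -> 'constPtrFloat'
    # composeTypeName('float')                               -> 'float'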

+ 0 - 472
direct/src/ffi/FFIOverload.py

@@ -1,472 +0,0 @@
-from direct.showbase.PythonUtil import *
-from types import *
-import string
-import FFIConstants
-import FFISpecs
-import FFITypes
-
-"""
-Things that are not supported:
- - Overloading a function based on an enum being differentiated from an int
- - Type names from C++ cannot have __enum__ in their name
- - Overloading static and non-static methods with the same name
-"""
-
-AT_not_atomic = 0
-AT_int = 1
-AT_float = 2
-AT_double = 3
-AT_bool = 4
-AT_char = 5
-AT_void = 6
-AT_string = 7
-AT_longlong = 8
-
-def cullOverloadedMethods(fullMethodDict):
-    """
-    Find all the entries that have multiple indexes for the same method name
-    Get rid of all others.
-    """
-    tmpDict = {}
-    # For each class
-    for methodName in fullMethodDict.keys():
-        methodList = fullMethodDict[methodName]
-        # See if this method has more than one function index (overloaded)
-        if (len(methodList) > 1):
-            tmpDict[methodName] = methodList
-            # Mark all the method specifications as overloaded
-            for methodSpec in methodList:
-                methodSpec.overloaded = 1
-
-    return tmpDict
-
-
-def getTypeName(classTypeDesc, typeDesc):
-    """
-    Map the interrogate primitive type names to python type names.
-    We assume that the module using this has imported the types module.
-    It is valid to pass in None for classTypeDesc if we are not in a class
-    """
-
-    typeName = typeDesc.getFullNestedName()
-
-    # Atomic C++ types are type checked against the builtin
-    # Python types. This code sorts out the mapping
-    if typeDesc.isAtomic():
-
-        # Ints, bools, and chars are treated as ints.
-        # Enums are special and are not atomic, see below
-        if ((typeDesc.atomicType == AT_int) or
-            (typeDesc.atomicType == AT_bool) or
-            (typeDesc.atomicType == AT_char)):
-            return 'IntType'
-
-        # Floats and doubles are both floats in Python
-        elif ((typeDesc.atomicType == AT_float) or
-            (typeDesc.atomicType == AT_double)):
-            return 'FloatType'
-
-        elif ((typeDesc.atomicType == AT_longlong)):
-            return 'LongType'
-
-        # Strings are treated as Python strings
-        elif ((typeDesc.atomicType == AT_string)):
-            return 'StringType'
-
-        elif (typeDesc.atomicType == AT_void):
-            # Convert the void type to None type... I guess...
-            # So far we do not have any code that uses this
-            return 'NoneType'
-
-        else:
-            FFIConstants.notify.error("Unknown atomicType: %s" % (typeDesc.atomicType))
-
-    # If the type is an enum, we really want to treat it like an int
-    # To handle this, the type will have __enum__ in the name
-    # Usually it will start the typeName, but some typeNames have the
-    # surrounding class as part of their name
-    # like BoundedObject.__enum__BoundingVolumeType
-    elif (typeName.find('__enum__') >= 0):
-        return 'IntType'
-
-    # If it was not atomic or enum, it must be a class which is a
-    # bit trickier because we output different things depending on the
-    # scoping of the type.
-    else:
-
-        #   classTypeDesc  typeDesc fullNestedName Resulting TypeName
-        # 1   Outer         Other     Other          Other.Other
-        # 2   Outer         Outer     Outer          Outer
-        # 3   Outer         Inner     Outer.Inner    Outer.Inner
-        # 4   Inner         Other     Other          Other.Other
-        # 5   Inner         Outer     Outer          Outer
-        # 6   Inner         Inner     Outer.Inner    Outer.Inner
-        # 7   None          Other     Other          Other.Other
-
-        # CASES 1, 4, and 7 are the only ones that are different from the full
-        # nested name, returning Other.Other
-
-        returnNestedTypeNames = string.split(typeName, '.')
-        returnModuleName = returnNestedTypeNames[0]
-
-        if classTypeDesc:
-            classTypeName = classTypeDesc.getFullNestedName()
-            classNestedTypeNames = string.split(classTypeName, '.')
-            # If there is no nesting, return typeName.typeName
-            if ((not (classTypeDesc.foreignTypeName in returnNestedTypeNames)) and
-                (not (typeDesc.foreignTypeName in classNestedTypeNames))):
-                return (returnModuleName + '.' + typeName)
-            # All other cases, we just need typeName
-            else:
-                return typeName
-        else:
-            # If you had no class, you need to specify module plus typename
-            return (returnModuleName + '.' + typeName)
-
-
-def inheritsFrom(type1, type2):
-    """
-    Return true if type1 inherits from type2
-    This works by recursively checking parentTypes for type1
-    """
-    if type1.parentTypes:
-        if type2 in type1.parentTypes:
-            return 1
-        else:
-            result = 0
-            for type in type1.parentTypes:
-                result = (result or inheritsFrom(type, type2))
-            return result
-    else:
-        return 0
-
-def getInheritanceLevel(type, checkNested = 1):
-    if type.__class__ == FFITypes.PyObjectTypeDescriptor:
-        # A special case: PyObject * is always the most general
-        # object.  Everything is a PyObject.
-        return -1
-
-    # If this is a nested type, return the inheritance level of the outer type.
-    if type.isNested:
-        # Check the level of your outer class
-        # pass the checkNested flag as 0 to prevent an infinite loop
-        # between the parent and child
-        level = getInheritanceLevel(type.outerType, 0)
-    else:
-        level = 0
-
-    for parentType in type.parentTypes:
-        # Add 1 because you are one level higher than your parent
-        level = max(level, 1+getInheritanceLevel(parentType))
-
-    if checkNested:
-        for nestedType in type.nestedTypes:
-            # Do not add 1 to your nested types
-            level = max(level, getInheritanceLevel(nestedType))
-
-    return level
-
-def inheritanceLevelSort(type1, type2):
-    level1 = getInheritanceLevel(type1)
-    level2 = getInheritanceLevel(type2)
-    if (level1 == level2):
-        # If they are equal in the inheritance,
-        # sort them alphabetically by their type name
-        return cmp(type1.foreignTypeName, type2.foreignTypeName)
-    elif (level1 < level2):
-        return -1
-    elif (level1 > level2):
-        return 1
-
-
-def subclass(type1, type2):
-    """
-    Helper function used in sorting classes by inheritance
-    """
-    # If the types are the same, return 0
-    if type1 == type2:
-        return 0
-    # If you have no args, sort you first
-    elif (type1 == 0):
-        return 1
-    elif (type2 == 0):
-        return -1
-    # If class1 inherits from class2 return -1
-    elif inheritsFrom(type1, type2):
-        return -1
-    # If class2 inherits from class1 return 1
-    elif inheritsFrom(type2, type1):
-        return 1
-    else:
-        # This is the don't care case. We must specify a sorting
-        # rule just so it is not arbitrary
-        if (type1.foreignTypeName > type2.foreignTypeName):
-            return -1
-        else:
-            return 1
-
-
-class FFIMethodArgumentTreeCollection:
-    def __init__(self, classTypeDesc, methodSpecList):
-        self.classTypeDesc = classTypeDesc
-        self.methodSpecList = methodSpecList
-        self.methodDict = {}
-        self.treeDict = {}
-
-    def outputOverloadedMethodHeader(self, file, nesting):
-        # If one is static, we assume they all are.
-        # The current system does not support overloading static and non-static
-        # methods with the same name
-        # Constructors are not treated as static. They are special because
-        # they are not really constructors, they are instance methods that fill
-        # in the this pointer.
-        # Global functions do not need static versions
-        if (self.methodSpecList[0].isStatic() and
-            (not self.methodSpecList[0].isConstructor())):
-            indent(file, nesting, 'def ' +
-                   self.methodSpecList[0].name + '(*_args):\n')
-        else:
-            indent(file, nesting, 'def ' +
-                   self.methodSpecList[0].name + '(self, *_args):\n')
-        self.methodSpecList[0].outputCFunctionComment(file, nesting+2)
-        indent(file, nesting+2, 'numArgs = len(_args)\n')
-
-    def outputOverloadedMethodFooter(self, file, nesting):
-        # If this is a static method, we need to output a static version
-        # If one is static, we assume they all are.
-        # The current system does not support overloading static and non-static
-        # methods with the same name
-        # Constructors are not treated as static. They are special because
-        # they are not really constructors, they are instance methods that fill
-        # in the this pointer.
-        methodName = self.methodSpecList[0].name
-
-        if (self.methodSpecList[0].isStatic() and
-            (not self.methodSpecList[0].isConstructor()) and
-            (not isinstance(self.methodSpecList[0], FFISpecs.GlobalFunctionSpecification))):
-                self.outputOverloadedStaticFooter(file, nesting)
-        else:
-            if self.classTypeDesc:
-                indent(file, nesting,   "FFIExternalObject.funcToMethod("+methodName+','+ self.classTypeDesc.foreignTypeName+ ",'"+methodName+"')\n")
-                indent(file, nesting,   'del '+methodName+'\n')
-                indent(file, nesting, ' \n')
-
-        indent(file, nesting+1, '\n')
-
-    def outputOverloadedStaticFooter(self, file, nesting):
-        # foo = staticmethod(foo)
-        methodName = self.methodSpecList[0].name
-        indent(file, nesting, self.classTypeDesc.foreignTypeName + '.' + methodName + ' = staticmethod(' + methodName + ')\n')
-        indent(file, nesting,'del ' +methodName+' \n\n')
-
-    def setup(self):
-        for method in self.methodSpecList:
-            numArgs = len(method.typeDescriptor.thislessArgTypes())
-            numArgsList = self.methodDict.setdefault(numArgs, [])
-            numArgsList.append(method)
-        for numArgs in self.methodDict.keys():
-            methodList = self.methodDict[numArgs]
-            tree = FFIMethodArgumentTree(self.classTypeDesc, methodList)
-            treeList = self.treeDict.setdefault(numArgs, [])
-            treeList.append(tree)
-
-    def generateCode(self, file, nesting):
-        self.setup()
-        self.outputOverloadedMethodHeader(file, nesting)
-        numArgsKeys = self.treeDict.keys()
-        numArgsKeys.sort()
-        for i in range(len(numArgsKeys)):
-            numArgs = numArgsKeys[i]
-            trees = self.treeDict[numArgs]
-            for tree in trees:
-                # If this is the first case, output an if clause
-                if (i == 0):
-                    indent(file, nesting+2, 'if (numArgs == ' + repr(numArgs) + '):\n')
-                # If this is a subsequent first case, output an elif clause
-                else:
-                    indent(file, nesting+2, 'elif (numArgs == ' + repr(numArgs) + '):\n')
-                tree.setup()
-                tree.traverse(file, nesting+1, 0)
-
-        # If the overloaded function got all the way through the if statements
-        # it must have had the wrong number or type of arguments
-        indent(file, nesting+2, "else:\n")
-        indent(file, nesting+3, "raise TypeError, 'Invalid number of arguments: ' + repr(numArgs) + ', expected one of: ")
-        for numArgs in numArgsKeys:
-            indent(file, 0, (repr(numArgs) + ' '))
-        indent(file, 0, "'\n")
-
-        self.outputOverloadedMethodFooter(file, nesting)
-
-
-
-class FFIMethodArgumentTree:
-    """
-    Tree is made from nested dictionaries.
-    The keys are argument type descriptors (or 0 for a method that takes no arguments).
-    The values are [tree, methodSpec]
-    methodSpec may be None at any level
-    If tree is None, it is a leaf node and methodSpec will be defined
-    """
-    def __init__(self, classTypeDesc, methodSpecList):
-        self.argSpec = None
-        self.classTypeDesc = classTypeDesc
-        self.methodSpecList = methodSpecList
-        # The actual tree is implemented as nested dictionaries
-        self.tree = {}
-
-    def setup(self):
-        for methodSpec in self.methodSpecList:
-            argTypes = methodSpec.typeDescriptor.thislessArgTypes()
-            self.fillInArgTypes(argTypes, methodSpec)
-
-    def fillInArgTypes(self, argTypes, methodSpec):
-        # If the method takes no arguments, we will assign a type index of 0
-        if (len(argTypes) == 0):
-            self.tree[0] = [
-                FFIMethodArgumentTree(self.classTypeDesc,
-                                      self.methodSpecList),
-                methodSpec]
-
-        else:
-            self.argSpec = argTypes[0]
-            typeDesc = self.argSpec.typeDescriptor.recursiveTypeDescriptor()
-
-            if (len(argTypes) == 1):
-                # If this is the last parameter, we are a leaf node, so store the
-                # methodSpec in this dictionary
-                self.tree[typeDesc] = [None, methodSpec]
-            else:
-                if typeDesc in self.tree:
-                    # If there already is a tree here, jump into and pass the
-                    # cdr of the arg list
-                    subTree = self.tree[typeDesc][0]
-                    subTree.fillInArgTypes(argTypes[1:], methodSpec)
-                else:
-                    # Add a subtree for the rest of the arg list
-                    subTree = FFIMethodArgumentTree(self.classTypeDesc,
-                                                    self.methodSpecList)
-                    subTree.fillInArgTypes(argTypes[1:], methodSpec)
-                    # This subtree has no method spec
-                    self.tree[typeDesc] = [subTree, None]
-
-    def traverse(self, file, nesting, level):
-        oneTreeHasArgs = 0
-        typeNameList = []
-
-        # First see if this tree branches at all. If it does not there are
-        # drastic optimizations we can take because we can simply call the
-        # bottom-most function. We are not checking the types of all the
-        # arguments for the sake of type checking, we are simply trying to
-        # figure out which overloaded function to call. If there is only
-    # one overloaded function with this number of arguments at this
-        # level, it must be the one. No need to continue checking all the
-        # arguments.
-        branches = 0
-        subTree = self
-        prevTree = subTree
-        levelCopy = level
-
-        while subTree:
-            if (len(subTree.tree.keys()) == 0):
-                # Dead end branch
-                break
-            if (len(subTree.tree.keys()) > 1):
-                # Ok, we branch, it was worth a try though
-                branches = 1
-                break
-
-            prevTree = subTree
-            # Must only have one subtree, traverse it
-            subTree = subTree.tree.values()[0][0]
-            levelCopy += 1
-
-        # If there were no branches, this is easy
-        # Just output the function and return
-        # Note this operates on prevTree because subTree went one too far
-        if not branches:
-            methodSpec = prevTree.tree.values()[0][1]
-            indent(file, nesting+2, 'return ')
-            methodSpec.outputOverloadedCall(file, prevTree.classTypeDesc, levelCopy)
-            return
-
-        # Ok, We must have a branch down here somewhere
-        # Make a copy of the keys so we can sort them in place
-        sortedKeys = self.tree.keys()
-        # Sort the keys based on inheritance hierarchy, most specific classes first
-        sortedKeys.sort(subclass)
-
-        for i in range(len(sortedKeys)):
-            typeDesc = sortedKeys[i]
-            # See if this takes no arguments
-            if (typeDesc == 0):
-                # Output the function
-                methodSpec = self.tree[0][1]
-                indent(file, nesting+2, 'return ')
-                methodSpec.outputOverloadedCall(file, self.classTypeDesc, 0)
-            else:
-                # This is handled at the top of the file now (?)
-                # Import a file if we need to for this typeDesc
-                # if ((typeDesc != 0) and
-                #     (not typeDesc.isNested) and
-                #     # Do not put our own module in the import list
-                #     (self.classTypeDesc != typeDesc) and
-                #     # If this is a class (not a primitive), put it on the list
-                #     (typeDesc.__class__ == FFITypes.ClassTypeDescriptor)):
-                #     indent(file, nesting+2, 'import ' + typeDesc.foreignTypeName + '\n')
-
-                # Specify that at least one of these trees had arguments
-                # so we know to output an else clause
-                oneTreeHasArgs = 1
-                typeName = getTypeName(self.classTypeDesc, typeDesc)
-                typeNameList.append(typeName)
-                if typeDesc.__class__ == FFITypes.PyObjectTypeDescriptor:
-                    # A special case: if one of the parameters is
-                    # PyObject *, that means anything is accepted.
-                    condition = '1'
-
-                else:
-                    # Otherwise, we'll check the particular type of
-                    # the object.
-                    condition = '(isinstance(_args[' + repr(level) + '], ' + typeName + '))'
-                    # Legal types for a float parameter include int and long.
-                    if (typeName == 'FloatType'):
-                        condition += (' or (isinstance(_args[' + repr(level) + '], IntType))')
-                        condition += (' or (isinstance(_args[' + repr(level) + '], LongType))')
-                    # Legal types for a long parameter include int.
-                    elif (typeName == 'LongType'):
-                        condition += (' or (isinstance(_args[' + repr(level) + '], IntType))')
-                    # Legal types for an int parameter include long.
-                    elif (typeName == 'IntType'):
-                        condition += (' or (isinstance(_args[' + repr(level) + '], LongType))')
-
-                indent(file, nesting+2, 'if ' + condition + ':\n')
-
-                if (self.tree[typeDesc][0] is not None):
-                    self.tree[typeDesc][0].traverse(file, nesting+1, level+1)
-                else:
-                    methodSpec = self.tree[typeDesc][1]
-                    indent(file, nesting+3, 'return ')
-                    numArgs = level+1
-                    methodSpec.outputOverloadedCall(file, self.classTypeDesc, numArgs)
-
-        # Output an else clause if one of the trees had arguments
-        if oneTreeHasArgs:
-            indent(file, nesting+2, "raise TypeError, 'Invalid argument " + repr(level) + ", expected one of: ")
-            for name in typeNameList:
-                indent(file, 0, ('<' + name + '> '))
-            indent(file, 0, "'\n")
-
-    def isSinglePath(self):
-        if (len(self.tree.keys()) > 1):
-            # More than one child, return false
-            return 0
-        else:
-            # Only have one child, see if he only has one child
-            key = self.tree.keys()[0]
-            tree = self.tree[key][0]
-            if tree:
-                return tree.isSinglePath()
-            else:
-                return self.tree[key][1]
-
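
The FFIMethodArgumentTreeCollection/FFIMethodArgumentTree pair above emitted Python wrappers that resolve C++ overloads at call time: first a switch on the number of arguments, then isinstance() checks ordered from the most derived class down to the most general one (the subclass sort). A rough, self-contained sketch of the shape of such a generated wrapper, using hypothetical VBase3/Point3/NodePath stand-ins rather than actual generated output:

    class VBase3(object):
        def __init__(self, x=0.0, y=0.0, z=0.0):
            self.x, self.y, self.z = x, y, z

    class Point3(VBase3):
        pass

    class NodePath(object):
        def setPos(self, *_args):
            # Dispatch on argument count first, then on argument type,
            # testing the most specific classes before their base classes.
            numArgs = len(_args)
            if numArgs == 1:
                if isinstance(_args[0], VBase3):
                    return self._setPosVec(_args[0])
                raise TypeError('Invalid argument 0, expected one of: <VBase3>')
            elif numArgs == 3:
                if isinstance(_args[0], (int, float)):
                    return self._setPosXyz(_args[0], _args[1], _args[2])
                raise TypeError('Invalid argument 0, expected one of: <FloatType>')
            else:
                raise TypeError('Invalid number of arguments: %r, expected one of: 1 3' % numArgs)

        # Hypothetical single-signature targets standing in for the C wrappers.
        def _setPosVec(self, vec):
            return self._setPosXyz(vec.x, vec.y, vec.z)

        def _setPosXyz(self, x, y, z):
            self.pos = (x, y, z)
            return self.pos

Both NodePath().setPos(Point3(1, 2, 3)) and NodePath().setPos(1, 2, 3) end up in the same underlying call, which is how the generated code collapsed a C++ overload set into a single Python method.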

+ 0 - 151
direct/src/ffi/FFIRename.py

@@ -1,151 +0,0 @@
-import FFIConstants
-from string import *
-
-
-pythonKeywords = ['and','del','for','is','raise','assert','elif','from','lambda','return','break','else','global','not','try','class','except','if','or','while','continue','exec','import','pass','def','finally','in','print']
-
-
-methodRenameDictionary = {
-    'operator==':  '__eq__',
-    'operator!=':  '__ne__',
-    'operator<<':  '__lshift__',
-    'operator>>':  '__rshift__',
-    'operator<':   '__lt__',
-    'operator>':   '__gt__',
-    'operator<=':  '__le__',
-    'operator>=':  '__ge__',
-    'operator=':   'assign',
-    'operator()':  '__call__',
-    'operator[]':  '__getitem__',
-    'operator++':  'increment',
-    'operator--':  'decrement',
-    'operator^':   '__xor__',
-    'operator%':   '__mod__',
-    'operator!':   'logicalNot',
-    'operator~':   'bitwiseNot',
-    'operator&':   '__and__',
-    'operator&&':  'logicalAnd',
-    'operator|':   '__or__',
-    'operator||':  'logicalOr',
-    'operator+':   '__add__',
-    'operator-':   '__sub__',
-    'operator*':   '__mul__',
-    'operator/':   '__div__',
-    'operator+=':  '__iadd__',
-    'operator-=':  '__isub__',
-    'operator*=':  '__imul__',
-    'operator/=':  '__idiv__',
-    'operator,':   'concatenate',
-    'operator|=':  '__ior__',
-    'operator&=':  '__iand__',
-    'operator^=':  '__ixor__',
-    'operator~=':  'bitwiseNotEqual',
-    'operator->':  'dereference',
-    'operator<<=': '__ilshift__',
-    'operator>>=': '__irshift__',
-    'print':       'Cprint',
-    'CInterval.setT': '_priv__cSetT',
-    }
-    
-classRenameDictionary = {
-    'Loader':                    'PandaLoader',
-    'String':                    'CString',
-    'LMatrix4f':                 'Mat4',
-    'LMatrix3f':                 'Mat3',
-    'LVecBase4f':                'VBase4',
-    'LVector4f':                 'Vec4',
-    'LPoint4f':                  'Point4',
-    'LVecBase3f':                'VBase3',
-    'LVector3f':                 'Vec3',
-    'LPoint3f':                  'Point3',
-    'LVecBase2f':                'VBase2',
-    'LVector2f':                 'Vec2',
-    'LPoint2f':                  'Point2',
-    'LQuaternionf':              'Quat',
-    'LMatrix4d':                 'Mat4D',
-    'LMatrix3d':                 'Mat3D',
-    'LVecBase4d':                'VBase4D',
-    'LVector4d':                 'Vec4D',
-    'LPoint4d':                  'Point4D',
-    'LVecBase3d':                'VBase3D',
-    'LVector3d':                 'Vec3D',
-    'LPoint3d':                  'Point3D',
-    'LVecBase2d':                'VBase2D',
-    'LVector2d':                 'Vec2D',
-    'LPoint2d':                  'Point2D',
-    'LQuaterniond':              'QuatD',
-    'Plane':                     'PlaneBase',
-    'Planef':                    'Plane',
-    'Planed':                    'PlaneD',
-    'Frustum':                   'FrustumBase',
-    'Frustumf':                  'Frustum',
-    'Frustumd':                  'FrustumD'
-    }
-
-
-def checkKeyword(cppName):
-    if cppName in pythonKeywords:
-        cppName = '_' + cppName
-    return cppName
-
-# TODO: Make faster - this thing is horribly slow    
-def classNameFromCppName(cppName):
-    # initialize to empty string
-    className = ''
-    # These are the characters we want to strip out of the name
-    badChars = '!@#$%^&*()<>,.-=+~{}? '
-    nextCap = 0
-    firstChar = 1
-    for char in cppName:
-        if (char in badChars):
-            continue
-        elif (char == '_'):
-            nextCap = 1
-            continue
-        elif (nextCap or firstChar):
-            className = className + capitalize(char)
-            nextCap = 0
-            firstChar = 0
-        else:
-            className = className + char
-    if className in classRenameDictionary:
-        className = classRenameDictionary[className]
-
-    if (className == ''):
-        FFIConstants.notify.warning('Renaming class: ' + cppName + ' to empty string')
-    # FFIConstants.notify.debug('Renaming class: ' + cppName + ' to: ' + className)
-    # Note we do not have to check for keywords because class names are capitalized
-    return className
-    
-def nonClassNameFromCppName(cppName):
-    className = classNameFromCppName(cppName)
-    # Make the first character lowercase
-    newName = lower(className[0])+className[1:]
-    # Mangle names that happen to be python keywords so they are not anymore
-    newName = checkKeyword(newName)
-    return newName
-
-def methodNameFromCppName(cppName, className = None):
-    methodName = ''
-    badChars = ' '
-    nextCap = 0
-    for char in cppName:
-        if (char in badChars):
-            continue
-        elif (char == '_'):
-            nextCap = 1
-            continue
-        elif nextCap:
-            methodName = methodName + capitalize(char)
-            nextCap = 0
-        else:
-            methodName = methodName + char
-
-    if className != None:
-        methodName = methodRenameDictionary.get(className + '.' + methodName, methodName)
-    methodName = methodRenameDictionary.get(methodName, methodName)
-    
-    # Mangle names that happen to be python keywords so they are not anymore
-    methodName = checkKeyword(methodName)
-    return methodName
-
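Taken together, these helpers implemented the old C++-to-Python naming convention: strip punctuation, capitalize the letter after each underscore, keep class names capitalized and method names leading-lowercase, mangle Python keywords, then apply the rename tables. A simplified, runnable sketch of that convention (abbreviated keyword and rename tables, hypothetical helper names, not the removed code itself):

    PYTHON_KEYWORDS = set(['lambda', 'class', 'def', 'import'])   # abbreviated
    CLASS_RENAMES = {'LVecBase3f': 'VBase3'}                       # abbreviated

    def camel(cpp_name, capitalize_first):
        # Capitalize the letter following each underscore, drop the underscores.
        parts = [p for p in cpp_name.split('_') if p]
        name = ''.join(p[0].upper() + p[1:] for p in parts)
        return name if capitalize_first else name[0].lower() + name[1:]

    def class_name(cpp_name):
        name = camel(cpp_name, True)
        return CLASS_RENAMES.get(name, name)

    def method_name(cpp_name):
        name = camel(cpp_name, False)
        # Mangle names that collide with Python keywords.
        return '_' + name if name in PYTHON_KEYWORDS else name

    assert class_name('collision_node') == 'CollisionNode'
    assert class_name('LVecBase3f') == 'VBase3'
    assert method_name('set_pos') == 'setPos'
    assert method_name('lambda') == '_lambda'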

+ 0 - 586
direct/src/ffi/FFISpecs.py

@@ -1,586 +0,0 @@
-
-import FFIConstants
-import FFITypes
-import FFIOverload
-import string
-
-from direct.showbase.PythonUtil import *
-
-augmentedAssignments = ['__iadd__', '__isub__', '__imul__', '__idiv__',
-                        '__ior__', '__iand__', '__ixor__',
-                        '__ilshift__', '__irshift__']
-
-class FunctionSpecification:
-    def __init__(self):
-        self.name = ''
-        self.typeDescriptor = None
-        self.index = 0
-        self.overloaded = 0
-        # Is this function a constructor
-        self.constructor = 0
-
-    def isConstructor(self):
-        return self.constructor
-
-    def isStatic(self):
-        for arg in self.typeDescriptor.argumentTypes:
-            if arg.isThis:
-                return 0
-        # No args were this pointers, must be static
-        return 1
-
-    def outputTypeChecking(self, methodClass, args, file, nesting):
-        """
-        Output an assert statement to check the type of each arg in this method
-        This can be turned off with a command line parameter in generatePythonCode
-        It is valid to pass in None for methodClass if you are not in any methodClass
-        """
-        if FFIConstants.wantTypeChecking:
-            for i in range(len(args)):
-                methodArgSpec = args[i]
-                typeDesc = methodArgSpec.typeDescriptor.recursiveTypeDescriptor()
-                typeName = FFIOverload.getTypeName(methodClass, typeDesc)
-
-                # We only do type checking on class types.  C++ can do
-                # type checking on the primitive types, and will do a
-                # better job anyway.
-                if typeDesc.__class__ == FFITypes.ClassTypeDescriptor:
-                    # Get the real return type (not derived)
-                    if ((not typeDesc.isNested) and
-                        # Do not put our own module in the import list
-                        (methodClass != typeDesc)):
-                        indent(file, nesting, 'import ' + typeDesc.foreignTypeName + '\n')
-                    indent(file, nesting, 'if not isinstance(' +
-                           methodArgSpec.name + ', ' + typeName + '):\n')
-                    indent(file, nesting + 1,
-                           'raise TypeError, "Invalid argument %s, expected <%s>"\n' % (i, typeDesc.foreignTypeName))
-
-    def outputCFunctionComment(self, file, nesting):
-        """
-        Output a docstring to the file describing the C++ call with type info
-        Also output the C++ comment from interrogate.
-        """
-        if FFIConstants.wantComments:
-            indent(file, nesting, '"""\n')
-
-            # Output the function prototype
-            if self.typeDescriptor.prototype:
-                indent(file, nesting, self.typeDescriptor.prototype + '\n')
-
-            # Output the function comment
-            if self.typeDescriptor.comment:
-                # To insert tabs into the comment, replace all newlines with a newline+tabs
-                comment = string.replace(self.typeDescriptor.comment,
-                                         '\n', ('\n' + ('    ' * nesting)))
-                indent(file, nesting, comment)
-
-            indent(file, 0, '\n')
-            indent(file, nesting, '"""\n')
-
-    def getFinalName(self):
-        """
-        Return the name of the function, given that it might be overloaded.
-        If it is overloaded, prepend "private__overloaded_", then append the
-        types of each argument to make it unique.
-
-        So "getChild(int)" becomes "private__overloaded_getChild_int(int)"
-        """
-        if self.overloaded:
-            name = 'private__overloaded_' + self.name
-            for methodArgSpec in self.typeDescriptor.argumentTypes:
-                name = name + '_' + methodArgSpec.typeDescriptor.foreignTypeName
-            return name
-        else:
-            return self.name
-
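The mangling rule above gives every overload its own private wrapper name alongside the public dispatcher. A small illustration of the rule, assuming hypothetical argument type names:

    def final_name(name, arg_type_names, overloaded):
        # Mirrors getFinalName(): only overloads get the mangled private name.
        if not overloaded:
            return name
        return 'private__overloaded_' + name + ''.join('_' + t for t in arg_type_names)

    print(final_name('getChild', ['NodePath', 'int'], True))
    # -> private__overloaded_getChild_NodePath_int
    print(final_name('getChild', [], False))
    # -> getChild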
-    def outputOverloadedCall(self, file, classTypeDesc, numArgs):
-        """
-        Write the function call to call this overloaded method
-        For example:
-          self.overloaded_setPos_ptrNodePath_float_float_float(*_args)
-        If it is a class (static) method, call the class method
-          Class.overloaded_setPos_ptrNodePath_float_float_float(*_args)
-
-        Constructors are not treated as static. They are special because
-        they are not really constructors, they are instance methods that fill
-        in the this pointer.
-
-        These do not get indented because they are not at the beginning of the line
-
-        If classTypeDesc is None, then this is a global function and should
-        output code as such
-
-        """
-        if classTypeDesc:
-            if (self.isStatic() and not self.isConstructor()):
-                if numArgs:
-                    indent(file, 0, classTypeDesc.foreignTypeName + '.' + self.getFinalName() + '(*_args)\n')
-                else:
-                    indent(file, 0, classTypeDesc.foreignTypeName + '.' + self.getFinalName() + '()\n')
-            else:
-                if numArgs:
-                    indent(file, 0, 'self.' + self.getFinalName() + '(*_args)\n')
-                else:
-                    indent(file, 0, 'self.' + self.getFinalName() + '()\n')
-        else:
-            if numArgs:
-                indent(file, 0, self.getFinalName() + '(*_args)\n')
-            else:
-                indent(file, 0, self.getFinalName() + '()\n')
-
-
-class GlobalFunctionSpecification(FunctionSpecification):
-    def __init__(self):
-        FunctionSpecification.__init__(self)
-
-    # Use generateCode when creating a global (non-class) function
-    def generateGlobalCode(self, file):
-        self.outputHeader(file)
-        self.outputBody(file)
-        self.outputFooter(file)
-
-    # Use generateCode when creating a global (non-class) function
-    def generateGlobalDowncastCode(self, file):
-        self.outputHeader(file)
-        self.outputBody(file, 0, 0) # no downcast, no type checking
-        self.outputFooter(file)
-
-    # Use generateMethodCode when creating a global->class function
-    def generateMethodCode(self, methodClass, file, nesting):
-        self.outputMethodHeader(methodClass, file, nesting)
-        self.outputMethodBody(methodClass, file, nesting)
-        self.outputMethodFooter(methodClass, file, nesting)
-
-    ##################################################
-    ## Global Function Code Generation
-    ##################################################
-    def outputHeader(self, file):
-        argTypes = self.typeDescriptor.argumentTypes
-        indent(file, 0, 'def ' + self.getFinalName() + '(')
-        for i in range(len(argTypes)):
-            file.write(argTypes[i].name)
-            if (i < (len(argTypes)-1)):
-                file.write(', ')
-        file.write('):\n')
-
-    def outputBody(self, file, needsDowncast=1, typeChecking=1):
-        # The method body will look something like
-        #     returnValue = PandaGlobal.method(arg)
-        #     returnObject = NodePath()
-        #     returnObject.this = returnValue
-        #     returnObject.userManagesMemory = 1  (optional)
-        #     return returnObject
-        self.outputCFunctionComment(file, 1)
-        argTypes = self.typeDescriptor.argumentTypes
-        # The global downcast functions do not need type checking, and
-        # in fact have a problem where they assert you are downcasting
-        # from the immediate superclass when in fact you may be downcasting
-        # from a class way up the chain as long as there is single
-        # inheritance up to it
-        if typeChecking:
-            self.outputTypeChecking(None, argTypes, file, 1)
-        indent(file, 1, 'returnValue = ' + self.typeDescriptor.moduleName
-                   + '.' + self.typeDescriptor.wrapperName + '(')
-        for i in range(len(argTypes)):
-            file.write(argTypes[i].passName())
-            if (i < (len(argTypes)-1)):
-                file.write(', ')
-        file.write(')\n')
-        #indent(file, 1, 'if returnValue is None:\n')
-        #indent(file, 2, 'return None\n')
-        returnType = self.typeDescriptor.returnType.recursiveTypeDescriptor()
-        returnType.generateReturnValueWrapper(None, file,
-                                              self.typeDescriptor.userManagesMemory,
-                                              needsDowncast, 1)
-
-    def outputFooter(self, file):
-        indent(file, 0, '\n')
-
-    ##################################################
-    ## Class Method Code Generation
-    ##################################################
-    def outputMethodHeader(self, methodClass, file, nesting):
-        argTypes = self.typeDescriptor.argumentTypes
-        indent(file, nesting, 'def ' + self.getFinalName() + '(')
-        for i in range(len(argTypes)):
-            # Instead of the first argument, put self
-            if (i == 0):
-                file.write('self')
-            else:
-                file.write(argTypes[i].name)
-            if (i < (len(argTypes)-1)):
-                file.write(', ')
-        file.write('):\n')
-
-    def outputMethodBody(self, methodClass, file, nesting):
-        # The method body will look something like
-        #     returnValue = PandaGlobal.method(self.this, arg)
-        #     returnValue.userManagesMemory = 1  (optional)
-        #     return returnValue
-        self.outputCFunctionComment(file, nesting+2)
-        argTypes = self.typeDescriptor.argumentTypes
-        self.outputTypeChecking(methodClass, argTypes[1:], file, nesting+2)
-        indent(file, nesting+2, 'returnValue = ' + self.typeDescriptor.moduleName
-                   + '.' + self.typeDescriptor.wrapperName + '(')
-        for i in range(len(argTypes)):
-            # Instead of the first argument, put self.this
-            if (i == 0):
-                file.write('self.this')
-            else:
-                file.write(argTypes[i].passName())
-            if (i < (len(argTypes)-1)):
-                file.write(', ')
-        file.write(')\n')
-        indent(file, 1, 'if returnValue is None:\n')
-        indent(file, 2, 'return None\n')
-
-        returnType = self.typeDescriptor.returnType.recursiveTypeDescriptor()
-        returnType.generateReturnValueWrapper(methodClass, file,
-                                              self.typeDescriptor.userManagesMemory,
-                                              1, nesting+2)
-
-    def outputMethodFooter(self, methodClass, file, nesting):
-        indent(file, nesting+1, '\n')
-
-
-class MethodSpecification(FunctionSpecification):
-    def __init__(self):
-        FunctionSpecification.__init__(self)
-
-    def generateConstructorCode(self, methodClass, file, nesting):
-        self.outputConstructorHeader(methodClass, file, nesting)
-        self.outputConstructorBody(methodClass, file, nesting)
-        self.outputConstructorFooter(methodClass, file, nesting)
-
-    def generateDestructorCode(self, methodClass, file, nesting):
-        self.outputDestructorHeader(methodClass, file, nesting)
-        self.outputDestructorBody(methodClass, file, nesting)
-        self.outputDestructorFooter(methodClass, file, nesting)
-
-    def generateMethodCode(self, methodClass, file, nesting):
-        self.outputMethodHeader(methodClass, file, nesting)
-        self.outputMethodBody(methodClass, file, nesting)
-        self.outputMethodFooter(methodClass, file, nesting)
-
-    def generateStaticCode(self, methodClass, file, nesting):
-        self.outputStaticHeader(methodClass, file, nesting)
-        self.outputStaticBody(methodClass, file, nesting)
-        self.outputStaticFooter(methodClass, file, nesting)
-
-    def generateInheritedMethodCode(self, methodClass, parentList, file, nesting, needsDowncast):
-        self.outputInheritedMethodHeader(methodClass, parentList, file, nesting, needsDowncast)
-        self.outputInheritedMethodBody(methodClass, parentList, file, nesting, needsDowncast)
-        self.outputInheritedMethodFooter(methodClass, parentList, file, nesting, needsDowncast)
-
-    def generateUpcastMethodCode(self, methodClass, file, nesting):
-        # The upcast method code is just like regular code, but the
-        # return value wrapper does not have downcasting instructions
-        self.outputMethodHeader(methodClass, file, nesting)
-        self.outputMethodBody(methodClass, file, nesting, 0) # no downcast
-        self.outputMethodFooter(methodClass, file, nesting)
-
-    ##################################################
-    ## Constructor Code Generation
-    ##################################################
-    def outputConstructorHeader(self, methodClass, file, nesting):
-        argTypes = self.typeDescriptor.argumentTypes
-        thislessArgTypes = self.typeDescriptor.thislessArgTypes()
-        indent(file, nesting+1, 'def ' + self.getFinalName() + '(self')
-        if (len(thislessArgTypes) > 0):
-            file.write(', ')
-            for i in range(len(thislessArgTypes)):
-                file.write(thislessArgTypes[i].name)
-                if (i < (len(thislessArgTypes)-1)):
-                    file.write(', ')
-        file.write('):\n')
-        self.outputCFunctionComment(file, nesting+2)
-
-
-    def outputConstructorBody(self, methodClass, file, nesting):
-        # The method body will look something like
-        #     self.this = panda.Class_constructor(arg)
-        #     self.userManagesMemory = 1  (optional)
-        argTypes = self.typeDescriptor.argumentTypes
-        thislessArgTypes = self.typeDescriptor.thislessArgTypes()
-        self.outputTypeChecking(methodClass, thislessArgTypes, file, nesting+2)
-        indent(file, nesting+2, 'self.this = ' + self.typeDescriptor.moduleName + '.'
-                   + self.typeDescriptor.wrapperName + '(')
-        # Do not pass self into the constructor
-        for i in range(len(thislessArgTypes)):
-            file.write(thislessArgTypes[i].passName())
-            if (i < (len(thislessArgTypes)-1)):
-                file.write(', ')
-        file.write(')\n')
-        indent(file, nesting+2, 'assert self.this != 0\n')
-        if self.typeDescriptor.userManagesMemory:
-            indent(file, nesting+2, 'self.userManagesMemory = 1\n')
-
-    def outputConstructorFooter(self, methodClass, file, nesting):
-        indent(file, nesting+1, '\n')
-
-
-    ##################################################
-    ## Destructor Code Generation
-    ##################################################
-    def outputDestructorHeader(self, methodClass, file, nesting):
-        argTypes = self.typeDescriptor.argumentTypes
-        thislessArgTypes = self.typeDescriptor.thislessArgTypes()
-        indent(file, nesting+1, 'def ' + self.getFinalName() + '(self')
-        if (len(thislessArgTypes) > 0):
-            file.write(', ')
-            for i in range(len(thislessArgTypes)):
-                file.write(thislessArgTypes[i].name)
-                if (i < (len(thislessArgTypes)-1)):
-                    file.write(', ')
-        file.write('):\n')
-        self.outputCFunctionComment(file, nesting+2)
-
-    def outputDestructorBody(self, methodClass, file, nesting):
-        # The method body will look something like
-        #     panda.Class_destructor(self.this)
-        functionName = (self.typeDescriptor.moduleName + '.'
-                        + self.typeDescriptor.wrapperName)
-        # Make sure the module and function have not been deleted first
-        # This only happens during shutdown
-        indent(file, nesting+2, 'if (' + self.typeDescriptor.moduleName + ' and ' +
-               functionName + '):\n')
-        indent(file, nesting+3, functionName + '(self.this)\n')
-
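The guard written here protected generated destructors against interpreter shutdown, when module globals may already have been cleared. The same defensive pattern sketched standalone (the `_FakeLib` module and `Shadow` class are stand-ins for the generated C module and shadow class):

    class _FakeLib(object):
        # Stand-in for the generated extension module (the real code called
        # something like libpanda.<wrapperName>(self.this)).
        @staticmethod
        def destruct(this):
            pass

    class Shadow(object):
        def __init__(self, this):
            self.this = this                    # opaque C++ pointer handle

        def __del__(self):
            # During interpreter shutdown, module globals may already have been
            # cleared, so check the module and the wrapper before calling through.
            lib = globals().get('_FakeLib')
            if lib is not None and getattr(lib, 'destruct', None) is not None:
                lib.destruct(self.this)

    obj = Shadow(0x1000)
    del obj                                     # guarded destructor runs here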
-    def outputDestructorFooter(self, methodClass, file, nesting):
-        indent(file, nesting+1, '\n')
-
-    ##################################################
-    ## Method Code Generation
-    ##################################################
-    def outputMethodHeader(self, methodClass, file, nesting):
-        argTypes = self.typeDescriptor.argumentTypes
-        thislessArgTypes = self.typeDescriptor.thislessArgTypes()
-        indent(file, nesting, 'def ' + self.getFinalName() + '(self')
-        if (len(thislessArgTypes) > 0):
-            file.write(', ')
-            for i in range(len(thislessArgTypes)):
-                file.write(thislessArgTypes[i].name)
-                if (i < (len(thislessArgTypes)-1)):
-                    file.write(', ')
-        file.write('):\n')
-
-    def outputMethodBody(self, methodClass, file, nesting, needsDowncast=1):
-        # The method body will look something like
-        #     returnValue = panda.Class_method(self.this, arg)
-        #     returnValue.userManagesMemory = 1  (optional)
-        #     return returnValue
-        self.outputCFunctionComment(file, nesting+2)
-        argTypes = self.typeDescriptor.argumentTypes
-        thislessArgTypes = self.typeDescriptor.thislessArgTypes()
-        self.outputTypeChecking(methodClass, thislessArgTypes, file, nesting+2)
-        indent(file, nesting+2, 'returnValue = ' + self.typeDescriptor.moduleName + '.'
-                   + self.typeDescriptor.wrapperName + '(')
-        file.write('self.this')
-        if (len(thislessArgTypes) > 0):
-            file.write(', ')
-            for i in range(len(thislessArgTypes)):
-                file.write(thislessArgTypes[i].passName())
-                if (i < (len(thislessArgTypes)-1)):
-                    file.write(', ')
-        file.write(')\n')
-        # If this is an augmented assignment operator like +=, we have special rules.
-        # In this case we simply call the C++ function, make sure we got the same
-        # return value back, then return self. Otherwise, if you let it go through the
-        # normal system, it deletes the old Python object (which frees the C++ memory)
-        # and then returns a new Python shadow object with the old C++ pointer... BAD!
-        if self.getFinalName() in augmentedAssignments:
-            indent(file, nesting+2, 'assert self.this == returnValue\n')
-            indent(file, nesting+2, 'return self\n')
-        else:
-            returnType = self.typeDescriptor.returnType.recursiveTypeDescriptor()
-            returnType.generateReturnValueWrapper(methodClass, file,
-                                                  self.typeDescriptor.userManagesMemory,
-                                                  needsDowncast, nesting+2)
-
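The special case above exists because the C++ in-place operators return the object they were called on; wrapping that return value in a fresh shadow object would let the old shadow be garbage-collected and free memory the C++ side still uses. A minimal sketch of the rule the generator enforced (the `Vec` class and fake pointer are stand-ins):

    class Vec(object):
        def __init__(self, this):
            self.this = this                    # stand-in for the C++ pointer

        def _cpp_iadd(self, other):
            # The real wrapper performed the C++ += and returned the same pointer.
            return self.this

        def __iadd__(self, other):
            returnValue = self._cpp_iadd(other)
            # Hand back the existing shadow object rather than wrapping the
            # returned pointer in a new one, so no shadow (and no C++ memory)
            # gets destroyed as a side effect of the assignment.
            assert self.this == returnValue
            return self

    v = Vec(42)
    v += Vec(7)                                 # v is still the same Python object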
-    def outputMethodFooter(self, methodClass, file, nesting):
-        indent(file, nesting,  'FFIExternalObject.funcToMethod(' +self.getFinalName()+ ',' + methodClass.foreignTypeName + ",'" +self.getFinalName() +"')\n")
-        indent(file, nesting,  'del ' + self.getFinalName()+' \n')
-        indent(file, nesting+1,'\n')
-        #indent(file, nesting, methodClass.foreignTypeName +'.'+  self.getFinalName() + ' = staticmethod(' + self.getFinalName() + ')\n')
-        #indent(file, nesting,'del ' + self.getFinalName()+' \n')
-        #indent(file, nesting+1, '\n')
-        indent(file, nesting+1, '\n')
-
-
-    ##################################################
-    ## Static Method Code Generation
-    ##################################################
-    def outputStaticHeader(self, methodClass, file, nesting):
-        argTypes = self.typeDescriptor.argumentTypes
-        indent(file, nesting, 'def ' + self.getFinalName() + '(')
-        for i in range(len(argTypes)):
-            file.write(argTypes[i].name)
-            if (i < (len(argTypes)-1)):
-                    file.write(', ')
-        file.write('):\n')
-
-
-    def outputStaticBody(self, methodClass, file, nesting):
-        # The method body will look something like
-        #     returnValue = panda.class_method(self.this, arg)
-        #     returnValue.userManagesMemory = 1  (optional)
-        #     return returnValue
-        self.outputCFunctionComment(file, nesting+2)
-        argTypes = self.typeDescriptor.argumentTypes
-        thislessArgTypes = self.typeDescriptor.thislessArgTypes()
-        self.outputTypeChecking(methodClass, thislessArgTypes, file, nesting+2)
-        indent(file, nesting+2, 'returnValue = ' + self.typeDescriptor.moduleName + '.'
-                   + self.typeDescriptor.wrapperName + '(')
-        # Static methods do not take the this parameter
-        if (len(thislessArgTypes) > 0):
-            for i in range(len(thislessArgTypes)):
-                file.write(thislessArgTypes[i].passName())
-                if (i < (len(thislessArgTypes)-1)):
-                    file.write(', ')
-        file.write(')\n')
-        returnType = self.typeDescriptor.returnType.recursiveTypeDescriptor()
-        returnType.generateReturnValueWrapper(methodClass, file,
-                                              self.typeDescriptor.userManagesMemory,
-                                              1, nesting+2)
-
-    def outputStaticFooter(self, methodClass, file, nesting):
-        indent(file, nesting, methodClass.foreignTypeName +'.'+  self.getFinalName() + ' = staticmethod(' + self.getFinalName() + ')\n')
-        indent(file, nesting,'del ' + self.getFinalName()+' \n')
-        indent(file, nesting+1, '\n')
-
-    ##################################################
-    ## Upcast Method Code Generation
-    ##################################################
-    def outputInheritedMethodHeader(self, methodClass, parentList, file, nesting, needsDowncast):
-        argTypes = self.typeDescriptor.argumentTypes
-        thislessArgTypes = self.typeDescriptor.thislessArgTypes()
-        indent(file, nesting, 'def ' + self.getFinalName() + '(self')
-        if (len(thislessArgTypes) > 0):
-            file.write(', ')
-            for i in range(len(thislessArgTypes)):
-                file.write(thislessArgTypes[i].name)
-                if (i < (len(thislessArgTypes)-1)):
-                    file.write(', ')
-        file.write('):\n')
-
-    def outputInheritedMethodBody(self, methodClass, parentList, file, nesting, needsDowncast):
-        # The method body will look something like
-        #     upcastSelf = self.upcastToParentClass()
-        #     returnValue = libpanda.method(upcastSelf.this, arg)
-        #     returnValue.userManagesMemory = 1  (optional)
-        #     return returnValue
-        self.outputCFunctionComment(file, nesting+2)
-        argTypes = self.typeDescriptor.argumentTypes
-        thislessArgTypes = self.typeDescriptor.thislessArgTypes()
-        self.outputTypeChecking(methodClass, thislessArgTypes, file, nesting+2)
-        indent(file, nesting+2, 'upcastSelf = self\n')
-        for i in range(len(parentList)):
-            # Only output the upcast call if that parent class defines it
-            parentClass = parentList[i]
-            methodName = 'upcastTo' + parentClass.foreignTypeName
-            if (i != 0):
-                childClass = parentList[i-1]
-                if childClass.hasMethodNamed(methodName):
-                    indent(file, nesting+2, 'upcastSelf = upcastSelf.' + methodName + '()\n')
-                else:
-                    indent(file, nesting+2, '# upcastSelf = upcastSelf.' + methodName + '()\n')
-            else:
-                if methodClass.hasMethodNamed(methodName):
-                    indent(file, nesting+2, 'upcastSelf = upcastSelf.' + methodName + '()\n')
-                else:
-                    indent(file, nesting+2, '# upcastSelf = upcastSelf.' + methodName + '()\n')
-
-        indent(file, nesting+2, 'returnValue = ' + self.typeDescriptor.moduleName
-               + '.' + self.typeDescriptor.wrapperName + '(upcastSelf.this')
-        if (len(thislessArgTypes) > 0):
-            file.write(', ')
-            for i in range(len(thislessArgTypes)):
-                file.write(thislessArgTypes[i].passName())
-                if (i < (len(thislessArgTypes)-1)):
-                    file.write(', ')
-        file.write(')\n')
-        returnType = self.typeDescriptor.returnType.recursiveTypeDescriptor()
-        # Generate the return value code with no downcast instructions
-        returnType.generateReturnValueWrapper(methodClass, file,
-                                              self.typeDescriptor.userManagesMemory,
-                                              needsDowncast, nesting+2)
-
-    def outputInheritedMethodFooter(self, methodClass, parentList, file, nesting, needsDowncast):
-        indent(file, nesting,  'FFIExternalObject.funcToMethod(' +self.getFinalName()+ ',' + methodClass.foreignTypeName + ",'" +self.getFinalName() +"')\n")
-        indent(file, nesting,  'del ' + self.getFinalName()+' \n')
-        indent(file, nesting+1,'\n')
-
-
-class GlobalValueSpecification:
-    def __init__(self):
-        self.name = ''
-        # We really do not need the descriptor for the value, just
-        # the getter and setter
-        # self.typeDescriptor = None
-        # To be filled in with a GlobalFunctionSpecification
-        self.getter = None
-        # To be filled in with a GlobalFunctionSpecification
-        self.setter = None
-
-    def generateGlobalCode(self, file):
-        indent(file, 0, '# Global value: ' + self.name + '\n')
-        if self.getter:
-            self.getter.generateGlobalCode(file)
-        if self.setter:
-            self.setter.generateGlobalCode(file)
-        indent(file, 0, '\n')
-
-
-# Manifest symbols
-class ManifestSpecification:
-    def __init__(self):
-        self.name = ''
-
-        # We are not currently using the type descriptor
-        self.typeDescriptor = None
-
-        # To be filled in with a GlobalFunctionSpecification
-        # if this manifest has one
-        self.getter = None
-
-        # Manifests that have int values have their int value defined
-        # instead of having to call a getter (because there are so many of them)
-        self.intValue = None
-
-        # The string definition of this manifest
-        self.definition = None
-
-    def generateGlobalCode(self, file):
-        # Note, if the manifest has no value and no getter we do not output anything
-        # even though they may be defined in the C++ sense. Without any values
-        # they are pretty useless in Python
-
-        # If it has an int value, just output that instead of bothering
-        # with a getter
-        if (self.intValue != None):
-            indent(file, 0, '# Manifest: ' + self.name + '\n')
-            indent(file, 0, (self.name + ' = ' + repr(self.intValue) + '\n'))
-            indent(file, 0, '\n')
-
-        elif self.definition:
-            indent(file, 0, ('# Manifest: ' + self.name + ' definition: ' +
-                             self.definition + '\n'))
-            # Output the getter
-            if self.getter:
-                self.getter.generateGlobalCode(file)
-            indent(file, 0, '\n')
-
-
-class MethodArgumentSpecification:
-    def __init__(self):
-        self.name = ''
-        self.typeDescriptor = None
-        # By default it is not the this pointer
-        self.isThis = 0
-
-    def passName(self):
-        if (self.typeDescriptor.recursiveTypeDescriptor().__class__ == \
-            FFITypes.ClassTypeDescriptor):
-            return self.name + '.this'
-        else:
-            return self.name
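passName() is why generated calls read like `libpanda.wrapper(self.this, other.this, 1.5)`: shadow-class arguments are unwrapped to their raw C++ pointer while primitives pass through unchanged. A tiny sketch of that unwrapping, with a hypothetical stub class:

    class NodePathStub(object):
        def __init__(self, this):
            self.this = this                    # the raw C++ pointer handle

    def pass_value(arg):
        # Shadow objects are unwrapped to their C++ pointer; primitives pass as-is.
        return arg.this if hasattr(arg, 'this') else arg

    args = [NodePathStub(0x1000), 1.5, 'name']
    print([pass_value(a) for a in args])        # -> [4096, 1.5, 'name']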

+ 0 - 1026
direct/src/ffi/FFITypes.py

@@ -1,1026 +0,0 @@
-
-"""
-Type Descriptors
-
-Type Descriptors are used for code generation of C++ types. They
-know everything they need to know about themselves to generate code.
-They get constructed by and stored in FFIInterrogateDatabase.
-
-"""
-
-import os
-import string
-import FFIConstants
-import FFIOverload
-
-
-from direct.showbase.PythonUtil import *
-
-TypedObjectDescriptor = None
-
-
-class BaseTypeDescriptor:
-    """
-    A type descriptor contains everything you need to know about a C++ function,
-    class, or primitive.
-    """
-    def __init__(self):
-        # The pythonified name from C++
-        self.foreignTypeName = ''
-
-        # The typeIndex for lookup in the typeIndexMap
-        self.typeIndex = 0
-
-        # The C++ prototype for this type
-        self.prototype = ''
-
-        # The C++ comment for this type
-        self.comment = ''
-
-        # Is this a nested type?
-        self.isNested = 0
-
-        # If we are nested, this is the typeDescriptor we are nested in
-        self.outerType = None
-
-        # The type descriptors for the types we derive from
-        self.parentTypes = []
-
-        # atomicType may be one of the following
-        # AT_not_atomic = 0
-        # AT_int = 1
-        # AT_float = 2
-        # AT_double = 3
-        # AT_bool = 4
-        # AT_char = 5
-        # AT_void = 6
-        # AT_string = 7
-        # AT_longlong = 8
-        # By default this type is not atomic
-        self.atomicType = 0
-
-        # What C module did this type come from?
-        self.moduleName = ''
-
-    def isAtomic(self):
-        return (self.atomicType != 0)
-
-    def generateGlobalCode(self, dir, extensionsDir):
-        # By default generate no code
-        pass
-    def recursiveTypeDescriptor(self):
-        """
-        Attempt to get to the bottom of a type descriptor
-        Since we are at the bottom when we get here, just return self
-        """
-        return self
-    def recordOverloadedMethods(self):
-        # By default do nothing
-        pass
-    def generateReturnValueWrapper(self, classTypeDesc, file, userManagesMemory,
-                                   needsDowncast, nesting):
-        # By default do nothing
-        pass
-    def getFullNestedName(self):
-        """
-        If this type is nested, it will return the fully specified name
-        For example:  OuterClass.InnerClass.ReallyInnerClass
-        """
-        if self.isNested:
-            return self.outerType.getFullNestedName() + '.' + self.foreignTypeName
-        else:
-            return self.foreignTypeName
-
-
-class PrimitiveTypeDescriptor(BaseTypeDescriptor):
-    """
-    Primitive type descriptors include int, float, char, etc.
-    These get mapped to Python types like IntType, FloatType, StringType
-    """
-    def __init__(self):
-        BaseTypeDescriptor.__init__(self)
-
-    def generateReturnValueWrapper(self, classTypeDesc, file, userManagesMemory,
-                                   needsDowncast, nesting):
-        """
-        Write code to the file that will return a primitive to the caller.
-        Pretty simple since there is no extra work needed here
-        """
-        indent(file, nesting, 'return returnValue\n')
-
-class PyObjectTypeDescriptor(BaseTypeDescriptor):
-    """
-    This is a special type descriptor for a PyObject * parameter,
-    which means a natural Python object of any type, to be passed
-    through without molestation.
-    """
-    def __init__(self):
-        BaseTypeDescriptor.__init__(self)
-
-    def generateReturnValueWrapper(self, classTypeDesc, file, userManagesMemory,
-                                   needsDowncast, nesting):
-        indent(file, nesting, 'return returnValue\n')
-
-
-
-class EnumTypeDescriptor(PrimitiveTypeDescriptor):
-    """
-    EnumTypeDescriptors represent enums in C++
-    """
-    def __init__(self):
-        PrimitiveTypeDescriptor.__init__(self)
-        # A dictionary of name, value pairs for this enum
-        self.values = {}
-        # The enum name is different than the foreignTypeName because
-        # we record the foreignTypeName as enum (int)
-        self.enumName = ''
-        # Specify that we do not have any parent or nested types to make
-        # the sorting based on inheritance happy. Essentially, we do not
-        # inherit from anybody or have any nested types
-        self.parentTypes = []
-        self.nestedTypes = []
-
-    def generateGlobalCode(self, dir, extensionsDir):
-        """
-        Generate enum code for this type.
-        """
-        fileName = self.enumName + '.py'
-        file = open(os.path.join(dir, fileName), 'w')
-        indent(file, 0, FFIConstants.generatedHeader)
-        self.generateCode(file, 0)
-
-    def generateCode(self, file, nesting):
-        indent(file, nesting, '# CMODULE [' + self.moduleName + ']\n')
-        self.outputComment(file, nesting)
-        self.outputValues(file, nesting)
-
-
-    def outputComment(self, file, nesting):
-        indent(file, nesting, '\n')
-        indent(file, nesting, '##################################################\n')
-        indent(file, nesting, '#  Enum ' + self.enumName + '\n')
-        indent(file, nesting, '##################################################\n')
-        indent(file, nesting, '\n')
-
-    def outputValues(self, file, nesting):
-        """
-        For each entry in the dictionary, output a line for name, value pairs
-        Example:
-        off = 0
-        on = 1
-        """
-        for key in self.values.keys():
-            indent(file, nesting, key + ' = ' + repr(self.values[key]) + '\n')
-
-
-class DerivedTypeDescriptor(BaseTypeDescriptor):
-    """
-    DerivedTypeDescriptor is a wrapper around a primitive or class type
-    For instance const, or pointer to.
-    """
-    def __init__(self):
-        BaseTypeDescriptor.__init__(self)
-        self.typeDescriptor = None
-
-    def recursiveTypeDescriptor(self):
-        """
-        Attempt to get to the bottom of a type descriptor by
-        recursively unravelling typeDescriptors until you get to
-        a type that is not derived (primitive or class) in which
-        case the base class will just return self.
-        """
-        return self.typeDescriptor.recursiveTypeDescriptor()
-
-class PointerTypeDescriptor(DerivedTypeDescriptor):
-    """
-    Points to another type descriptor
-    """
-    def __init__(self):
-        DerivedTypeDescriptor.__init__(self)
-
-class ConstTypeDescriptor(DerivedTypeDescriptor):
-    """
-    Const version of another type descriptor
-    """
-    def __init__(self):
-        DerivedTypeDescriptor.__init__(self)
-
-class ClassTypeDescriptor(BaseTypeDescriptor):
-    """
-    This describes a C++ class. It holds lists of all its methods too.
-    It can also generate Python shadow class code for itself.
-    """
-    def __init__(self):
-        BaseTypeDescriptor.__init__(self)
-
-        # Methods interrogate told us were constructors
-        self.constructors = []
-
-        # A method interrogate told us is the destructor
-        self.destructor = None
-
-        # Methods interrogate told us were instance methods
-        # Note: the methods without the this pointer get moved into staticMethods
-        self.instanceMethods = []
-
-        # Methods interrogate told us were upcast methods
-        self.upcastMethods = []
-
-        # Methods interrogate told us were downcast methods
-        self.downcastMethods = []
-
-        # Instance methods that had no this pointer are moved into here
-        self.staticMethods = []
-
-        # These are dictionaries used to temporarily hold methods for
-        # overloading while generating code
-        self.overloadedClassMethods = {}
-        self.overloadedInstanceMethods = {}
-
-        # Nested typeDescriptors inside this class
-        self.nestedTypes = []
-
-
-    def getExtensionModuleName(self):
-        """
-        Return a filename for the extensions for this class
-        Example: NodePath extensions would be found in NodePath-extensions.py
-        """
-        return self.foreignTypeName + '-extensions.py'
-
-    def getCModulesRecursively(self, parent):
-        # Now look at all the methods that we might inherit if we are at
-        # a multiple inheritance node and get their C modules
-        for parentType in parent.parentTypes:
-            if (not (parentType.moduleName in self.CModules)):
-                self.CModules.append(parentType.moduleName)
-            for method in parentType.instanceMethods:
-                if (not (method.typeDescriptor.moduleName in self.CModules)):
-                    self.CModules.append(method.typeDescriptor.moduleName)
-            for method in parentType.upcastMethods:
-                if (not (method.typeDescriptor.moduleName in self.CModules)):
-                        self.CModules.append(method.typeDescriptor.moduleName)
-            self.getCModulesRecursively(parentType)
-
-    def getCModules(self):
-        """
-        Return a list of all the C modules this class references
-        """
-        try:
-            # Avoid doing the work twice
-            # if CModules is already defined, just return it
-            return self.CModules
-        except:
-            # Otherwise, it must be our first time through, do the real work
-            # Start with our own moduleName
-            self.CModules = [self.moduleName]
-            for method in (self.constructors + [self.destructor] + self.instanceMethods
-                           + self.upcastMethods + self.downcastMethods + self.staticMethods):
-                if method:
-                    if (not (method.typeDescriptor.moduleName in self.CModules)):
-                        self.CModules.append(method.typeDescriptor.moduleName)
-            self.getCModulesRecursively(self)
-
-            return self.CModules
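Both getCModules() and getReturnTypeModules() memoize their result by stashing it in an instance attribute and recomputing only when the attribute is not yet set. The same caching pattern in isolation (a simplified stand-in, with the expensive traversal faked):

    class Descriptor(object):
        def getCModules(self):
            try:
                # Later calls return the cached list immediately.
                return self.CModules
            except AttributeError:
                # First call does the real (expensive) traversal, then caches it.
                self.CModules = self._collectCModules()
                return self.CModules

        def _collectCModules(self):
            return ['libpanda']                 # placeholder for the real walk

    d = Descriptor()
    assert d.getCModules() is d.getCModules()   # second call hits the cache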
-
-
-    def getReturnTypeModules(self):
-        """
-        Return a list of all the other shadow class modules this
-        class references.
-        Be careful about nested types
-        """
-        # Return type modules are cached once they are calculated so we
-        # do not have to calculate them again
-        try:
-            return self.returnTypeModules
-        except:
-            moduleList = []
-            upcastMethods = []
-            if (len(self.parentTypes) >= 2):
-                for parentType in self.parentTypes:
-                    for method in parentType.instanceMethods:
-                        upcastMethods.append(method)
-                    for method in parentType.upcastMethods:
-                        upcastMethods.append(method)
-            for method in (self.constructors + [self.destructor] + self.instanceMethods
-                           + self.upcastMethods + self.downcastMethods
-                           + self.staticMethods + upcastMethods):
-                if method:
-                    # Get the real return type (not derived)
-                    returnType = method.typeDescriptor.returnType.recursiveTypeDescriptor()
-                    if (not returnType.isNested):
-                        returnTypeName = returnType.foreignTypeName
-                        # Do not put our own module in the import list
-                        if ((returnTypeName != self.foreignTypeName) and
-                            # Do not put modules already in the list (like a set)
-                            (not (returnTypeName in moduleList))):
-                            # If this is a class (not a primitive), put it on the list
-                            if (returnType.__class__ == ClassTypeDescriptor):
-                                moduleList.append(returnTypeName)
-                    # Now look at all the arguments
-                    argTypes = method.typeDescriptor.argumentTypes
-                    for argType in argTypes:
-                        # Get the real return type (not derived)
-                        argType = argType.typeDescriptor.recursiveTypeDescriptor()
-                        if (not argType.isNested):
-                            argTypeName = argType.foreignTypeName
-                            # Do not put our own module in the import list
-                            if ((argTypeName != self.foreignTypeName) and
-                                # Do not put modules already in the list (like a set)
-                                (not (argTypeName in moduleList))):
-                                # If this is a class (not a primitive), put it on the list
-                                if (argType.__class__ == ClassTypeDescriptor):
-                                    moduleList.append(argTypeName)
-            self.returnTypeModules = moduleList
-            return self.returnTypeModules
-
-
-    def recordClassMethod(self, methodSpec):
-        """
-        Record all class methods in a dictionary keyed by method name so we
-        can go through them and see which are overloaded:
-        { methodName: [methodSpec, methodSpec, methodSpec] }
-        """
-        methodList = self.overloadedClassMethods.setdefault(methodSpec.name, [])
-        methodList.append(methodSpec)
-
-
-    def recordInstanceMethod(self, methodSpec):
-        """
-        Record all instance methods in a dictionary keyed by method name so we
-        can go through them and see which are overloaded:
-        { methodName: [methodSpec, methodSpec, methodSpec] }
-        """
-        methodList = self.overloadedInstanceMethods.setdefault(methodSpec.name, [])
-        methodList.append(methodSpec)
-
-
-    def cullOverloadedMethods(self):
-        """
-        Find all the entries that have multiple indexes for the same method name
-        and get rid of all the others. Do this for both class methods and instance methods.
-        """
-        self.overloadedClassMethods = FFIOverload.cullOverloadedMethods(self.overloadedClassMethods)
-        self.overloadedInstanceMethods = FFIOverload.cullOverloadedMethods(self.overloadedInstanceMethods)
-
-
-    def filterOutStaticMethods(self):
-        """
-        Run through the list of instance methods and filter out the
-        ones that are static class methods. We can tell this because they
-        do not have a this pointer in their arg list. Those methods that
-        are static are then placed in a new staticMethods list and the ones
-        that are left are stored back in the instanceMethods list. We are
-        avoiding modifying the instanceMethods list in place while traversing it.
-        Do not check upcast or downcast methods because we know they are not static.
-        """
-        newInstanceMethods = []
-        for method in self.instanceMethods:
-            if method.isStatic():
-                self.staticMethods.append(method)
-            else:
-                newInstanceMethods.append(method)
-        self.instanceMethods = newInstanceMethods
-
-
-    def recordOverloadedMethods(self):
-        """
-        Record all the methods in dictionaries based on method name
-        so we can see if they are overloaded
-        """
-        classMethods = self.constructors + self.staticMethods
-        if self.destructor:
-            classMethods = classMethods + [self.destructor]
-        for method in classMethods:
-            self.recordClassMethod(method)
-
-        instanceMethods = (self.instanceMethods + self.upcastMethods + self.downcastMethods)
-        for method in instanceMethods:
-            self.recordInstanceMethod(method)
-
-
-    def hasMethodNamed(self, methodName):
-        for method in (self.constructors + [self.destructor] + self.instanceMethods
-                       + self.upcastMethods + self.downcastMethods + self.staticMethods):
-            if (method and (method.name == methodName)):
-                return 1
-        return 0
-
-
-    def copyParentMethods(self, file, nesting):
-        """
-        At multiple inheritance nodes, copy all the parent methods into
-        this class and call them after upcasting us to that class
-        """
-        if (len(self.parentTypes) >= 2 or \
-            (len(self.parentTypes) == 1 and self.hasMethodNamed('upcastTo' + self.parentTypes[0].foreignTypeName))):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Upcast inherited instance method wrappers     #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-            for parentType in self.parentTypes:
-                parentList = [parentType]
-                self.copyParentMethodsRecursively(parentList, file, nesting)
-
-
-    def inheritsMethodNamed(self, parentList, methodName):
-        """
-        returns true if the named method is a method on this class, or
-        on any parent class except the last one in the list.
-        """
-        if self.hasMethodNamed(methodName):
-            return 1
-        for pi in range(len(parentList) - 1):
-            if parentList[pi].hasMethodNamed(methodName):
-                return 1
-        return 0
-
-    def copyParentMethodsRecursively(self, parentList, file, nesting):
-        """
-        Copy all the parents' instance methods.
-        Do not copy a function if this class already has a function with that name.
-        We need to recurse up the hierarchy, copying in all our parent nodes'
-        methods all the way up the tree, stopping either at the top or at
-        another MI node that has already copied its parents' methods in.
-        Note: Do not copy the downcast methods.
-        """
-        parent = parentList[-1]
-        if (len(parent.parentTypes) > 0):
-            recurse = 1
-        else:
-            recurse = 0
-
-        for method in parent.instanceMethods:
-            if not self.inheritsMethodNamed(parentList, method.name):
-                # with downcast for all instance methods that are not themselves upcasts
-                method.generateInheritedMethodCode(self, parentList, file, nesting, 1)
-
-        # Also duplicate the overloaded method dispatch functions, if
-        # we don't already have any matching methods by this name.
-        for methodSpecList in parent.overloadedInstanceMethods.values():
-            if not self.inheritsMethodNamed(parentList, methodSpecList[0].name):
-                treeColl = FFIOverload.FFIMethodArgumentTreeCollection(self, methodSpecList)
-                treeColl.generateCode(file, nesting)
-
-        # Copy all the parents' upcast methods so we transitively pick them up
-        for method in parent.upcastMethods:
-            if not self.inheritsMethodNamed(parentList, method.name):
-                # no downcast for all instance methods that are themselves upcasts
-                # that would cause an infinite loop
-                method.generateInheritedMethodCode(self, parentList, file, nesting, 0)
-
-        # Now recurse up the hierarchy until we get to a node that is itself
-        # a multiple inheritance node and stop there, because it will have already
-        # copied all its parent functions in
-        if recurse:
-            for parentType in parent.parentTypes:
-                newParentList = parentList[:]
-                newParentList.append(parentType)
-                self.copyParentMethodsRecursively(newParentList, file, nesting)
-
-
-    def generateOverloadedMethods(self, file, nesting):
-        """
-        Generate code for all the overloaded methods of this class
-        """
-        if (len(self.overloadedClassMethods.values()) or
-            len(self.overloadedInstanceMethods.values())):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Overloaded methods                            #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-        # Overload all the class and instance methods
-        for methodSpecList in (self.overloadedClassMethods.values() +
-                               self.overloadedInstanceMethods.values()):
-            treeColl = FFIOverload.FFIMethodArgumentTreeCollection(self, methodSpecList)
-            treeColl.generateCode(file, nesting)
-
-
-    def generateGlobalCode(self, dir, extensionsDir):
-        """
-        Generate shadow class code for this type.
-        We make our own file from our foreignTypeName and put it in the dir
-        passed in.
-        """
-        fileName = self.foreignTypeName + '.py'
-        fileName1 = self.foreignTypeName + '1.py'
-        file = open(os.path.join(dir, fileName), 'w')
-        indent(file, 0, FFIConstants.generatedHeader)
-        self.outputBaseImports(file)
-        self.generateCode1(file, 0, extensionsDir)
-        file.close()
-
-        file = open(os.path.join(dir, fileName1), 'w')
-        indent(file, 0, FFIConstants.generatedHeader)
-        #self.outputBaseImports(file)
-        self.generateCode2(file, 0, extensionsDir, self.foreignTypeName)
-        file.close()
-
-
-        # Copy in any extensions we may have
-        #self.copyExtensions(extensionsDir, file, 0)
-        #self.outputClassFooter(file)
-        file.close()
-
-
-    def generateCode(self, file, nesting, extensionsDir=None):
-
-        self.recordOverloadedMethods()
-        self.cullOverloadedMethods()
-        self.outputImports(file, nesting)
-        self.outputClassHeader(file, nesting)
-        self.outputClassComment(file, nesting)
-        self.outputClassCModules(file, nesting)
-
-        self.outputNestedTypes(file, nesting)
-
-        indent(file, nesting+1, '\n')
-        indent(file, nesting+1, '##################################################\n')
-        indent(file, nesting+1, '#  Constructors                                  #\n')
-        indent(file, nesting+1, '##################################################\n')
-        indent(file, nesting+1, '\n')
-        self.outputBaseConstructor(file, nesting)
-        if self.constructors:
-            for method in self.constructors:
-                method.generateConstructorCode(self, file, nesting)
-        else:
-            self.outputEmptyConstructor(file, nesting)
-
-        indent(file, nesting+1, '\n')
-        indent(file, nesting+1, '##################################################\n')
-        indent(file, nesting+1, '#  Destructor                                    #\n')
-        indent(file, nesting+1, '##################################################\n')
-        indent(file, nesting+1, '\n')
-        self.outputBaseDestructor(file, nesting)
-        if self.destructor:
-            self.destructor.generateDestructorCode(self, file, nesting)
-        # If you have no destructor, inherit one
-
-        ##########################
-        ## Extension methods moved up locally
-        if extensionsDir:
-            self.copyExtensions(extensionsDir, file, 0)
-
-        ##########################
-        ## import return types
-        returnTypeModules = self.getReturnTypeModules()
-        if len(returnTypeModules):
-            for moduleName in returnTypeModules:
-                indent(file, nesting, 'import ' + moduleName + '\n')
-
-        ################################
-
-
-        if len(self.staticMethods):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Static Methods                                #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-            for method in self.staticMethods:
-                method.generateStaticCode(self, file, nesting)
-
-        if len(self.instanceMethods):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Instance methods                              #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-            for method in self.instanceMethods:
-                method.generateMethodCode(self, file, nesting)
-
-        if len(self.upcastMethods):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Upcast methods                                #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-            for method in self.upcastMethods:
-                method.generateUpcastMethodCode(self, file, nesting)
-
-        if len(self.downcastMethods):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Downcast methods                              #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-            for method in self.downcastMethods:
-                method.generateDowncastMethodCode(self, file, nesting)
-
-        # Copy in all our parent nodes (only does work if we are an MI node)
-        self.copyParentMethods(file, nesting)
-
-        self.generateOverloadedMethods(file, nesting)
-
-    def generateCode1(self, file, nesting, extensionsDir=None):
-
-        self.recordOverloadedMethods()
-        self.cullOverloadedMethods()
-        self.outputImports(file, nesting)
-        self.outputClassHeader(file, nesting)
-        self.outputClassComment(file, nesting)
-        self.outputClassCModules(file, nesting)
-
-        self.outputNestedTypes(file, nesting)
-
-        indent(file, nesting+1, '\n')
-        indent(file, nesting+1, '##################################################\n')
-        indent(file, nesting+1, '#  Constructors                                  #\n')
-        indent(file, nesting+1, '##################################################\n')
-        indent(file, nesting+1, '\n')
-        self.outputBaseConstructor(file, nesting)
-        if self.constructors:
-            for method in self.constructors:
-                method.generateConstructorCode(self, file, nesting)
-        else:
-            self.outputEmptyConstructor(file, nesting)
-
-        indent(file, nesting+1, '\n')
-        indent(file, nesting+1, '##################################################\n')
-        indent(file, nesting+1, '#  Destructor                                    #\n')
-        indent(file, nesting+1, '##################################################\n')
-        indent(file, nesting+1, '\n')
-        self.outputBaseDestructor(file, nesting)
-        if self.destructor:
-            self.destructor.generateDestructorCode(self, file, nesting)
-        # If you have no destructor, inherit one
-        ##########################
-        ## Extension methods moved up locally
-        if extensionsDir:
-            self.copyExtensions(extensionsDir, file, 0)
-
-
-
-    def generateCode2(self, file, nesting, extensionsDir, file1module):
-
-        indent(file, nesting, 'from  ' + file1module + ' import *\n')
-
-        ##########################
-        ## import return types
-        returnTypeModules = self.getReturnTypeModules()
-        if len(returnTypeModules):
-            for moduleName in returnTypeModules:
-                indent(file, nesting, 'import ' + moduleName + '\n')
-
-        ################################
-
-
-        if len(self.staticMethods):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Static Methods                                #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-            for method in self.staticMethods:
-                method.generateStaticCode(self, file, nesting)
-
-        if len(self.instanceMethods):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Instance methods                              #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-            for method in self.instanceMethods:
-                method.generateMethodCode(self, file, nesting)
-
-        if len(self.upcastMethods):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Upcast methods                                #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-            for method in self.upcastMethods:
-                method.generateUpcastMethodCode(self, file, nesting)
-
-        if len(self.downcastMethods):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Downcast methods                              #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-            for method in self.downcastMethods:
-                method.generateDowncastMethodCode(self, file, nesting)
-
-        # Copy in all our parent nodes (only does work if we are an MI node)
-        self.copyParentMethods(file, nesting)
-
-        self.generateOverloadedMethods(file, nesting)
-
-
-    def outputNestedTypes(self, file, nesting):
-        if (len(self.nestedTypes) > 0):
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Nested Types                                  #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-        # Output code in this same file for all our nested types
-        for nestedType in self.nestedTypes:
-            nestedType.generateCode(file, nesting+1)
-
-
-    def copyExtensions(self, extensionsDir, file, nesting):
-        """
-        Copy in the extension file for this class, if one exists.
-        If you want to extend a C++ class, create a file in the extensions
-        directory; this will append that extension file to the generated
-        code file.
-        """
-        extensionFileName = self.getExtensionModuleName()
-        extensionFilePath = os.path.join(extensionsDir, extensionFileName)
-        if os.path.exists(extensionFilePath):
-            FFIConstants.notify.info('Found extensions for class: ' + self.foreignTypeName)
-            extensionFile = open(extensionFilePath)
-            indent(file, nesting+1, '\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '#  Extension methods                             #\n')
-            indent(file, nesting+1, '##################################################\n')
-            indent(file, nesting+1, '\n')
-            # Copy the contents of the extensions file to the class file verbatim
-            indent(file, nesting, extensionFile.read())
-        else:
-            # No extensions for this class
-            pass
-
-
-    def outputBaseImports(self, file):
-        indent(file, 0, '# CMODULE [' + self.moduleName + ']\n')
-        # Everybody imports types for type checking
-        indent(file, 0, 'from types import IntType, LongType, FloatType, NoneType, StringType\n')
-        indent(file, 0, 'from direct.ffi import FFIExternalObject\n')
-        indent(file, 0, '\n')
-
-        indent(file, 0, '# Import all the C modules this class uses\n')
-        for moduleName in self.getCModules():
-            if moduleName:
-                indent(file, 0, 'import ' + moduleName + '\n')
-                indent(file, 0, 'import ' + moduleName + 'Downcasts\n')
-        indent(file, 0, '\n')
-        indent(file, 0, 'from direct.ffi import FFIExternalObject\n')
-
-
-
-    def outputImportsRecursively(self, parent, file, nesting):
-        # Not sure why we need to import parent types...
-        #for parentType in parent.parentTypes:
-        #    self.outputImportsRecursively(parentType, file, nesting)
-
-        parentTypeName = parent.foreignTypeName
-        fullNestedName = parent.getFullNestedName()
-        if (fullNestedName != parentTypeName):
-            nestedChain = fullNestedName.split(".")
-            moduleName = nestedChain[0]
-            indent(file, nesting, 'import ' + moduleName + '\n')
-        else:
-            indent(file, nesting, 'import ' + parent.foreignTypeName + '\n')
-
-        #returnTypeModules = parent.getReturnTypeModules()
-        #if len(returnTypeModules):
-        #    for moduleName in returnTypeModules:
-        #        indent(file, nesting, 'import ' + moduleName + '\n')
-
-
-    def outputImports(self, file, nesting):
-        """
-        Generate code that imports the modules we need for this class
-        """
-        indent(file, nesting, '# Import everybody we inherit from\n')
-        indent(file, nesting, '# and all the shadow class modules this class uses\n')
-
-        # Output all of our return types
-        #returnTypeModules = self.getReturnTypeModules()
-        #if len(returnTypeModules):
-        #    for moduleName in returnTypeModules:
-        #        indent(file, nesting, 'import ' + moduleName + '\n')
-
-        for parentType in self.parentTypes:
-            self.outputImportsRecursively(parentType, file, nesting)
-        indent(file, nesting, '\n')
-
-
-
-    def outputClassComment(self, file, nesting):
-        """
-        Output the class comment to the file
-        """
-        if FFIConstants.wantComments:
-            if self.comment:
-                indent(file, nesting+1, ('"' * 3) + '\n')
-                # To insert tabs into the comment, replace all newlines with a newline+tabs
-                comment = string.replace(self.comment,
-                                         '\n', ('\n' + ('    ' * (nesting+1))))
-                indent(file, nesting+1, comment)
-                file.write('\n')
-                indent(file, nesting+1, ('"' * 3) + '\n\n')
-
-    def outputClassHeader(self, file, nesting):
-        """
-        Output the class definition to the file
-        """
-
-        if (self.foreignTypeName == ''):
-            FFIConstants.notify.warning('Class with no name')
-
-
-#          # If this is the toplevel, we need to delay the generation of this
-#          # class to avoid circular imports, so put the entire class in a function
-#          # that we will call later
-#          if (nesting==0):
-#              indent(file, nesting, '# Delay the definition of this class until all the imports are done\n')
-#              indent(file, nesting, '# Make sure we only define this class once\n')
-#              indent(file, nesting, 'classDefined = 0\n')
-#              indent(file, nesting, 'def generateClass_' + self.foreignTypeName + '():\n')
-#              indent(file, nesting, ' if classDefined: return\n')
-#              indent(file, nesting, ' global classDefined\n')
-#              indent(file, nesting, ' classDefined = 1\n')
-#              # Start the class definition indented a space to account for the function
-#              indent(file, nesting, ' class ' + self.foreignTypeName)
-#          else:
-#              # Start the class definition
-#              indent(file, nesting, 'class ' + self.foreignTypeName)
-
-        indent(file, nesting, 'class ' + self.foreignTypeName)
-
-        # Everybody inherits from FFIExternalObject
-        file.write('(')
-        # Also inherit from all of your parentTypes
-        for i in range(len(self.parentTypes)):
-            parentTypeName = self.parentTypes[i].foreignTypeName
-            moduleName = parentTypeName
-            # assuming the type "Node" is stored in module "Node.py"
-            # and we have done an "import Node", we need to then
-            # inherit from Node.Node
-            # Actually it is trickier than this. If we import from a
-            # nested class, we need to inherit from something like:
-            #     parentClassModule.parentClass.nestedClass
-            fullNestedName = self.parentTypes[i].getFullNestedName()
-            if (fullNestedName != parentTypeName):
-                nestedChain = fullNestedName.split(".")
-                moduleName = nestedChain[0]
-                parentTypeName = fullNestedName
-            file.write(moduleName + '.' + parentTypeName)
-            file.write(', ')
-        file.write('FFIExternalObject.FFIExternalObject):\n')
-
-
-    def outputClassCModules(self, file, nesting):
-        # Store the class C modules for the class so they do not
-        # get garbage collected before we do
-        # TODO: this did not appear to work so I'm taking it out
-        #indent(file, nesting+1, '__CModules__ = [')
-        #for moduleName in self.getCModules():
-        #     file.write(moduleName + ',')
-        #file.write(']\n')
-
-        # Store the downcast function modules so the FFIExternalObject
-        # can index into them to find the downcast functions
-        indent(file, nesting+1, '__CModuleDowncasts__ = (')
-        for moduleName in self.getCModules():
-            file.write(moduleName + 'Downcasts,')
-        file.write(')\n')
-
-
-    def outputClassFooter(self, file):
-        #indent(file, 0, " # When this class gets defined, put it in this module's namespace\n")
-        #indent(file, 0, " globals()['" + self.foreignTypeName + "'] = " + self.foreignTypeName + '\n')
-        pass
-
-
-    def outputBaseConstructor(self, file, nesting):
-        """
-        Output the __init__ constructor for this class.
-        There is special logic here: if you pass None to the constructor,
-        you will not get an actual C object with memory, just the shadow
-        class shell object. This is useful for functions that want to
-        return this type and already have a this pointer; they just need
-        to construct a shadow object to contain it.
-        """
-
-        indent(file, nesting+1, 'def __init__(self, *_args):\n')
-        indent(file, nesting+2, '# Do Not Initialize the super class it is inlined\n')
-        #indent(file, nesting+2, '# Initialize the super class\n')
-        #indent(file, nesting+2, 'FFIExternalObject.FFIExternalObject.__init__(self)\n')
-        ## this is not the right way to do this any more..
-        indent(file, nesting+2, '# If you want an empty shadow object use the FFIInstance(class) function\n')
-        #indent(file, nesting+2, 'if ((len(_args) == 1) and (_args[0] == None)):\n')
-        #indent(file, nesting+3, 'return\n')
-        #indent(file, nesting+2, '# Otherwise, call the C constructor\n')
-        indent(file, nesting+2, 'self.constructor(*_args)\n')
-        indent(file, nesting, '\n')
-
-
-
-
-    def outputEmptyConstructor(self, file, nesting):
-        """
-        If there is no C++ constructor, we output code that raises a runtime
-        error instead. You really do not want to create an instance with a
-        null this pointer.
-        """
-        indent(file, nesting+1, 'def constructor(self):\n')
-        indent(file, nesting+2, "raise RuntimeError, 'No C++ constructor defined for class: ' + self.__class__.__name__\n")
-
-
-    def outputBaseDestructor(self, file, nesting):
-        """
-        This destructor overrides the built-in Python destructor
-        via the __del__ method, which gets called whenever a
-        Python object is garbage collected. We override it here
-        to add special cleanup for Panda.
-        """
-        if self.destructor:
-            indent(file, nesting+1, 'def __del__(self):\n')
-
-            # Reference counting is now handled in the C++ code
-            # indent(file, nesting+2, 'if isinstance(self, ReferenceCount):\n')
-            # indent(file, nesting+3, 'self.unref()\n')
-            # indent(file, nesting+3, 'if (self.getCount() == 0):\n')
-            # indent(file, nesting+4, 'self.destructor()\n')
-
-            # If the scripting language owns the memory for this object,
-            # we need to call the C++ destructor when Python frees the
-            # shadow object, but only if the userManagesMemory flag is set.
-            # Also make sure we are not destructing a null pointer
-            indent(file, nesting+2, 'if (self.userManagesMemory and (self.this != 0)):\n')
-            self.destructor.outputDestructorBody(self, file, nesting+1)
-            indent(file, nesting, '\n')
-
-            #indent(file, nesting+3, 'self.destructor()\n')
-
-
-    def outputEmptyDestructor(self, file, nesting):
-        """
-        If there is no C++ destructor, we just output this
-        empty one instead
-        """
-        indent(file, nesting+1, 'def destructor(self):\n')
-        indent(file, nesting+2, "raise RuntimeError, 'No C++ destructor defined for class: ' + self.__class__.__name__\n")
-
-
-    def generateReturnValueWrapper(self, classTypeDesc, file, userManagesMemory,
-                                   needsDowncast, nesting):
-        """
-        Generate code that creates a shadow object of this type,
-        then sets the this pointer and returns the object. We call the
-        class constructor with None as the only parameter to get an
-        empty shadow object.
-        """
-        #if classTypeDesc != self:
-        #    indent(file, nesting, 'import ' + self.foreignTypeName + '\n')
-        indent(file, nesting, 'if returnValue == 0: return None\n')
-        # Do not put Class.Class if this file is the file that defines Class
-        # Also check for nested classes. They do not need the module name either
-        typeName = FFIOverload.getTypeName(classTypeDesc, self)
-        #file.write(typeName + '(None)\n')
-        ### inline the old constructors
-
-        #indent(file, nesting, 'returnObject = ')
-        #file.write('FFIExternalObject.FFIInstance('+ typeName + ', returnValue,'+str(userManagesMemory)+')\n')
-        #indent(file, nesting, 'returnObject.this = 0\n')
-        #indent(file, nesting, 'returnObject.userManagesMemory = 0\n');
-
-        ##
-        #indent(file, nesting, 'returnObject.this = returnValue\n')
-        # Zero this pointers get returned as the Python None object
-        #indent(file, nesting, 'if (returnObject.this == 0): return None\n')
-        #if userManagesMemory:
-        #    indent(file, nesting, 'returnObject.userManagesMemory = 1\n')
-        #else:
-        #    indent(file, nesting, 'returnObject.userManagesMemory = 0\n')
-
-        if needsDowncast:
-            #indent(file, nesting, 'returnObject = FFIExternalObject.FFIInstance('+ typeName + ', returnValue,'+str(userManagesMemory)+')\n')
-            if (FFIOverload.inheritsFrom(self, TypedObjectDescriptor) or
-                self == TypedObjectDescriptor):
-                #indent(file, nesting, 'return returnObject.setPointer()\n')
-                indent(file, nesting, 'return FFIExternalObject.FFIInstance('+ typeName + ', returnValue,'+str(userManagesMemory)+').setPointer()\n')
-            else:
-                indent(file, nesting,'return FFIExternalObject.FFIInstance('+ typeName + ', returnValue,'+str(userManagesMemory)+')\n')
-                #indent(file, nesting, 'return returnObject\n')
-        else:
-            indent(file, nesting,'return FFIExternalObject.FFIInstance('+ typeName + ', returnValue,'+str(userManagesMemory)+')\n')
-            #indent(file, nesting, 'return returnObject\n')
-
-
-
-class FunctionTypeDescriptor(BaseTypeDescriptor):
-    """
-    A C++ function type. It knows its returnType, arguments, etc.
-    """
-    def __init__(self):
-        BaseTypeDescriptor.__init__(self)
-        self.returnType = None
-        self.argumentTypes = []
-        self.userManagesMemory = 0
-        self.isVirtual = 0
-        self.moduleName = ''
-        self.wrapperName = ''
-        self.returnValueDestructor = None
-    def thislessArgTypes(self):
-        """
-        It is often useful to know the list of arguments excluding the
-        this parameter (if there was one)
-        """
-        return filter(lambda type: (not type.isThis), self.argumentTypes)
-
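
The code-generation methods above emit each shadow class line by line through indent() calls. As a reading aid, here is a minimal Python sketch of the pieces only implied by this excerpt: the indent() helper is assumed to prefix its text with four spaces per nesting level (matching the '    ' * (nesting+1) substitution used for class comments), and the class, parent, and C-module names in the commented example are hypothetical, not taken from the repository.

    def indent(file, nesting, text):
        # Assumed helper: write 'text' indented four spaces per nesting level.
        file.write('    ' * nesting + text)

    # Rough shape of what generateCode() emits for one class (hypothetical names):
    #
    #   import PandaNode
    #   from direct.ffi import FFIExternalObject
    #
    #   class NodePath(PandaNode.PandaNode, FFIExternalObject.FFIExternalObject):
    #       __CModuleDowncasts__ = (libpandaDowncasts,)
    #
    #       def __init__(self, *_args):
    #           self.constructor(*_args)
    #
    #       def __del__(self):
    #           if (self.userManagesMemory and (self.this != 0)):
    #               self.destructor()   # actual body comes from outputDestructorBody()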

+ 0 - 12
direct/src/ffi/Sources.pp

@@ -1,12 +0,0 @@
-#define INSTALL_SCRIPTS genPyCode.py
-#define INSTALL_MODULES panda3d.py
-
-// If we're on Win32 without Cygwin, install the genPyCode.bat file;
-// for all other platforms, install the genPyCode sh script.
-#if $[MAKE_BAT_SCRIPTS]
-  #define INSTALL_SCRIPTS $[INSTALL_SCRIPTS] genPyCode.bat
-#else
-  #define INSTALL_SCRIPTS $[INSTALL_SCRIPTS] genPyCode
-#endif
-
-#include $[THISDIRPREFIX]genPyCode.pp

+ 0 - 0
direct/src/ffi/__init__.py


+ 0 - 220
direct/src/ffi/genPyCode.pp

@@ -1,220 +0,0 @@
-//
-// genPyCode.pp
-//
-// This file defines the script to auto-generate a sensible genPyCode
-// for the user based on the Config.pp variables in effect at the time
-// ppremake is run.  The generated script will know which directories
-// to generate its output to, as well as which source files to read
-// for the input.
-//
-
-#define install_dir $[$[upcase $[PACKAGE]]_INSTALL]
-#define install_lib_dir $[or $[INSTALL_LIB_DIR],$[install_dir]/lib]
-#define install_bin_dir $[or $[INSTALL_BIN_DIR],$[install_dir]/bin]
-#define install_igatedb_dir $[or $[INSTALL_IGATEDB_DIR],$[install_dir]/etc]
-
-#define python $[PYTHON_COMMAND]
-#if $[USE_DEBUG_PYTHON]
-  #define python $[PYTHON_DEBUG_COMMAND]
-#endif
-
-// If we're on Win32 without Cygwin, generate a genPyCode.bat file;
-// for all other platforms, generate a genPyCode sh script.  Although
-// it's true that on non-Win32 platforms we don't need the script
-// (since the python file itself could be made directly executable),
-// we generate the script anyway to be consistent with Win32, which
-// does require it.
-
-#if $[MAKE_BAT_SCRIPTS]
-
-#output genPyCode.bat
-@echo off
-rem #### Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]].
-rem ################################# DO NOT EDIT ###########################
-
-$[python] -u $[osfilename $[install_bin_dir]/genPyCode.py] %1 %2 %3 %4 %5 %6 %7 %8 %9
-#end genPyCode.bat
-
-#else  // MAKE_BAT_SCRIPTS
-
-#output genPyCode $[if $[>= $[PPREMAKE_VERSION],1.21],binary]
-#! /bin/sh
-#### Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]].
-################################# DO NOT EDIT ###########################
-
-#if $[CTPROJS]
-# This script was generated while the user was using the ctattach
-# tools.  That had better still be the case.
-#if $[WINDOWS_PLATFORM]
-$[python] -u `cygpath -w $DIRECT/built/bin/genPyCode.py` "$@"
-#else
-$[python] -u $DIRECT/built/bin/genPyCode.py "$@"
-#endif
-#else
-$[python] -u '$[osfilename $[install_bin_dir]/genPyCode.py]' "$@"
-#endif
-#end genPyCode
-
-#endif  // MAKE_BAT_SCRIPTS
-
-#output genPyCode.py
-#! /usr/bin/env $[python]
-#### Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]].
-################################# DO NOT EDIT ###########################
-
-import os
-import sys
-import glob
-
-#if $[CTPROJS]
-# This script was generated while the user was using the ctattach
-# tools.  That had better still be the case.
-
-def deCygwinify(path):
-    if os.name in ['nt'] and path[0] == '/':
-        # On Windows, we may need to convert from a Cygwin-style path
-        # to a native Windows path.
-
-        # Check for a case like /i/ or /p/: this converts
-        # to i:/ or p:/.
-
-        dirs = path.split('/')
-        if len(dirs) > 2 and len(dirs[1]) == 1:
-            path = '%s:\%s' % (dirs[1], '\\'.join(dirs[2:]))
-
-        else:
-            # Otherwise, prepend $PANDA_ROOT and flip the slashes.
-            pandaRoot = os.getenv('PANDA_ROOT')
-            if pandaRoot:
-                path = os.path.normpath(pandaRoot + path)
-
-    return path
-
-ctprojs = os.getenv('CTPROJS')
-if not ctprojs:
-    print "You are no longer attached to any trees!"
-    sys.exit(1)
-    
-directDir = os.getenv('DIRECT')
-if not directDir:
-    print "You are not attached to DIRECT!"
-    sys.exit(1)
-
-directDir = deCygwinify(directDir)
-
-# Make sure that direct.showbase.FindCtaPaths gets imported.
-parent, base = os.path.split(directDir)
-
-if parent not in sys.path:
-    sys.path.append(parent)
-
-import direct.showbase.FindCtaPaths
-
-#endif  // CTPROJS
-
-from direct.ffi import DoGenPyCode
-from direct.ffi import FFIConstants
-
-# The following parameters were baked in to this script at the time
-# ppremake was run in Direct.
-#define extensions_name $[if $[PYTHON_NATIVE],extensions_native,extensions]
-
-#if $[>= $[OPTIMIZE], 4]
-FFIConstants.wantComments = 0
-FFIConstants.wantTypeChecking = 0
-#endif
-
-DoGenPyCode.interrogateLib = r'libdtoolconfig'
-DoGenPyCode.codeLibs = r'$[GENPYCODE_LIBS]'.split()
-DoGenPyCode.native = $[if $[PYTHON_NATIVE],1,0]
-
-#if $[not $[CTPROJS]]
-// Since the user is not using ctattach, bake these variables in too.
-DoGenPyCode.directDir = r'$[osfilename $[TOPDIR]]'
-DoGenPyCode.outputCodeDir = r'$[osfilename $[install_lib_dir]/pandac]'
-DoGenPyCode.outputHTMLDir = r'$[osfilename $[install_data_dir]/doc]'
-DoGenPyCode.extensionsDir = r'$[osfilename $[TOPDIR]/src/$[extensions_name]]'
-DoGenPyCode.etcPath = [r'$[osfilename $[install_igatedb_dir]]']
-DoGenPyCode.pythonSourcePath = r'$[osfilename $[TOPDIR]]'
-
-#else
-# The user is expected to be using ctattach, so don't bake in the
-# following four; these instead come from the dynamic settings set by
-# ctattach.
-
-DoGenPyCode.directDir = directDir
-DoGenPyCode.outputCodeDir = os.path.join(directDir, 'built', 'lib', 'pandac')
-DoGenPyCode.outputHTMLDir = os.path.join(directDir, 'built', 'shared', 'doc')
-DoGenPyCode.extensionsDir = os.path.join(directDir, 'src', '$[extensions_name]')
-DoGenPyCode.etcPath = []
-DoGenPyCode.pythonSourcePath = []
-
-#if $[CTA_GENERIC_GENPYCODE]
-# Look for additional packages (other than the basic three)
-# that the user might be dynamically attached to.
-packages = []
-for proj in ctprojs.split():
-    projName = proj.split(':')[0]
-    packages.append(projName)
-packages.reverse()
-
-try:
-    from direct.extensions_native.extension_native_helpers import Dtool_PreloadDLL
-except ImportError:
-    print "Unable to import Dtool_PreloadDLL, not trying generic libraries."
-else:
-    for package in packages:
-        packageDir = os.getenv(package)
-        if packageDir:
-            packageDir = deCygwinify(packageDir)
-            etcDir = os.path.join(packageDir, 'etc')
-            try:
-                inFiles = glob.glob(os.path.join(etcDir, 'built', '*.in'))
-            except:
-                inFiles = []
-            if inFiles:
-                DoGenPyCode.etcPath.append(etcDir)
-
-            if package not in ['WINTOOLS', 'DTOOL', 'DIRECT', 'PANDA']:
-                DoGenPyCode.pythonSourcePath.append(packageDir)
-
-                libDir = os.path.join(packageDir, 'built', 'lib')
-                try:
-                    files = os.listdir(libDir)
-                except:
-                    files = []
-                for file in files:
-                    if os.path.isfile(os.path.join(libDir, file)):
-                        basename, ext = os.path.splitext(file)
-
-                        # Try to import the library.  If we can import it,
-                        # instrument it.
-                        try:
-                            Dtool_PreloadDLL(basename)
-                            # __import__(basename, globals(), locals())
-                            isModule = 1
-                        except:
-                            isModule = 0
-
-                        #
-                        # RHH: hack for OPT2 builds and Python debug libraries.
-                        #
-                        if not isModule:
-                            # Debug Python libraries use the '_d' suffix on Windows.
-                            basename = basename.replace('_d','')
-                            try:
-                                Dtool_PreloadDLL(basename)
-                                # __import__(basename, globals(), locals())
-                                isModule = 1
-                            except:
-                                isModule = 0                        
-
-                        if isModule:
-                            if basename not in DoGenPyCode.codeLibs:
-                                DoGenPyCode.codeLibs.append(basename)
-#endif  // CTA_GENERIC_GENPYCODE
-#endif  // CTPROJS
-
-DoGenPyCode.run()
-
-#end genPyCode.py
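
Stripped of the ppremake conditionals, the genPyCode.py that the template above writes out reduces, in the non-ctattach case, to a small driver that bakes the build-time settings into module attributes and then runs the generator. A minimal Python sketch follows; the concrete paths and library names are illustrative stand-ins for the $[...] substitutions, not values from any real build.

    #! /usr/bin/env python
    # Illustrative reduction of the generated genPyCode.py (hypothetical paths).
    from direct.ffi import DoGenPyCode

    DoGenPyCode.interrogateLib = 'libdtoolconfig'
    DoGenPyCode.codeLibs = ['libpandaexpress', 'libpanda', 'libp3direct']  # from GENPYCODE_LIBS
    DoGenPyCode.native = 1                                                 # PYTHON_NATIVE builds
    DoGenPyCode.directDir = '/usr/local/panda/direct'
    DoGenPyCode.outputCodeDir = '/usr/local/panda/lib/pandac'
    DoGenPyCode.outputHTMLDir = '/usr/local/panda/shared/doc'
    DoGenPyCode.extensionsDir = '/usr/local/panda/direct/src/extensions_native'
    DoGenPyCode.etcPath = ['/usr/local/panda/etc']
    DoGenPyCode.pythonSourcePath = '/usr/local/panda/direct'

    DoGenPyCode.run()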

+ 0 - 96
direct/src/ffi/jGenPyCode.py

@@ -1,96 +0,0 @@
-##############################################################
-#
-# This module should be invoked by a shell-script that says:
-#
-#    python -c "import direct.ffi.jGenPyCode" <arguments>
-#
-# Before invoking python, the shell-script may need to set
-# these environment variables, to make sure that everything
-# can be located appropriately.
-#
-#    PYTHONPATH
-#    PATH
-#    LD_LIBRARY_PATH
-#
-##############################################################
-
-import sys, os
-
-##############################################################
-#
-# Locate the 'direct' tree and the 'pandac' tree.
-#
-##############################################################
-
-DIRECT=None
-PANDAC=None
-
-for dir in sys.path:
-    if (DIRECT is None):
-        if (dir != "") and os.path.exists(os.path.join(dir,"direct")):
-            DIRECT=os.path.join(dir,"direct")
-    if (PANDAC is None):
-        if (dir != "") and (os.path.exists(os.path.join(dir,"pandac"))):
-            PANDAC=os.path.join(dir,"pandac")
-
-if (DIRECT is None):
-    sys.exit("Could not locate the 'direct' python modules")
-if (PANDAC is None):
-    sys.exit("Could not locate the 'pandac' python modules")
-
-##############################################################
-#
-# Locate direct/src/extensions.
-#
-# It could be inside the direct tree.  It may be underneath
-# a 'src' subdirectory.  Or, the direct tree may actually be
-# a stub that points to the source tree.
-#
-##############################################################
-
-EXTENSIONS=None
-
-if (EXTENSIONS is None):
-  if os.path.isdir(os.path.join(DIRECT,"src","extensions_native")):
-    EXTENSIONS=os.path.join(DIRECT,"src","extensions_native")
-
-if (EXTENSIONS is None):
-  if os.path.isdir(os.path.join(DIRECT,"extensions_native")):
-    EXTENSIONS=os.path.join(DIRECT,"extensions_native")
-
-if (EXTENSIONS is None):
-  if os.path.isdir(os.path.join(DIRECT,"..","..","direct","src","extensions_native")):
-    EXTENSIONS=os.path.join(DIRECT,"..","..","direct","src","extensions_native")
-
-if (EXTENSIONS is None):
-  sys.exit("Could not locate direct/src/extensions_native")
-
-##############################################################
-#
-# Call genpycode with default paths.
-#
-##############################################################
-
-from direct.ffi import DoGenPyCode
-DoGenPyCode.outputCodeDir = PANDAC
-DoGenPyCode.outputHTMLDir = os.path.join(PANDAC,"..","doc")
-DoGenPyCode.directDir = DIRECT
-DoGenPyCode.extensionsDir = EXTENSIONS
-DoGenPyCode.interrogateLib = r'libdtoolconfig'
-DoGenPyCode.codeLibs = ['libpandaexpress','libpanda','libpandaphysics','libpandafx','libp3direct','libpandaskel','libpandaegg','libpandaode']
-DoGenPyCode.etcPath = [os.path.join(PANDAC,"input")]
-DoGenPyCode.pythonSourcePath = [DIRECT]
-DoGenPyCode.native = 1
-
-#print "outputDir = ", DoGenPyCode.outputDir
-#print "directDir = ", DoGenPyCode.directDir
-#print "extensionsDir = ", DoGenPyCode.extensionsDir
-#print "interrogateLib = ", DoGenPyCode.interrogateLib
-#print "codeLibs = ", DoGenPyCode.codeLibs
-#print "etcPath = ", DoGenPyCode.etcPath
-#print "native = ", DoGenPyCode.native
-
-DoGenPyCode.run()
-
-os._exit(0)
-

+ 0 - 275
direct/src/ffi/panda3d.py

@@ -1,275 +0,0 @@
-#!/bin/true
-import os, sys, imp
-
-panda3d_modules = {
-    "core"        :("libpandaexpress", "libpanda"),
-    "dtoolconfig" : "libp3dtoolconfig",
-    "physics"     : "libpandaphysics",
-    "fx"          : "libpandafx",
-    "direct"      : "libp3direct",
-    "egg"         : "libpandaegg",
-    "ode"         : "libpandaode",
-    "bullet"      : "libpandabullet",
-    "vision"      : "libp3vision",
-    "physx"       : "libpandaphysx",
-    "ai"          : "libpandaai",
-    "awesomium"   : "libp3awesomium",
-    "speedtree"   : "libpandaspeedtree",
-    "rocket"      :("_rocketcore", "_rocketcontrols", "libp3rocket"),
-    "vrpn"        : "libp3vrpn",
-}
-
-class panda3d_import_manager:
-    # Important: store a reference to the sys and os modules, as
-    # all references in the global namespace will be reset.
-    os = os
-    sys = sys
-    imp = imp
-
-    __libraries__ = {}
-
-    # Figure out the dll suffix (commonly, _d for windows debug builds),
-    # and the dll extension.
-    dll_suffix = ''
-    dll_exts = ('.pyd', '.so')
-    if sys.platform == "win32":
-        dll_exts = ('.pyd', '.dll')
-
-        # We allow the caller to preload dll_suffix into the sys module.
-        dll_suffix = getattr(sys, 'dll_suffix', None)
-
-        if dll_suffix is None:
-            # Otherwise, we try to determine it from the executable name:
-            # python_d.exe implies _d across the board.
-            dll_suffix = ''
-            if sys.executable.endswith('_d.exe'):
-                dll_suffix = '_d'
-
-    # On OSX, extension modules can be loaded from either .so or .dylib.
-    if sys.platform == "darwin":
-        dll_exts = ('.pyd', '.so', '.dylib')
-
-    prepared = False
-
-    @classmethod
-    def __prepare(cls):
-        # This method only needs to be called once.
-        if cls.prepared:
-            return
-        cls.prepared = True
-
-        # First, we must ensure that the library path is
-        # modified to locate all of the dynamic libraries.
-        target = None
-        filename = "libpandaexpress" + cls.dll_suffix
-        for dir in cls.sys.path + [cls.sys.prefix]:
-            lib = cls.os.path.join(dir, filename)
-            for dll_ext in cls.dll_exts:
-                if (cls.os.path.exists(lib + dll_ext)):
-                    target = dir
-                    break
-        if target == None:
-            raise ImportError("Cannot find %s" % (filename))
-        target = cls.os.path.abspath(target)
-
-        # And add that directory to the system library path.
-        if cls.sys.platform == "win32":
-            cls.__prepend_to_path("PATH", target)
-        else:
-            cls.__prepend_to_path("LD_LIBRARY_PATH", target)
-
-        if cls.sys.platform == "darwin":
-            cls.__prepend_to_path("DYLD_LIBRARY_PATH", target)
-
-    @classmethod
-    def __prepend_to_path(cls, varname, target):
-        """ Prepends the given directory to the
-        specified search path environment variable. """
-
-        # Get the current value
-        if varname in cls.os.environ:
-            path = cls.os.environ[varname].strip(cls.os.pathsep)
-        else:
-            path = ""
-
-        # Prepend our value, if it's not already the first thing
-        if len(path) == 0:
-            cls.os.environ[varname] = target
-        elif not path.startswith(target):
-            cls.os.environ[varname] = target + cls.os.pathsep + path
-
-    @classmethod
-    def libimport(cls, name):
-        """ Imports and returns the specified library name. The
-        provided library name has to be without dll extension. """
-
-        if name in cls.__libraries__:
-            return cls.__libraries__[name]
-
-        if not cls.prepared: cls.__prepare()
-
-        # Try to import it normally first.
-        try:
-            return __import__(name)
-        except ImportError:
-            _, err, _ = cls.sys.exc_info()
-            if str(err) != "No module named " + name and \
-               str(err) != "No module named '%s'" % name:
-                raise
-
-        # Hm, importing normally didn't work. Let's try imp.load_dynamic.
-        # But first, locate the desired library.
-        target = None
-        filename = name + cls.dll_suffix
-        for dir in cls.sys.path + [cls.sys.prefix]:
-            lib = cls.os.path.join(dir, filename)
-            for dll_ext in cls.dll_exts:
-                if (cls.os.path.exists(lib + dll_ext)):
-                    target = lib + dll_ext
-                    break
-            if target:
-                # Once we find the first match, break all the way
-                # out--don't keep looking for a second match.
-                break
-        if target == None:
-            message = "DLL loader cannot find %s." % name
-            raise ImportError(message)
-        target = cls.os.path.abspath(target)
-
-        # Now import the file explicitly.
-        lib = cls.imp.load_dynamic(name, target)
-        cls.__libraries__[name] = lib
-        return lib
-
-class panda3d_submodule(type(sys)):
-    """ Represents a submodule of 'panda3d' that wraps a dynamic
-    library. The dynamic library is loaded when something is accessed
-    from the module. """
-
-    __manager__ = panda3d_import_manager
-
-    def __init__(self, name, library):
-        type(sys).__init__(self, "panda3d." + name)
-        self.__library__ = library
-        self.__libraries__ = [self.__library__]
-
-    def __load__(self):
-        """ Forces the library to be loaded right now. """
-        self.__manager__.libimport(self.__library__)
-
-    def __getattr__(self, name):
-        mod = self.__manager__.libimport(self.__library__)
-        if name == "__all__":
-            everything = []
-            for obj in mod.__dict__.keys():
-                if not obj.startswith("__"):
-                    everything.append(obj)
-            self.__all__ = everything
-            return everything
-        elif name == "__library__":
-            return self.__library__
-        elif name == "__libraries__":
-            return self.__libraries__
-        elif name in mod.__dict__.keys():
-            value = mod.__dict__[name]
-            setattr(self, name, value)
-            return value
-
-        # Not found? Raise the error that Python would normally raise.
-        raise AttributeError("'module' object has no attribute '%s'" % name)
-
-class panda3d_multisubmodule(type(sys)):
-    """ Represents a submodule of 'panda3d' that wraps multiple
-    dynamic libraries. These are loaded when something is accessed
-    from the module. """
-
-    __manager__ = panda3d_import_manager
-
-    def __init__(self, name, libraries):
-        type(sys).__init__(self, "panda3d." + name)
-        self.__libraries__ = libraries
-
-    def __load__(self):
-        """ Forces the libraries to be loaded right now. """
-        err = []
-        for lib in self.__libraries__:
-            try:
-                self.__manager__.libimport(lib)
-            except ImportError:
-                _, msg, _ = self.__manager__.sys.exc_info()
-                err.append(str(msg).rstrip('.'))
-        if len(err) > 0:
-            raise ImportError(', '.join(err))
-
-    def __getattr__(self, name):
-        if name == "__all__":
-            everything = []
-            for lib in self.__libraries__:
-                for obj in self.__manager__.libimport(lib).__dict__:
-                    if not obj.startswith("__"):
-                        everything.append(obj)
-            self.__all__ = everything
-            return everything
-        elif name == "__libraries__":
-            return self.__libraries__
-
-        for lib in self.__libraries__:
-            mod = self.__manager__.libimport(lib)
-            if name in mod.__dict__:
-                value = mod.__dict__[name]
-                setattr(self, name, value)
-                return value
-
-        # Not found? Raise the error that Python would normally raise.
-        raise AttributeError("'module' object has no attribute '%s'" % name)
-
-class panda3d_module(type(sys)):
-    """ Represents the main 'panda3d' module. """
-
-    __file__ = __file__
-    modules = panda3d_modules
-    __manager__ = panda3d_import_manager
-
-    def __load__(self):
-        """ Force all the libraries to be loaded right now. """
-        err = []
-        for module in self.modules:
-            try:
-                self.__manager__.sys.modules["panda3d.%s" % module].__load__()
-            except ImportError:
-                _, msg, _ = self.__manager__.sys.exc_info()
-                err.append(str(msg).rstrip('.'))
-        if len(err) > 0:
-            raise ImportError(', '.join(err))
-
-
-    def __getattr__(self, name):
-        if name == "__all__":
-            self.__all__ = name
-            return self.modules.keys()
-        elif name == "__file__":
-            return self.__file__
-        elif name in self.modules:
-            value = self.__manager__.sys.modules["panda3d.%s" % name]
-            setattr(self, name, value)
-            return value
-
-        # Not found? Raise the error that Python would normally raise.
-        raise AttributeError("'module' object has no attribute '%s'" % name)
-
-# Create the fake module objects and insert them into sys.modules.
-this = panda3d_module("panda3d")
-
-# Loop through the module dictionary, create a fake
-# module for each of them, and insert them into
-# sys.modules and into the 'panda3d' fake module.
-for mod, lib in panda3d_modules.items():
-    if isinstance(lib, tuple):
-        module = panda3d_multisubmodule(mod, lib)
-    else:
-        module = panda3d_submodule(mod, lib)
-    sys.modules["panda3d." + mod] = module
-    this.__dict__[mod] = module
-
-# Important: this must be the last thing in this file
-sys.modules["panda3d"] = this
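
The net effect of the module objects defined above is that 'import panda3d' is cheap: the submodules are registered immediately, but each underlying DLL is located and loaded by libimport() only on first attribute access (or explicitly via __load__). A hypothetical interactive session, assuming the Panda3D libraries are on sys.path and that NodePath is exported by libpanda:

    import panda3d                     # registers the fake panda3d.* modules in sys.modules
    from panda3d.core import NodePath  # first attribute access loads libpandaexpress/libpanda
    panda3d.physics.__load__()         # or force a submodule's library to load eagerly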

+ 0 - 4
direct/src/filter/Sources.pp

@@ -1,4 +0,0 @@
-
-#defer install_data_dir $[install_lib_dir]/$[PACKAGE]/$[DIRNAME]
-#define INSTALL_DATA filter-bloomi.sha filter-bloomx.sha filter-bloomy.sha filter-blurx.sha filter-blury.sha filter-copy.sha filter-down4.sha
-

+ 0 - 0
direct/src/fsm/Sources.pp


+ 0 - 0
direct/src/gui/Sources.pp


+ 0 - 38
direct/src/http/Sources.pp

@@ -1,38 +0,0 @@
-#define BUILD_DIRECTORY $[WANT_NATIVE_NET]
-#define USE_PACKAGES native_net
-
-#define OTHER_LIBS \
-    p3express:c pandaexpress:m \
-    p3pstatclient:c p3pipeline:c panda:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3dtoolutil:c p3dtoolbase:c p3dtool:m p3prc:c p3pandabase:c \
-    $[if $[HAVE_NET],p3net:c] $[if $[WANT_NATIVE_NET],p3nativenet:c] \
-    p3linmath:c p3putil:c
-
-#define LOCAL_LIBS \
-    p3directbase
-#define C++FLAGS -DWITHIN_PANDA
-#define UNIX_SYS_LIBS m
-#define USE_PACKAGES python
-
-#begin lib_target
-  #define TARGET  p3http
-
-  #define COMBINED_SOURCES $[TARGET]_composite1.cxx  
-
-
-  #define SOURCES \
-     config_http.h \
-     http_connection.h  \
-     http_request.h
-
-
-  #define INCLUDED_SOURCES \
-     config_http.cxx \
-     http_connection.cxx \
-     parsedhttprequest.cxx  \
-     http_request.cxx
-
-
-  #define IGATESCAN all
-#end lib_target

+ 0 - 54
direct/src/interval/Sources.pp

@@ -1,54 +0,0 @@
-#begin lib_target
-  #define TARGET p3interval
-  #define LOCAL_LIBS \
-    p3directbase
-  #define OTHER_LIBS \
-    p3downloader:c p3linmath:c \
-    p3chan:c p3event:c p3gobj:c p3pnmimage:c p3mathutil:c \
-    p3pgraph:c p3putil:c panda:m p3express:c pandaexpress:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3pandabase:c p3prc:c p3gsgbase:c p3pstatclient:c \
-    $[if $[HAVE_NET],p3net:c] $[if $[WANT_NATIVE_NET],p3nativenet:c] \
-    p3pipeline:c
-
-  #define SOURCES \
-    config_interval.cxx config_interval.h \
-    cInterval.cxx cInterval.I cInterval.h \
-    cIntervalManager.cxx cIntervalManager.I cIntervalManager.h \
-    cConstraintInterval.cxx cConstraintInterval.I cConstraintInterval.h \
-    cConstrainTransformInterval.cxx cConstrainTransformInterval.I cConstrainTransformInterval.h \
-    cConstrainPosInterval.cxx cConstrainPosInterval.I cConstrainPosInterval.h \
-    cConstrainHprInterval.cxx cConstrainHprInterval.I cConstrainHprInterval.h \
-    cConstrainPosHprInterval.cxx cConstrainPosHprInterval.I cConstrainPosHprInterval.h \
-    cLerpInterval.cxx cLerpInterval.I cLerpInterval.h \
-    cLerpNodePathInterval.cxx cLerpNodePathInterval.I cLerpNodePathInterval.h \
-    cLerpAnimEffectInterval.cxx cLerpAnimEffectInterval.I cLerpAnimEffectInterval.h \
-    cMetaInterval.cxx cMetaInterval.I cMetaInterval.h \
-    hideInterval.cxx hideInterval.I hideInterval.h \
-    lerpblend.cxx lerpblend.h \
-    showInterval.cxx showInterval.I showInterval.h \
-    waitInterval.cxx waitInterval.I waitInterval.h \
-    lerp_helpers.h
-
-  #define INSTALL_HEADERS \
-    config_interval.h \
-    cInterval.I cInterval.h \
-    cIntervalManager.I cIntervalManager.h \
-    cConstraintInterval.I cConstraintInterval.h \
-    cConstrainTransformInterval.I cConstrainTransformInterval.h \
-    cConstrainPosInterval.I cConstrainPosInterval.h \
-    cConstrainHprInterval.I cConstrainHprInterval.h \
-    cConstrainPosHprInterval.I cConstrainPosHprInterval.h \
-    cLerpInterval.I cLerpInterval.h \
-    cLerpNodePathInterval.I cLerpNodePathInterval.h \
-    cLerpAnimEffectInterval.I cLerpAnimEffectInterval.h \
-    cMetaInterval.I cMetaInterval.h \
-    hideInterval.I hideInterval.h \
-    lerpblend.h \
-    showInterval.I showInterval.h \
-    waitInterval.I waitInterval.h \
-    lerp_helpers.h
-
-  #define IGATESCAN all
-#end lib_target

+ 0 - 3
direct/src/leveleditor/Sources.pp

@@ -1,3 +0,0 @@
-// Toontown-specific code has been moved into the $TOONTOWN source tree
-
-

+ 0 - 42
direct/src/motiontrail/Sources.pp

@@ -1,42 +0,0 @@
-#begin lib_target
-  #define TARGET p3motiontrail
-  #define LOCAL_LIBS \
-    p3directbase
-
-  #define OTHER_LIBS \
-    p3linmath:c \
-    p3mathutil:c \
-    p3gobj:c \
-    p3putil:c \
-    p3pipeline:c \
-    p3event:c \
-    p3pstatclient:c \
-    p3pnmimage:c \
-    $[if $[HAVE_NET],p3net:c] $[if $[WANT_NATIVE_NET],p3nativenet:c] \
-    p3pgraph:c \
-    panda:m \
-    p3express:c \
-    p3downloader:c \
-    pandaexpress:m \
-    p3interrogatedb:c \
-    p3dconfig:c \
-    p3dtoolconfig:m \
-    p3dtoolutil:c \
-    p3dtoolbase:c \
-    p3dtool:m \
-    p3pandabase:c \
-    p3prc:c \
-    p3gsgbase:c \
-    p3parametrics:c
-
-  
-  #define SOURCES \
-    config_motiontrail.cxx config_motiontrail.h \
-    cMotionTrail.cxx cMotionTrail.h 
-
-  #define INSTALL_HEADERS \
-    config_motiontrail.h \
-    cMotionTrail.h 
-
-  #define IGATESCAN all
-#end lib_target

+ 0 - 77
direct/src/p3d/Sources.pp

@@ -1,77 +0,0 @@
-// This directory contains the Python code necessary to interface with
-// the browser plugin system at runtime.  It also contains the Python
-// scripts to create and manage p3d files, which are the actual
-// runtime applications, and packages, which are additional code and
-// assets that can be downloaded at runtime by p3d files.
-
-#if $[BUILD_P3D_SCRIPTS]
-
-  // If the developer has asked to build the shell script to invoke
-  // ppackage.py (or some other Python script in this directory), then
-// do so now.  These convenience scripts aren't built by default,
-  // because usually ppackage.p3d etc. is a more reliable way to
-  // invoke these applications.  However, there might be development
-  // environments that don't have a ppackage.p3d available, in which
-  // case it is convenient to have one or more of these scripts.
-
-  #define INSTALL_SCRIPTS $[BUILD_P3D_SCRIPTS:%=%.py]
-
-  // On Windows, we generate a batch file; on other platforms
-  // (including Cygwin), we generate a sh script.
-
-  #define install_dir $[$[upcase $[PACKAGE]]_INSTALL]
-  #define install_bin_dir $[or $[INSTALL_BIN_DIR],$[install_dir]/bin]
-
-  #define python $[PYTHON_COMMAND]
-  #if $[USE_DEBUG_PYTHON]
-    #define python $[PYTHON_DEBUG_COMMAND]
-  #endif
-  #if $[>= $[OPTIMIZE], 4]
-    #define python $[python] -OO
-  #endif
-
-  #foreach scriptname $[BUILD_P3D_SCRIPTS]
-    #if $[MAKE_BAT_SCRIPTS]
-      #set INSTALL_SCRIPTS $[INSTALL_SCRIPTS] $[scriptname].bat
-    #else
-      #set INSTALL_SCRIPTS $[INSTALL_SCRIPTS] $[scriptname]
-    #endif
-
-    #if $[MAKE_BAT_SCRIPTS]
-  #output $[scriptname].bat notouch
-@echo off
-rem #### Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]].
-rem ################################# DO NOT EDIT ###########################
-
-$[python] -u $[osfilename $[install_bin_dir]/$[scriptname].py] %1 %2 %3 %4 %5 %6 %7 %8 %9
-  #end $[scriptname].bat
-
-  #else  // MAKE_BAT_SCRIPTS
-
-  #output $[scriptname] binary notouch
-#! /bin/sh
-#### Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]].
-################################# DO NOT EDIT ###########################
-
-  #if $[CTPROJS]
-# This script was generated while the user was using the ctattach
-# tools.  That had better still be the case.
-  #if $[WINDOWS_PLATFORM]
-$[python] -u `cygpath -w $DIRECT/built/bin/$[scriptname].py` "$@"
-  #else
-$[python] -u $DIRECT/built/bin/$[scriptname].py "$@"
-  #endif
-  #else
-$[python] -u '$[osfilename $[install_bin_dir]/$[scriptname].py]' "$@"
-  #endif
-  #end $[scriptname]
-
-  #endif  // MAKE_BAT_SCRIPTS
-
-  #end scriptname
-
-#endif  // BUILD_P3D_SCRIPTS
-
-
-
-

+ 0 - 0
direct/src/particles/Sources.pp


+ 0 - 0
direct/src/physics/Sources.pp


+ 0 - 369
direct/src/plugin/Sources.pp

@@ -1,369 +0,0 @@
-// This directory contains the code for the "Core API" part of the
-// Panda3D browser plugin system, which is not built unless you have
-// defined HAVE_P3D_PLUGIN in your Config.pp.  Most Panda3D developers
-// will have no need to build this, unless you are developing the
-// plugin system itself.
-
-// This directory also contains the code for p3dpython.exe, which is
-// part of the Panda3D rtdist build.  It's not strictly part of the
-// "Core API"; it is packaged as part of each downloadable version of
-// Panda3D.  It is only built if you have defined either
-// PANDA_PACKAGE_HOST_URL or HAVE_P3D_RTDIST in your Config.pp, which
-// indicates an intention to build a downloadable version of Panda3D.
-// Developers who are preparing a custom Panda3D package for download
-// by the plugin will need to build this.
-
-// If P3D_PLUGIN_MT is defined, then (on Windows) /MT is used to
-// compile the core API and the NPAPI and ActiveX plugins, instead of
-// /MD.  This links the plugin with the static C runtime library,
-// instead of the dynamic runtime library, which is much better for
-// distributing the plugin with the XPI and CAB interfaces.  This
-// requires that special /MT versions of OpenSSL and zlib are available.
-
-#define _MT $[if $[P3D_PLUGIN_MT],_mt]
-
-#define COREAPI_SOURCES \
-    fileSpec.cxx fileSpec.h fileSpec.I \
-    find_root_dir.cxx find_root_dir.h \
-    $[if $[IS_OSX],find_root_dir_assist.mm] \
-    get_tinyxml.h \
-    binaryXml.cxx binaryXml.h \
-    fhandle.h \
-    handleStream.cxx handleStream.h handleStream.I \
-    handleStreamBuf.cxx handleStreamBuf.h handleStreamBuf.I \
-    mkdir_complete.cxx mkdir_complete.h \
-    parse_color.cxx parse_color.h \
-    wstring_encode.cxx wstring_encode.h \
-    p3d_lock.h p3d_plugin.h \
-    p3d_plugin_config.h \
-    p3d_plugin_common.h \
-    p3dAuthSession.h p3dAuthSession.I \
-    p3dBoolObject.h \
-    p3dConcreteSequence.h \
-    p3dConcreteStruct.h \
-    p3dConditionVar.h p3dConditionVar.I \
-    p3dDownload.h p3dDownload.I \
-    p3dFileDownload.h p3dFileDownload.I \
-    p3dFileParams.h p3dFileParams.I \
-    p3dFloatObject.h \
-    p3dHost.h p3dHost.I \
-    p3dInstance.h p3dInstance.I \
-    p3dInstanceManager.h p3dInstanceManager.I \
-    p3dIntObject.h \
-    p3dMainObject.h \
-    p3dMultifileReader.h p3dMultifileReader.I \
-    p3dNoneObject.h \
-    p3dObject.h p3dObject.I \
-    p3dOsxSplashWindow.h p3dOsxSplashWindow.I \
-    p3dPackage.h p3dPackage.I \
-    p3dPatchfileReader.h p3dPatchfileReader.I \
-    p3dPatchFinder.h p3dPatchFinder.I \
-    p3dPythonObject.h \
-    p3dReferenceCount.h p3dReferenceCount.I \
-    p3dSession.h p3dSession.I \
-    p3dSplashWindow.h p3dSplashWindow.I \
-    p3dStringObject.h \
-    p3dTemporaryFile.h p3dTemporaryFile.I \
-    p3dUndefinedObject.h \
-    p3dWinSplashWindow.h p3dWinSplashWindow.I \
-    p3dX11SplashWindow.h p3dX11SplashWindow.I \
-    p3dWindowParams.h p3dWindowParams.I \
-    plugin_get_x11.h \
-    xml_helpers.h \
-    run_p3dpython.h
-
-#define COREAPI_INCLUDED_SOURCES \
-    p3d_plugin.cxx \
-    p3dAuthSession.cxx \
-    p3dBoolObject.cxx \
-    p3dConcreteSequence.cxx \
-    p3dConcreteStruct.cxx \
-    p3dConditionVar.cxx \
-    p3dDownload.cxx \
-    p3dFileDownload.cxx \
-    p3dFileParams.cxx \
-    p3dFloatObject.cxx \
-    p3dHost.cxx \
-    p3dInstance.cxx \
-    p3dInstanceManager.cxx \
-    p3dIntObject.cxx \
-    p3dMainObject.cxx \
-    p3dMultifileReader.cxx \
-    p3dNoneObject.cxx \
-    p3dObject.cxx \
-    p3dOsxSplashWindow.cxx \
-    p3dPackage.cxx \
-    p3dPatchfileReader.cxx \
-    p3dPatchFinder.cxx \
-    p3dPythonObject.cxx \
-    p3dReferenceCount.cxx \
-    p3dSession.cxx \
-    p3dSplashWindow.cxx \
-    p3dStringObject.cxx \
-    p3dTemporaryFile.cxx \
-    p3dUndefinedObject.cxx \
-    p3dWinSplashWindow.cxx \
-    p3dX11SplashWindow.cxx \
-    p3dWindowParams.cxx \
-    xml_helpers.cxx
-
-#begin lib_target
-
-//
-// p3d_plugin.dll, the main entry point to the Core API.
-//
-
-  #define BUILD_TARGET $[and $[HAVE_P3D_PLUGIN],$[HAVE_OPENSSL],$[HAVE_ZLIB]]
-  #define USE_PACKAGES openssl$[_MT] zlib$[_MT] x11
-  #define TARGET p3d_plugin
-  #define LIB_PREFIX
-  #define BUILDING_DLL BUILDING_P3D_PLUGIN
-  #define LINK_FORCE_STATIC_RELEASE_C_RUNTIME $[P3D_PLUGIN_MT]
-
-  #define OTHER_LIBS \
-    p3tinyxml$[_MT] $[if $[OSX_PLATFORM],p3subprocbuffer]
-
-  #define COMBINED_SOURCES p3d_plugin_composite1.cxx
-  #define SOURCES $[COREAPI_SOURCES]
-  #define INCLUDED_SOURCES $[COREAPI_INCLUDED_SOURCES]
-
-  #define INSTALL_HEADERS \
-    p3d_plugin.h
-
-  #define WIN_SYS_LIBS user32.lib gdi32.lib shell32.lib comctl32.lib msimg32.lib ole32.lib
-
-#end lib_target
-
-#begin static_lib_target
-
-//
-// libp3d_plugin_static.lib, the Core API as a static library (for p3dembed).
-//
-
-  #define BUILD_TARGET $[and $[HAVE_P3D_PLUGIN],$[HAVE_OPENSSL],$[HAVE_ZLIB]]
-  #define USE_PACKAGES openssl zlib x11
-  #define TARGET p3d_plugin_static
-  #define BUILDING_DLL BUILDING_P3D_PLUGIN
-
-  #define OTHER_LIBS \
-    p3tinyxml $[if $[OSX_PLATFORM],p3subprocbuffer]
-
-  #define COMBINED_SOURCES p3d_plugin_composite1.cxx
-  #define SOURCES $[COREAPI_SOURCES]
-  #define INCLUDED_SOURCES $[COREAPI_INCLUDED_SOURCES]
-
-  #define WIN_SYS_LIBS user32.lib gdi32.lib shell32.lib comctl32.lib msimg32.lib ole32.lib
-
-#end static_lib_target
-
-#begin bin_target
-
-//
-// p3dcert.exe, the authorization GUI invoked when the user clicks the
-// red "play" button to approve an unknown certificate.  Considered
-// part of the Core API, though it is a separate download.
-//
-#if $[HAVE_FLTK]
-  #define BUILD_TARGET $[and $[HAVE_P3D_PLUGIN],$[HAVE_FLTK],$[HAVE_OPENSSL]]
-  #define USE_PACKAGES fltk openssl
-  #define SOURCES p3dCert.cxx p3dCert.h
-#else
-  #define BUILD_TARGET $[and $[HAVE_P3D_PLUGIN],$[HAVE_WX],$[HAVE_OPENSSL]]
-  #define USE_PACKAGES wx openssl
-  #define SOURCES p3dCert_wx.cxx p3dCert_wx.h
-#endif
-  #define TARGET p3dcert
-
-  #define SOURCES $[SOURCES] \
-    is_pathsep.h is_pathsep.I \
-    wstring_encode.cxx wstring_encode.h \
-    mkdir_complete.cxx mkdir_complete.h
-
-  #define OSX_SYS_FRAMEWORKS Carbon
-
-#end bin_target
-
-
-#define PLUGIN_COMMON_SOURCES \
-    load_plugin.cxx load_plugin.h \
-    fileSpec.cxx fileSpec.h fileSpec.I \
-    find_root_dir.cxx find_root_dir.h \
-    $[if $[IS_OSX],find_root_dir_assist.mm] \
-    is_pathsep.h is_pathsep.I \
-    mkdir_complete.cxx mkdir_complete.h \
-    get_twirl_data.cxx get_twirl_data.h \
-    parse_color.cxx parse_color.h \
-    wstring_encode.cxx wstring_encode.h
-
-
-#begin static_lib_target
-//
-// libplugin_common.lib, a repository of code shared between the core
-// API and the various plugin implementations.
-//
-
-  #define BUILD_TARGET $[and $[HAVE_P3D_PLUGIN],$[HAVE_OPENSSL]]
-  #define TARGET plugin_common
-  #define USE_PACKAGES openssl
-
-  #define SOURCES $[PLUGIN_COMMON_SOURCES]
-
-#end static_lib_target
-
-#if $[P3D_PLUGIN_MT]
-#begin static_lib_target
-//
-// libplugin_common_mt.lib, the same as above, with /MT compilation.
-//
-
-  #define BUILD_TARGET $[and $[HAVE_P3D_PLUGIN],$[HAVE_OPENSSL]]
-  #define TARGET plugin_common_mt
-  #define USE_PACKAGES openssl_mt
-  #define LINK_FORCE_STATIC_RELEASE_C_RUNTIME 1
-
-  #define SOURCES $[PLUGIN_COMMON_SOURCES]
-
-#end static_lib_target
-#endif  // $[P3D_PLUGIN_MT]
-
-
-
-// The remaining targets build p3dpython.exe and variants.
-
-#begin bin_target
-
-//
-// p3dpython.exe, the primary entry point to the downloaded Panda3D
-// runtime.  This executable is run in a child process by the Core API
-// to invoke a particular instance of Panda.
-//
-
-  #define BUILD_TARGET $[and $[HAVE_P3D_RTDIST],$[HAVE_PYTHON],$[HAVE_OPENSSL]]
-  #define USE_PACKAGES python openssl cg
-  #define TARGET p3dpython
-
-  #define OTHER_LIBS \
-    p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3express:c pandaexpress:m p3dxml:c \
-    p3pgraph:c p3pgraphnodes:c p3cull:c p3gsgbase:c p3gobj:c \
-    p3mathutil:c p3downloader:c p3pnmimage:c \
-    p3prc:c p3pstatclient:c p3pandabase:c p3linmath:c p3putil:c \
-    p3pipeline:c p3event:c p3display:c panda:m \
-    $[if $[WANT_NATIVE_NET],p3nativenet:c] \
-    $[if $[HAVE_NET],p3net:c] \
-    p3tinyxml
-
-  #define SOURCES \
-    binaryXml.cxx binaryXml.h \
-    fhandle.h \
-    handleStream.cxx handleStream.h handleStream.I \
-    handleStreamBuf.cxx handleStreamBuf.h handleStreamBuf.I \
-    p3d_lock.h p3d_plugin.h \
-    p3d_plugin_config.h \
-    p3dCInstance.cxx \
-    p3dCInstance.h p3dCInstance.I \
-    p3dPythonRun.cxx p3dPythonRun.h p3dPythonRun.I \
-    run_p3dpython.h run_p3dpython.cxx
-
-  #define SOURCES $[SOURCES] \
-    p3dPythonMain.cxx
-
-  // If you have to link with a static Python library, define it here.
-  #define EXTRA_LIBS $[EXTRA_P3DPYTHON_LIBS]
-  #define OSX_SYS_FRAMEWORKS Carbon
-
-  #define WIN_SYS_LIBS user32.lib
-#end bin_target
-
-#begin bin_target
-
-//
-// p3dpythonw.exe, a special variant on p3dpython.exe required by
-// Windows (and built only on a Windows platform).  This variant is
-// compiled as a desktop application, as opposed to p3dpython.exe,
-// which is a console application.  (Both variants are required,
-// because the plugin might be invoked either from a console or from
-// the desktop.)
-//
-
-  #define BUILD_TARGET $[and $[HAVE_P3D_RTDIST],$[HAVE_PYTHON],$[HAVE_OPENSSL],$[WINDOWS_PLATFORM]]
-  #define USE_PACKAGES python openssl
-  #define TARGET p3dpythonw
-  #define EXTRA_CDEFS NON_CONSOLE
-
-  #define OTHER_LIBS \
-    p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3express:c pandaexpress:m p3dxml:c \
-    p3pgraph:c p3pgraphnodes:c p3cull:c p3gsgbase:c p3gobj:c \
-    p3mathutil:c p3downloader:c p3pnmimage:c \
-    p3prc:c p3pstatclient:c p3pandabase:c p3linmath:c p3putil:c \
-    p3pipeline:c p3event:c p3display:c panda:m \
-    $[if $[WANT_NATIVE_NET],p3nativenet:c] \
-    $[if $[HAVE_NET],p3net:c] \
-    p3tinyxml
-
-  #define SOURCES \
-    binaryXml.cxx binaryXml.h \
-    fhandle.h \
-    handleStream.cxx handleStream.h handleStream.I \
-    handleStreamBuf.cxx handleStreamBuf.h handleStreamBuf.I \
-    p3d_lock.h p3d_plugin.h \
-    p3d_plugin_config.h \
-    p3dCInstance.cxx \
-    p3dCInstance.h p3dCInstance.I \
-    p3dPythonRun.cxx p3dPythonRun.h p3dPythonRun.I \
-    run_p3dpython.h run_p3dpython.cxx
-
-  #define SOURCES $[SOURCES] \
-    p3dPythonMain.cxx
-
-  // If you have to link with a static Python library, define it here.
-  #define EXTRA_LIBS $[EXTRA_P3DPYTHON_LIBS]
-  #define OSX_SYS_FRAMEWORKS Carbon
-
-  #define WIN_SYS_LIBS user32.lib
-#end bin_target
-
-#begin lib_target
-
-//
-// libp3dpython.dll, a special library used to run P3DPythonRun within
-// the parent (browser) process, instead of forking a child, as a
-// desperation fallback in case forking fails for some reason.
-//
-  #define BUILD_TARGET $[and $[HAVE_P3D_RTDIST],$[HAVE_PYTHON],$[HAVE_OPENSSL]]
-  #define USE_PACKAGES python openssl cg
-  #define TARGET libp3dpython
-  #define LIB_PREFIX
-
-  #define OTHER_LIBS \
-    p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3express:c pandaexpress:m p3dxml:c \
-    p3pgraph:c p3pgraphnodes:c p3cull:c p3gsgbase:c p3gobj:c \
-    p3mathutil:c p3downloader:c p3pnmimage:c \
-    p3prc:c p3pstatclient:c p3pandabase:c p3linmath:c p3putil:c \
-    p3pipeline:c p3event:c p3display:c panda:m \
-    $[if $[WANT_NATIVE_NET],p3nativenet:c] \
-    $[if $[HAVE_NET],p3net:c] \
-    p3tinyxml
-
-  #define SOURCES \
-    binaryXml.cxx binaryXml.h \
-    fhandle.h \
-    handleStream.cxx handleStream.h handleStream.I \
-    handleStreamBuf.cxx handleStreamBuf.h handleStreamBuf.I \
-    p3d_lock.h p3d_plugin.h \
-    p3d_plugin_config.h \
-    p3dCInstance.cxx \
-    p3dCInstance.h p3dCInstance.I \
-    p3dPythonRun.cxx p3dPythonRun.h p3dPythonRun.I \
-    run_p3dpython.h run_p3dpython.cxx
-
-  #define WIN_SYS_LIBS user32.lib
-#end lib_target
-
-
-#include $[THISDIRPREFIX]p3d_plugin_config.h.pp

+ 0 - 38
direct/src/plugin/p3d_plugin_config.h.pp

@@ -1,38 +0,0 @@
-// This file is read and processed by ppremake to generate
-// p3d_plugin_config.h.
-
-#output p3d_plugin_config.h notouch
-/* p3d_plugin_config.h.  Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]]. */
-/********************************** DO NOT EDIT ****************************/
-
-/* The URL that is the root of the download server that this plugin
-   should contact.  The contents.xml file that defines this particular
-   "coreapi" package should be found at this location. */
-#$[]define PANDA_PACKAGE_HOST_URL "$[PANDA_PACKAGE_HOST_URL]"
-
-/* The Core API version number.  This one also appears in
-   pandaVersion.h. */
-#$[]define P3D_COREAPI_VERSION_STR "$[join .,$[P3D_COREAPI_VERSION]]"
-
-/* As does the plugin version number. */
-#$[]define P3D_PLUGIN_VERSION_STR "$[join .,$[P3D_PLUGIN_VERSION]]"
-
-/* The filename(s) to generate output to when the plugin is running.
-   For debugging purposes only. */
-#$[]define P3D_PLUGIN_LOG_DIRECTORY "$[subst \,\\,$[osfilename $[P3D_PLUGIN_LOG_DIRECTORY]]]"
-#$[]define P3D_PLUGIN_LOG_BASENAME1 "$[P3D_PLUGIN_LOG_BASENAME1]"
-#$[]define P3D_PLUGIN_LOG_BASENAME2 "$[P3D_PLUGIN_LOG_BASENAME2]"
-#$[]define P3D_PLUGIN_LOG_BASENAME3 "$[P3D_PLUGIN_LOG_BASENAME3]"
-
-/* For development only: the location at which p3dpython.exe can be
-   found.  Empty string for the default. */
-#$[]define P3D_PLUGIN_P3DPYTHON "$[subst \,\\,$[osfilename $[P3D_PLUGIN_P3DPYTHON]]]"
-
-/* For development only: the location at which p3d_plugin.dll/.so can
-   be found.  Empty string for the default. */
-#$[]define P3D_PLUGIN_P3D_PLUGIN "$[subst \,\\,$[osfilename $[P3D_PLUGIN_P3D_PLUGIN]]]"
-
-/* We need to know whether GTK is enabled for XEmbed. */
-$[cdefine HAVE_GTK]
-
-#end p3d_plugin_config.h

+ 0 - 175
direct/src/plugin_activex/P3DActiveX.rc.pp

@@ -1,175 +0,0 @@
-//
-// P3DActiveX.rc.pp
-//
-// This file defines the script to auto-generate P3DActiveX.rc at
-// ppremake time.  We use this to fill in the DLL version correctly.
-//
-
-#output P3DActiveX.rc notouch
-/$[]/#### Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]].
-/$[]/################################# DO NOT EDIT ###########################
-
-// Microsoft Visual C++ generated resource script.
-//
-#$[]include "resource.h"
-
-#$[]define APSTUDIO_READONLY_SYMBOLS
-/////////////////////////////////////////////////////////////////////////////
-//
-// Generated from the TEXTINCLUDE 2 resource.
-//
-#$[]include "afxres.h"
-
-/////////////////////////////////////////////////////////////////////////////
-#$[]undef APSTUDIO_READONLY_SYMBOLS
-
-/////////////////////////////////////////////////////////////////////////////
-// English (U.S.) resources
-
-#$[]if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
-#$[]ifdef _WIN32
-LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
-#$[]pragma code_page(1252)
-#$[]endif //_WIN32
-
-#$[]ifdef APSTUDIO_INVOKED
-/////////////////////////////////////////////////////////////////////////////
-//
-// TEXTINCLUDE
-//
-
-1 TEXTINCLUDE 
-BEGIN
-    "resource.h\0"
-END
-
-2 TEXTINCLUDE 
-BEGIN
-    "#$[]include ""afxres.h""\r\n"
-    "\0"
-END
-
-3 TEXTINCLUDE 
-BEGIN
-    "1 TYPELIB ""P3DActiveX.tlb""\r\n"
-    "\0"
-END
-
-#$[]endif    // APSTUDIO_INVOKED
-
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Version
-//
-
-VS_VERSION_INFO VERSIONINFO
- FILEVERSION $[P3D_PLUGIN_DLL_COMMA_VERSION]
- PRODUCTVERSION $[P3D_PLUGIN_DLL_COMMA_VERSION]
- FILEFLAGSMASK 0x3fL
-#$[]ifdef _DEBUG
- FILEFLAGS 0x1L
-#$[]else
- FILEFLAGS 0x0L
-#$[]endif
- FILEOS 0x4L
- FILETYPE 0x2L
- FILESUBTYPE 0x0L
-BEGIN
-    BLOCK "StringFileInfo"
-    BEGIN
-        BLOCK "040904e4"
-        BEGIN
-            VALUE "FileDescription", "Panda3D Game Engine Plug-in $[P3D_PLUGIN_VERSION_STR]\0"
-            VALUE "CompanyName", "$[PANDA_DISTRIBUTOR]"
-            VALUE "FileVersion", "$[P3D_PLUGIN_DLL_DOT_VERSION]"
-            VALUE "InternalName", "P3DActiveX.ocx"
-            VALUE "LegalTrademarks", "\0"
-            VALUE "FileOpenName", "Panda3D applet\0"
-            VALUE "OLESelfRegister", "\0"
-            VALUE "OriginalFilename", "P3DActiveX.ocx"
-            VALUE "ProductName", "Panda3D Game Engine Plug-in $[P3D_PLUGIN_VERSION_STR]\0"
-            VALUE "ProductVersion", "$[P3D_PLUGIN_DLL_DOT_VERSION]"
-        END
-    END
-    BLOCK "VarFileInfo"
-    BEGIN
-        VALUE "Translation", 0x409, 1252
-    END
-END
-
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Bitmap
-//
-
-IDB_P3DACTIVEX          BITMAP                  "P3DActiveXCtrl.bmp"
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Dialog
-//
-
-IDD_PROPPAGE_P3DACTIVEX DIALOG  0, 0, 250, 62
-STYLE DS_SETFONT | WS_CHILD
-FONT 8, "MS Sans Serif"
-BEGIN
-    LTEXT           "TODO: Place controls to manipulate properties of P3DActiveX Control on this dialog.",
-                    IDC_STATIC,7,25,229,16
-END
-
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// DESIGNINFO
-//
-
-#$[]ifdef APSTUDIO_INVOKED
-GUIDELINES DESIGNINFO 
-BEGIN
-    IDD_PROPPAGE_P3DACTIVEX, DIALOG
-    BEGIN
-        LEFTMARGIN, 7
-        RIGHTMARGIN, 243
-        TOPMARGIN, 7
-        BOTTOMMARGIN, 55
-    END
-END
-#$[]endif    // APSTUDIO_INVOKED
-
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// String Table
-//
-
-STRINGTABLE 
-BEGIN
-    IDS_P3DACTIVEX          "P3DActiveX Control"
-    IDS_P3DACTIVEX_PPG      "P3DActiveX Property Page"
-END
-
-STRINGTABLE 
-BEGIN
-    IDS_P3DACTIVEX_PPG_CAPTION "General"
-END
-
-#$[]endif    // English (U.S.) resources
-/////////////////////////////////////////////////////////////////////////////
-
-
-
-#$[]ifndef APSTUDIO_INVOKED
-/////////////////////////////////////////////////////////////////////////////
-//
-// Generated from the TEXTINCLUDE 3 resource.
-//
-1 TYPELIB "P3DActiveX.tlb"
-
-/////////////////////////////////////////////////////////////////////////////
-#$[]endif    // not APSTUDIO_INVOKED
-
-
-#end P3DActiveX.rc
-

+ 0 - 42
direct/src/plugin_activex/Sources.pp

@@ -1,42 +0,0 @@
-// This directory builds the code for the ActiveX (Internet Explorer)
-// plugin, part of the Panda3D browser plugin system.  Most Panda3D
-// developers will have no need to build this, unless you are
-// developing the plugin system itself.  Define HAVE_P3D_PLUGIN in
-// your Config.pp to build this directory.
-
-#define BUILD_DIRECTORY $[and $[HAVE_P3D_PLUGIN],$[WINDOWS_PLATFORM],$[HAVE_ACTIVEX],$[not $[eq $[USE_COMPILER],MSVC9x64]]]
-
-#define _MT $[if $[P3D_PLUGIN_MT],_mt]
-
-#begin lib_target
-  #define TARGET p3dactivex
-  #define LIB_PREFIX
-  #define DYNAMIC_LIB_EXT .ocx
-
-  #define LOCAL_LIBS plugin_common$[_MT]
-  #define LINK_FORCE_STATIC_RELEASE_C_RUNTIME $[P3D_PLUGIN_MT]
-  #define OTHER_LIBS p3tinyxml$[_MT]
-
-  #define COMBINED_SOURCES \
-    $[TARGET]_composite1.cxx
-
-  #define SOURCES \
-    P3DActiveX.h P3DActiveXCtrl.h P3DActiveXPropPage.h \
-    PPBrowserObject.h PPDownloadCallback.h PPDownloadRequest.h \
-    PPInstance.h PPInterface.h PPLogger.h PPPandaObject.h \
-    resource.h P3DActiveX.idl
-
-  #define INCLUDED_SOURCES \
-    P3DActiveX.cpp P3DActiveXCtrl.cpp P3DActiveXPropPage.cpp \
-    PPBrowserObject.cpp PPDownloadCallback.cpp PPDownloadRequest.cpp \
-    PPInstance.cpp PPInterface.cpp PPLogger.cpp PPPandaObject.cpp
-
-  #define EXTRA_CDEFS _USRDLL _WINDLL _MBCS $[if $[not $[P3D_PLUGIN_MT]],_AFXDLL]
-  #define WIN_RESOURCE_FILE P3DActiveX.rc
-  #define LINKER_DEF_FILE P3DActiveX.def
-
-  #define INSTALL_HEADERS
-
-#end lib_target
-
-#include $[THISDIRPREFIX]P3DActiveX.rc.pp

+ 0 - 62
direct/src/plugin_npapi/Sources.pp

@@ -1,62 +0,0 @@
-// This directory builds the code for the NPAPI (Mozilla) plugin, part
-// of the Panda3D browser plugin system.  Most Panda3D developers will
-// have no need to build this, unless you are developing the plugin
-// system itself.  Define HAVE_P3D_PLUGIN in your Config.pp to build
-// this directory.
-
-#define BUILD_DIRECTORY $[and $[HAVE_P3D_PLUGIN],$[HAVE_NPAPI]]
-
-#define _MT $[if $[P3D_PLUGIN_MT],_mt]
-#define USE_PACKAGES npapi gtk
-
-#begin lib_target
-  // By Mozilla convention, on Windows at least, the generated DLL
-  // filename must begin with "np", not "libnp".  (Actually, this is
-  // probably no longer true on recent versions of Mozilla.  But why
-  // take chances?)
-  #define TARGET nppanda3d
-  #define LIB_PREFIX
-
-  #define LOCAL_LIBS plugin_common$[_MT]
-  #define LINK_FORCE_STATIC_RELEASE_C_RUNTIME $[P3D_PLUGIN_MT]
-  #define OTHER_LIBS p3tinyxml$[_MT]
-
-  #define COMBINED_SOURCES \
-    $[TARGET]_composite1.cxx
-
-  #define SOURCES \
-    nppanda3d_common.h \
-    ppBrowserObject.h ppBrowserObject.I \
-    ppDownloadRequest.h ppDownloadRequest.I \
-    ppInstance.h ppInstance.I \
-    ppPandaObject.h ppPandaObject.I \
-    ppToplevelObject.h ppToplevelObject.I \
-    startup.h
-
-  #define INCLUDED_SOURCES \
-    ppBrowserObject.cxx \
-    ppDownloadRequest.cxx \
-    ppInstance.cxx \
-    ppPandaObject.cxx \
-    ppToplevelObject.cxx \
-    startup.cxx
- 
-  // Windows-specific options.
-  #if $[WINDOWS_PLATFORM]
-    #define WIN_RESOURCE_FILE nppanda3d.rc
-    #define LINKER_DEF_FILE nppanda3d.def
-    #define WIN_SYS_LIBS user32.lib shell32.lib ole32.lib
-  #endif
-
-  // Mac-specific options.
-  #if $[OSX_PLATFORM]
-    #define LINK_AS_BUNDLE 1
-    #define BUNDLE_EXT
-  #endif
-
-  #define INSTALL_HEADERS
-
-#end lib_target
-
-
-#include $[THISDIRPREFIX]nppanda3d.rc.pp

+ 0 - 55
direct/src/plugin_npapi/nppanda3d.rc.pp

@@ -1,55 +0,0 @@
-//
-// nppanda3d.rc.pp
-//
-// This file defines the script to auto-generate nppanda3d.rc at
-// ppremake time.  We use this to fill in the DLL version correctly.
-//
-// This resource file is required on Windows to load the appropriate
-// text into the DLL, so Mozilla will recognize the DLL as a plugin.
-// It also defines the MIME type supported by the plugin.
-
-#output nppanda3d.rc notouch
-/$[]/#### Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]].
-/$[]/################################# DO NOT EDIT ###########################
-
-#$[]define APSTUDIO_READONLY_SYMBOLS
-#$[]include "winresrc.h"
-#$[]undef APSTUDIO_READONLY_SYMBOLS
-
-
-VS_VERSION_INFO VERSIONINFO
- FILEVERSION $[P3D_PLUGIN_DLL_COMMA_VERSION]
- PRODUCTVERSION $[P3D_PLUGIN_DLL_COMMA_VERSION]
- FILEFLAGSMASK 0x3fL
-#$[]ifdef _DEBUG
- FILEFLAGS 0x1L
-#$[]else
- FILEFLAGS 0x0L
-#$[]endif
- FILEOS 0x40004L
- FILETYPE 0x2L
- FILESUBTYPE 0x0L
-BEGIN
-    BLOCK "StringFileInfo"
-    BEGIN
-        BLOCK "040904e4"
-        BEGIN
-            VALUE "FileDescription", "Runs 3-D games and interactive applets\0"
-            VALUE "FileVersion", "$[P3D_PLUGIN_DLL_DOT_VERSION]"
-            VALUE "LegalTrademarks", "\0"
-            VALUE "MIMEType", "application/x-panda3d\0"
-            VALUE "FileExtents", "p3d\0"
-            VALUE "FileOpenName", "Panda3D applet\0"
-            VALUE "OriginalFilename", "nppanda3d.dll\0"
-            VALUE "ProductName", "Panda3D Game Engine Plug-In $[P3D_PLUGIN_VERSION_STR]\0"
-            VALUE "ProductVersion", "$[P3D_PLUGIN_DLL_DOT_VERSION]"
-        END
-    END
-    BLOCK "VarFileInfo"
-    BEGIN
-        VALUE "Translation", 0x409, 1252
-    END
-END
-
-#end nppanda3d.rc
-

+ 0 - 142
direct/src/plugin_standalone/Sources.pp

@@ -1,142 +0,0 @@
-// This directory contains the code for the panda3d.exe executable,
-// the "standalone" part of the Panda3D plugin/runtime system.  Define
-// HAVE_P3D_PLUGIN in your Config.pp to build it.
-
-#define BUILD_DIRECTORY $[and $[HAVE_P3D_PLUGIN],$[HAVE_OPENSSL],$[HAVE_ZLIB]]
-
-#begin bin_target
-  #define USE_PACKAGES openssl zlib
-  #define TARGET panda3d
-
-  #define LOCAL_LIBS plugin_common
-
-  #define OTHER_LIBS \
-    p3prc:c p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3pandabase:c p3downloader:c p3express:c pandaexpress:m \
-    p3pystub p3tinyxml
-
-  #define OSX_SYS_FRAMEWORKS Foundation AppKit Carbon
-
-  #define SOURCES \
-    panda3dBase.cxx panda3dBase.h panda3dBase.I \
-    panda3d.cxx panda3d.h panda3d.I \
-    panda3dMain.cxx
-
-  #define WIN_RESOURCE_FILE panda3d.rc
-  #define WIN_SYS_LIBS user32.lib gdi32.lib shell32.lib ole32.lib
-
-#end bin_target
-
-#begin bin_target
-  // On Windows, we also need to build panda3dw.exe, the non-console
-  // version of panda3d.exe.
-
-  #define BUILD_TARGET $[WINDOWS_PLATFORM]
-  #define USE_PACKAGES openssl zlib
-  #define TARGET panda3dw
-
-  #define LOCAL_LIBS plugin_common
-
-  #define OTHER_LIBS \
-    p3prc:c p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3pandabase:c p3downloader:c p3express:c pandaexpress:m \
-    p3pystub p3tinyxml
-
-  #define OSX_SYS_FRAMEWORKS Foundation AppKit Carbon
-
-  #define SOURCES \
-    panda3dBase.cxx panda3dBase.h panda3dBase.I \
-    panda3d.cxx panda3d.h panda3d.I \
-    panda3dWinMain.cxx
-
-  #define WIN_RESOURCE_FILE panda3d.rc
-  #define WIN_SYS_LIBS user32.lib gdi32.lib shell32.lib ole32.lib
-
-#end bin_target
-
-#begin bin_target
-  // On Mac, we'll build panda3d_mac, which is the Carbon-friendly
-  // application we wrap in a bundle, for picking a p3d file from
-  // Finder.
-
-  #define BUILD_TARGET $[OSX_PLATFORM]
-  #define USE_PACKAGES openssl zlib
-  #define TARGET panda3d_mac
-
-  #define LOCAL_LIBS plugin_common
-
-  #define OTHER_LIBS \
-    p3prc:c p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3pandabase:c p3downloader:c p3express:c pandaexpress:m \
-    p3pystub p3tinyxml
-
-  #define OSX_SYS_FRAMEWORKS Foundation AppKit Carbon
-
-  #define SOURCES \
-    panda3dBase.cxx panda3dBase.h panda3dBase.I \
-    panda3d.cxx panda3d.h panda3d.I \
-    panda3dMac.cxx panda3dMac.h panda3dMac.I
-
-#end bin_target
-
-#begin bin_target
-  #define USE_PACKAGES openssl zlib
-  #define TARGET p3dembed
-  #define LOCAL_LIBS plugin_common p3d_plugin_static
-
-  // We need to define this, even though we are not creating a DLL,
-  // because we need the symbols to be "exported" so we can find them in
-  // our own address space.
-  #define EXTRA_CDEFS BUILDING_P3D_PLUGIN
-
-  #define OTHER_LIBS \
-    p3prc:c p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3pandabase:c p3downloader:c p3express:c pandaexpress:m \
-    p3pystub p3tinyxml \
-    $[if $[OSX_PLATFORM],p3subprocbuffer]
-
-  #define OSX_SYS_FRAMEWORKS Foundation AppKit Carbon
-
-  #define SOURCES \
-    panda3dBase.cxx panda3dBase.h panda3dBase.I \
-    p3dEmbed.cxx p3dEmbedMain.cxx
-
-  #define WIN_RESOURCE_FILE panda3d.rc
-  #define WIN_SYS_LIBS user32.lib gdi32.lib shell32.lib comctl32.lib msimg32.lib ole32.lib
-
-#end bin_target
-
-#begin bin_target
-  // On Windows, we also need to build p3dembedw.exe, the non-console
-  // version of p3dembed.exe.
-
-  #define BUILD_TARGET $[WINDOWS_PLATFORM]
-  #define USE_PACKAGES openssl zlib
-  #define TARGET p3dembedw
-  #define LOCAL_LIBS plugin_common p3d_plugin_static
-
-  // We need to define this, even though we are not creating a DLL,
-  // because we need the symbols to be "exported" so we can find them in
-  // our own address space.
-  #define EXTRA_CDEFS BUILDING_P3D_PLUGIN P3DEMBEDW
-
-  #define OTHER_LIBS \
-    p3prc:c p3dtoolutil:c p3dtoolbase:c p3dtool:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3pandabase:c p3downloader:c p3express:c pandaexpress:m \
-    p3pystub p3tinyxml
-
-  #define SOURCES \
-    panda3dBase.cxx panda3dBase.h panda3dBase.I \
-    p3dEmbed.cxx p3dEmbedMain.cxx
-
-  #define WIN_RESOURCE_FILE panda3d.rc
-  #define WIN_SYS_LIBS user32.lib gdi32.lib shell32.lib comctl32.lib msimg32.lib ole32.lib
-
-#end bin_target
-
-#include $[THISDIRPREFIX]panda3d.rc.pp

+ 0 - 53
direct/src/plugin_standalone/panda3d.rc.pp

@@ -1,53 +0,0 @@
-//
-// panda3d.rc.pp
-//
-// This file defines the script to auto-generate panda3d.rc at
-// ppremake time.  We use this to fill in the DLL version correctly.
-//
-
-#output panda3d.rc notouch
-/$[]/#### Generated automatically by $[PPREMAKE] $[PPREMAKE_VERSION] from $[notdir $[THISFILENAME]].
-/$[]/################################# DO NOT EDIT ###########################
-
-#$[]define APSTUDIO_READONLY_SYMBOLS
-#$[]include "winresrc.h"
-#$[]undef APSTUDIO_READONLY_SYMBOLS
-
-
-VS_VERSION_INFO VERSIONINFO
- FILEVERSION $[P3D_PLUGIN_DLL_COMMA_VERSION]
- PRODUCTVERSION $[P3D_PLUGIN_DLL_COMMA_VERSION]
- FILEFLAGSMASK 0x3fL
-#$[]ifdef _DEBUG
- FILEFLAGS 0x1L
-#$[]else
- FILEFLAGS 0x0L
-#$[]endif
- FILEOS 0x40004L
- FILETYPE 0x2L
- FILESUBTYPE 0x0L
-BEGIN
-    BLOCK "StringFileInfo"
-    BEGIN
-        BLOCK "040904e4"
-        BEGIN
-            VALUE "FileDescription", "Panda3D Game Engine Runtime $[P3D_PLUGIN_VERSION_STR]\0"
-            VALUE "FileVersion", "$[P3D_PLUGIN_DLL_DOT_VERSION]"
-            VALUE "LegalTrademarks", "\0"
-            VALUE "MIMEType", "application/x-panda3d\0"
-            VALUE "FileExtents", "p3d\0"
-            VALUE "FileOpenName", "Panda3D applet\0"
-            VALUE "ProductName", "Panda3D Game Engine Runtime $[P3D_PLUGIN_VERSION_STR]\0"
-            VALUE "ProductVersion", "$[P3D_PLUGIN_DLL_DOT_VERSION]"
-        END
-    END
-    BLOCK "VarFileInfo"
-    BEGIN
-        VALUE "Translation", 0x409, 1252
-    END
-END
-
-ICON_FILE       ICON    "panda3d.ico"
-
-#end panda3d.rc
-

+ 2 - 5
direct/src/showbase/PythonUtil.py

@@ -1420,17 +1420,14 @@ def printListEnum(l):
         pass
 
 # base class for all Panda C++ objects
-# libdtoolconfig doesn't seem to have this, grab it off of PandaNode
+# libdtoolconfig doesn't seem to have this, grab it off of TypedObject
 dtoolSuperBase = None
 
 def _getDtoolSuperBase():
     global dtoolSuperBase
     from panda3d.core import TypedObject
     dtoolSuperBase = TypedObject.__bases__[0]
-    assert repr(dtoolSuperBase) == "<type 'libdtoolconfig.DTOOL_SUPER_BASE111'>" \
-        or repr(dtoolSuperBase) == "<type 'libdtoolconfig.DTOOL_SUPPER_BASE111'>" \
-        or repr(dtoolSuperBase) == "<type 'dtoolconfig.DTOOL_SUPER_BASE111'>" \
-        or repr(dtoolSuperBase) == "<type 'dtoolconfig.DTOOL_SUPER_BASE'>"
+    assert dtoolSuperBase.__name__ == 'DTOOL_SUPER_BASE'
 
 safeReprNotify = None
 

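The rewritten assertion keys off the type's __name__ rather than a brittle repr() string. As a minimal sketch (assuming a build where panda3d.core is importable; the isPandaObject helper and the PandaNode example are illustrative and not part of the patch), the shared DTOOL_SUPER_BASE type can be used to detect wrapped Panda C++ objects:

    from panda3d.core import TypedObject, PandaNode

    # The common base type shared by Panda's wrapped C++ classes.
    dtoolSuperBase = TypedObject.__bases__[0]
    assert dtoolSuperBase.__name__ == 'DTOOL_SUPER_BASE'

    def isPandaObject(obj):
        # Hypothetical helper: True if obj wraps a Panda C++ instance.
        return isinstance(obj, dtoolSuperBase)

    print(isPandaObject(PandaNode('node')))  # True
    print(isPandaObject(42))                 # False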
+ 0 - 41
direct/src/showbase/Sources.pp

@@ -1,41 +0,0 @@
-#begin lib_target
-  #define TARGET p3showbase
-  #define LOCAL_LIBS \
-    p3directbase
-  #define OTHER_LIBS \
-    p3pgraph:c p3pgraphnodes:c p3gsgbase:c p3gobj:c p3mathutil:c p3pstatclient:c \
-    p3downloader:c p3pandabase:c p3pnmimage:c p3prc:c \
-    p3pipeline:c p3cull:c \
-    $[if $[HAVE_NET],p3net:c] $[if $[WANT_NATIVE_NET],p3nativenet:c] \
-    p3display:c p3linmath:c p3event:c p3putil:c panda:m \
-    p3express:c pandaexpress:m \
-    p3interrogatedb:c p3dconfig:c p3dtoolconfig:m \
-    p3dtoolutil:c p3dtoolbase:c p3dtool:m
-
-  #define WIN_SYS_LIBS \
-    User32.lib
-
-  #define SOURCES \
-    showBase.cxx showBase.h \
-    $[if $[IS_OSX],showBase_assist.mm]
-
-  #define IGATESCAN all
-#end lib_target
-
-// Define a Python extension module for operating on frozen modules.
-// This is a pure C module; it involves no Panda code or C++ code.
-#begin lib_target
-  #define BUILD_TARGET $[HAVE_PYTHON]
-  #define TARGET p3extend_frozen
-  #define LIB_PREFIX
-  #if $[OSX_PLATFORM]
-    #define LINK_AS_BUNDLE 1
-    #define BUNDLE_EXT .so
-  #endif
-  #if $[WINDOWS_PLATFORM]
-    #define DYNAMIC_LIB_EXT .pyd
-  #endif
-
-  #define SOURCES extend_frozen.c
-#end lib_target
-

+ 0 - 0
direct/src/showutil/Sources.pp


+ 0 - 2
direct/src/stdpy/Sources.pp

@@ -1,2 +0,0 @@
-// For now, since we are not installing Python files, this file can
-// remain empty.

+ 0 - 0
direct/src/task/Sources.pp


+ 0 - 3
direct/src/tkpanels/Sources.pp

@@ -1,3 +0,0 @@
-// For now, since we are not installing Python files, this file can
-// remain empty.
-

+ 0 - 3
direct/src/tkwidgets/Sources.pp

@@ -1,3 +0,0 @@
-// For now, since we are not installing Python files, this file can
-// remain empty.
-

+ 0 - 3
direct/src/wxwidgets/Sources.pp

@@ -1,3 +0,0 @@
-// For now, since we are not installing Python files, this file can
-// remain empty.
-

+ 0 - 63
dmodels/Package.pp

@@ -1,63 +0,0 @@
-//
-// Package.pp
-//
-// This file defines certain configuration variables that are to be
-// written into the various make scripts.  It is processed by ppremake
-// (along with the Sources.pp files in each of the various
-// directories) to generate build scripts appropriate to each
-// environment.
-//
-// This is the package-specific file, which should be at the top of
-// every source hierarchy.  It generally gets the ball rolling, and is
-// responsible for explicitly including all of the relevant Config.pp
-// files.
-
-
-
-// What is the name and version of this source tree?
-#if $[eq $[PACKAGE],]
-  #define PACKAGE dmodels
-  #define VERSION 0.80
-#endif
-
-
-// Where should we find the DIRECT source directory?
-#if $[or $[CTPROJS],$[DIRECT]]
-  // If we are presently attached, use the environment variable.
-  #define DIRECT_SOURCE $[DIRECT]
-  #if $[eq $[DIRECT],]
-    #error You seem to be attached to some trees, but not DIRECT!
-  #endif
-#else
-  // Otherwise, if we are not attached, we guess that the source is a
-  // sibling directory to this source root.
-  #define DIRECT_SOURCE $[standardize $[TOPDIR]/../direct]
-#endif
-
-// Where should we install DMODELS?
-#if $[DMODELS_INSTALL]
-  #define DMODELS_INSTALL $[unixfilename $[DMODELS_INSTALL]]
-#elif $[or $[CTPROJS],$[DMODELS]]
-  #define DMODELS_INSTALL $[DMODELS]/built
-  #if $[eq $[DMODELS],]
-    #error You seem to be attached to some trees, but not DMODELS!
-  #endif
-#else
-  #defer DMODELS_INSTALL $[INSTALL_DIR]
-#endif
-
-
-// Define the inter-tree dependencies.
-#define NEEDS_TREES $[NEEDS_TREES] direct
-
-
-// Also get the DIRECT Package file and everything that includes.
-#if $[not $[isfile $[DIRECT_SOURCE]/Package.pp]]
-  #printvar DIRECT_SOURCE
-  #error DIRECT source directory not found from dmodels!  Are you attached properly?
-#endif
-
-#include $[DIRECT_SOURCE]/Package.pp
-
-// Define some global variables for this tree.
-#define FLT2EGG_OPTS -no -uo ft

+ 0 - 4
dmodels/Sources.pp

@@ -1,4 +0,0 @@
-// This is the toplevel directory for a models tree.
-
-#define DIR_TYPE models_toplevel
-#define TEXATTRIB_DIR src/maps

+ 0 - 4
dmodels/src/Sources.pp

@@ -1,4 +0,0 @@
-// This is a group directory: a directory level above a number of
-// source subdirectories.
-
-#define DIR_TYPE models_group

+ 0 - 4
dmodels/src/audio/Sources.pp

@@ -1,4 +0,0 @@
-// This is a group directory: a directory level above a number of
-// source subdirectories.
-
-#define DIR_TYPE models_group

+ 0 - 9
dmodels/src/audio/sfx/Sources.pp

@@ -1,9 +0,0 @@
-#define DIR_TYPE models
-#define INSTALL_TO audio/sfx
-
-// General GUI Sounds
-#begin install_audio
-  #define SOURCES \
-    GUI_rollover.wav GUI_click.wav
-#end install_audio
-

+ 0 - 8
dmodels/src/fonts/Sources.pp

@@ -1,8 +0,0 @@
-#define DIR_TYPE models
-#define INSTALL_TO models/fonts
-
-
-//#begin install_egg
-//  #define SOURCES Comic.egg
-//#end install_egg
-

+ 0 - 14
dmodels/src/gui/Sources.pp

@@ -1,14 +0,0 @@
-#define DIR_TYPE models
-#define INSTALL_TO models/gui
-
-#begin flt_egg
-  #define SOURCES $[wildcard *.flt]
-#end flt_egg
-
-#begin install_egg
-  #define SOURCES $[patsubst %.flt,%.egg,$[wildcard *.flt]]
-#end install_egg
-
-#begin install_egg
-  #define SOURCES radio_button_gui.egg
-#end install_egg

+ 0 - 30
dmodels/src/icons/Sources.pp

@@ -1,30 +0,0 @@
-#define DIR_TYPE models
-#define INSTALL_TO icons
-
-#define fltfiles $[wildcard *.flt]
-#begin flt_egg
-  #define SOURCES $[fltfiles]
-#end flt_egg
-
-#define mayafiles $[wildcard *.mb]
-#begin maya_egg
-  #define SOURCES $[mayafiles]
-#end maya_egg
-
-#define eggfiles $[wildcard *.egg]
-#begin egg
-  #define SOURCES $[eggfiles]
-#end egg
-
-#begin install_icons
-  #define SOURCES \
-      folder.gif minusnode.gif openfolder.gif plusnode.gif python.gif \
-      sphere2.gif tk.gif dot_black.gif dot_blue.gif dot_green.gif \
-      dot_red.gif dot_white.gif
-#end install_icons
-
-#begin install_egg
-  #define SOURCES \ 
-    $[fltfiles:%.flt=%.egg] $[mayafiles:%.mb=%.egg] \
-    $[eggfiles] 
-#end install_egg

+ 0 - 13
dmodels/src/level_editor/Sources.pp

@@ -1,13 +0,0 @@
-#define DIR_TYPE models
-#define INSTALL_TO models/level_editor
-
-#begin flt_egg
-  #define SOURCES $[wildcard *.flt]
-#end flt_egg
-
-
-// #begin install_egg
-//   #define SOURCES \
-//    donalds_dock_layout.egg minnies_melody_land_layout.egg \
-//    the_burrrgh_layout.egg toontown_central_layout.egg
-// #end install_egg

+ 0 - 3
dmodels/src/maps/Sources.pp

@@ -1,3 +0,0 @@
-#define DIR_TYPE models
-
-// Nothing to install here.

+ 0 - 22
dmodels/src/misc/Sources.pp

@@ -1,22 +0,0 @@
-#define DIR_TYPE models
-#define INSTALL_TO models/misc
-
-#begin flt_egg
-  #define SOURCES $[wildcard *.flt]
-#end flt_egg
-
-
-#begin install_egg
-  #define UNPAL_SOURCES \
-    camera.egg rgbCube.egg xyzAxis.egg
-#end install_egg
-
-#begin install_egg
-  #define SOURCES \
-    gridBack.egg objectHandles.egg sphere.egg smiley.egg lilsmiley.egg
-#end install_egg
-
-#begin install_egg
-  #define SOURCES \
-    fade_sphere.egg fade.egg iris.egg
-#end install_egg

+ 0 - 41
doc/Config.pp.sample

@@ -1,41 +0,0 @@
-///////////////////////////////////////////////////////////////////////
-// Caution: there are two separate, independent build systems:
-// 'makepanda', and 'ppremake'.  Use one or the other, do not attempt
-// to use both.  This file is part of the 'ppremake' system.
-///////////////////////////////////////////////////////////////////////
-
-// This is a sample Config.pp that you may wish to use for your own
-// needs.  For a longer list of configuration variables that you may
-// set in your own Config.pp file, see dtool/Config.pp.
-
-// What level of compiler optimization/debug symbols should we build?
-// The various optimize levels are defined as follows:
-//
-//   1 - No compiler optimizations, full debug symbols
-//   2 - Full compiler optimizations, full debug symbols
-//         (if the compiler supports this)
-//   3 - Full compiler optimizations, no debug symbols
-//   4 - Full optimizations, no debug symbols, and asserts removed
-//
-// Setting this has no effect when BUILD_TYPE is "stopgap".  In this
-// case, the compiler optimizations are selected by setting the
-// environment variable OPTIMIZE accordingly at compile time.
-#define OPTIMIZE 3
-
-
-
-// If you have installed the DirectX SDK in a particular location on
-// your machine (currently, Panda requires DirectX 8.1 in order to
-// build the DirectX interfaces), then you must indicate that location
-// here.  These two variables point to the include and lib
-// directories, respectively.
-
-// Note the use of the Panda filename convention, with forward slashes
-// instead of backslashes, and /c/ instead of c:/ .
-#define DX9_IPATH /c/DXSDK-DEC2006/include
-#define DX9_LPATH /c/DXSDK-DEC2006/lib
-
-// If for any reason you need to turn off the DX9 builds,
-// you can uncomment the following line.  (Defining a
-// variable to an empty string means setting it false.)
-//#define HAVE_DX9

+ 1 - 9
doc/INSTALL-MK → doc/INSTALL

@@ -1,10 +1,3 @@
-
-///////////////////////////////////////////////////////////////////////
-// Caution: there are two separate, independent build systems:
-// 'makepanda', and 'ppremake'.  Use one or the other, do not attempt
-// to use both.  This file is part of the 'makepanda' system.
-///////////////////////////////////////////////////////////////////////
-
 Panda3D Install --- using the 'makepanda' system.
 
 NOTE: As the makepanda build system changes more frequently
@@ -44,9 +37,8 @@ directory will contain the following subdirectories:
     models	- piece 1, source code from github
     panda	- piece 1, source code from github
     pandatool	- piece 1, source code from github
-    ppremake	- piece 1, source code from github
     contrib     - piece 1, source code from github
-    samples	- piece 1, sample programs
+    samples	- piece 1, source code from github
     thirdparty  - piece 2, third party tools
 
 

+ 0 - 885
doc/INSTALL-PP

@@ -1,885 +0,0 @@
-
-///////////////////////////////////////////////////////////////////////
-// Caution: there are two separate, independent build systems:
-// 'makepanda', and 'ppremake'.  Use one or the other, do not attempt
-// to use both.  This file is part of the 'ppremake' system.
-///////////////////////////////////////////////////////////////////////
-
-Panda3D Install --- using the 'ppremake' system.
-
-This document describes how to compile and install Panda 3D on a
-system for the first time.  Panda is a complex project and is not
-trivial to install, although it is not really very difficult.  Please
-do take the time to read this document before starting.
-
-Panda is known to build successfully on Linux, SGI Irix, and Windows
-NT/2000/XP.  It should also be easily portable to other Unix-based
-OpenGL systems with little or no changes (please let us know if you
-try this).  When compiled by Windows NT/2000/XP, it will then run on a
-Windows 98 system, but we have found that Windows 98 is not itself
-stable enough to compile the codebase without crashing.
-
-Before you begin to compile Panda, there are a number of optional
-support libraries that you may wish to install.  None of these are
-essential; Panda will build successfully without them, but possibly
-without some functionality.
-
-* Python.  Panda is itself a C++ project, but it can generate a
-  seamless Python interface layer to its C++ objects and function
-  calls.  Since Python is an interpreted language with a command
-  prompt, this provides an excellent way to get interactive control
-  over the 3-D environment.  However, it is not necessary to use the
-  Python interface; Panda is also perfectly useful without Python, as
-  a C++ 3-D library.
-
-  Other scripting language interfaces are possible, too, in theory.
-  Panda can generate an interface layer for itself that should be
-  accessible by any scripting language that can make C function calls
-  to an external library.  We have used this in the past, for
-  instance, to interface Panda with Squeak, an implementation of
-  Smalltalk.  At the present, the Python interface is the only one we
-  actively maintain.  We use Python 2.2, but almost any version should
-  work; you can get Python at http://www.python.org .
-
-* NSPR.  This is the Netscape Portable Runtime library, an OS
-  compatibility layer written by the folks at Mozilla for support of
-  the Netscape browser on different platforms.  Panda takes advantage
-  of NSPR to implement threading and network communications.  At the
-  present, if you do not have NSPR available Panda will not be able to
-  fork threads and will not provide a networking interface.  Aside
-  from that, the PStats analysis tools (which depend on networking)
-  will not be built without NSPR.  We have compiled Panda with NSPR
-  version 3 and 4.0, although other versions should also work.  You
-  can download NSPR from http://www.mozilla.org/projects/nspr/ .
-
-* VRPN, the "Virtual Reality Peripheral Network," a peripheral
-  interface library designed by UNC.  This is particularly useful for
-  interfacing Panda with external devices like trackers and joysticks;
-  without it, Panda can only interface with the keyboard and mouse.
-  You can find out about it at http://www.cs.unc.edu/Research/vrpn .
-
-* libjpeg, libtiff, libpng.  These free libraries provide support to
-  Panda for reading and writing JPEG, TIFF, and PNG image files, for
-  instance for texture images.  Even without these libraries, Panda
-  has built-in support for pbm/pgm/ppm, SGI (rgb), TGA, BMP, and a few
-  other assorted image types like Alias and SoftImage native formats.
-  Most Linux systems come with these libraries already installed, and
-  the version numbers of these libraries is not likely to be
-  important.  You can download libjpeg from the Independent JPEG group
-  at http://www.ijg.org , libtiff from SGI at
-  ftp://ftp.sgi.com/graphics/tiff , and libpng from
-  http://www.libpng.org .
-
-* zlib.  This very common free library provides basic
-  compression/decompression routines, and is the basis for the Unix
-  gzip tool (among many other things).  If available, Panda uses it to
-  enable storing compressed files within its native multifile format,
-  as well as in a few other places here and there.  It's far from
-  essential.  If you don't have it already, you can get it at
-  http://www.gzip.org/zlib .
-
-* Fmod.  This is a free sound library that our friends at CMU have
-  recently integrated into Panda.  It provides basic support for
-  playing WAV files, MP3 files, and MIDI files within Panda.  Get it
-  at http://www.fmod.org .
-
-* Freetype.  This free library provides support for loading TTF font
-  files (as well as many other types of font files) directly for
-  rendering text within Panda (using Panda's TextNode interface, as
-  well as the whole suite of DirectGui 2-d widgets in direct).  If you
-  do not have this library, you can still render text in Panda, but
-  you are limited to using fonts that have been pre-generated and
-  stored in egg files.  There are a handful of provided font files of
-  this nature in the models directory (specifically, cmr12, cmss12,
-  and cmtt12); these were generated from some of the free fonts
-  supplied with TeX.  This can be found at http://www.freetype.org ;
-  you will need at least version 2.0.
-
-* OpenSSL.  This free library provides an interface to secure SSL
-  communications (as well as a normal, unsecured TCP/IP library).  It
-  is used to implement the HTTP client code in Panda for communicating
-  with web servers and/or loading files directly from web servers, in
-  both normal http and secure https modes.  It also provides some
-  basic encryption services, allowing encrypted files to be stored in
-  metafiles (for instance).  If you do not have any need to contact
-  web servers with your Panda client, and you have no interest in
-  encryption, you do not need to install this library.  Find it at
-  http://www.openssl.org .  We used version 0.9.6 or 0.9.7, but if
-  there is a more recent version it should be fine.
-
-* FFTW, the "Fastest Fourier Transform in the West".  This free
-  whimsically-named library provides the mathematical support for
-  compressing animation tables into Panda's binary bam format.  If
-  enabled, animation tables can be compressed in a lossy form similar
-  to jpeg, which compresses the tables roughly five times better than
-  gzip alone, even at the most conservative setting.  If you don't
-  need to have particularly small animation files, you don't need this
-  library.  Get it at http://www.fftw.org .
-
-* Gtk--.  This is a C++ graphical toolkit library, and is only used
-  for one application, the PStats viewer for graphical analysis of
-  real-time performance, which is part of the pandatool package.
-  Gtk-- only compiles on Unix, and primarily Linux; it is possible to
-  compile it with considerable difficulty on Irix.  (On Windows, you
-  don't need this, since you will use the pstats viewer built in the
-  win-stats subdirectory instead.)  We have used version 1.2.1.  You
-  can find it at http://www.gtkmm.org .
-
-
-PANDA'S BUILD PHILOSOPHY 
-
-Panda is divided into a number of separate packages, each of which
-compiles separately, and each of which generally depends on the ones
-before it.  The packages are, in order:
-
-  dtool - this defines most of the build scripts and local
-    configuration options for Panda.  It also includes the program
-    "interrogate," which is used to generate the Python interface, as
-    well as some low-level libraries that are shared both by
-    interrogate and Panda.  It is a fairly small package.
-
-  panda - this is the bulk of the C++ Panda code.  It contains the 3-D
-    engine itself, as well as supporting C++ interfaces like
-    networking, audio, and device interfaces.  Expect this package to
-    take from 30 to 60 minutes to build from scratch.  You must build
-    and install dtool before you can build panda.
-
-  direct - this is the high-level Python interface to Panda.  Although
-    there is some additional C++ interface code here, most of the code
-    in this package is Python; there is no reason to install this
-    package if you are not planning on using the Python interface.
-    DIRECT is an acronym, and has nothing to do with DirectX.
-    You must build and install dtool and panda before you can build
-    direct.
-
-  pandatool - this is a suite of command-line utilities, written in
-    C++ using the Panda libraries, that provide useful support
-    functionality for Panda as a whole, like model-conversion
-    utilities.  You must build and install dtool and panda before you
-    can build pandatool, although it does not depend on direct.
-
-  pandaapp - this holds a few sample applications that link with panda
-    (and pandatool), but are not generally useful enough to justify
-    putting them in pandatool.  Most of these are not actually
-    graphical applications; they just take advantage of the various
-    support libraries (like HTTPClient) that Panda provides.  At the
-    moment, most people probably won't find anything useful here, but
-    you're welcome to browse; and we will probably add more
-    applications later.  You must build and install dtool, panda, and
-    pandatool before you can build pandaapp.
-
-In graphical form, here are the packages along with a few extras:
-
-  +------------------------------+
-  | Your Python Application Here |
-  +------------------------------+
-      |
-      |       +-----------+
-      |       | pandaapp  |
-      |       +-----------+
-      |             |           
-      V             V
-  +--------+  +-----------+  +---------------------------+
-  | direct |  | pandatool |  | Your C++ Application Here |
-  +--------+  +-----------+  +---------------------------+
-      |             |                   |
-      +-------------+-------------------/
-      V
-  +-------+
-  | panda |
-  +-------+
-      |
-      V
-  +-------+
-  | dtool |
-  +-------+
-
-The arrows above show dependency.
-
-
-Usually, these packages will be installed as siblings of each other
-within the same directory; the build scripts expect this by default,
-although other installations are possible.
-
-In order to support multiplatform builds, we do not include makefiles
-or project files with the sources.  Instead, all the compilation
-relationships are defined in a series of files distributed throughout
-the source trees, one per directory, called Sources.pp.
-
-A separate program, called ppremake ("Panda pre-make") reads the
-various Sources.pp files, as well as any local configuration
-definitions you have provided, and generates the actual makefiles that
-are appropriate for the current platform and configuration.  It is
-somewhat akin to the idea of GNU autoconf ("configure"), although it
-is both less automatic and more general, and it supports non-Unix
-platforms easily.
-
-
-HOW TO CONFIGURE PANDA FOR YOUR ENVIRONMENT
-
-When you run ppremake within a Panda source tree, it reads in a number
-of configuration variable definitions given in the file Config.pp in
-the root of the dtool package, as well as in a custom Config.pp file
-that you specify.  Many of the variables in dtool/Config.pp will
-already have definitions that are sensible for you; some will not.
-You must customize these variables before you run ppremake.
-
-Normally, rather than modifying dtool/Config.pp directly, you should
-create your own, empty Config.pp file.  By default, this file should
-be stored in the root of the Panda install directory, as specified
-when you built ppremake, but you may put it elsewhere if you prefer by
-setting the environment variable PPREMAKE_CONFIG to its full filename
-path (more on this in the platform-specific installation notes,
-below).
-
-The definitions you give in your personal Config.pp file will override
-those given in the file within dtool.  It is also possible simply to
-modify dtool/Config.pp, but this is not recommended as it makes it
-difficult to remember which customizations you have made, and makes
-installing updated versions of Panda problematic.
-
-The syntax of the Config.pp file is something like a cross between the
-C preprocessor and Makefile syntax.  The full syntax of ppremake input
-scripts is described in more detail in another document, but the most
-common thing you will need to do is set the value of a variable using
-the #define statement (or the mostly equivalent #defer statement).
-Look in dtool/Config.pp for numerous examples of this.
-
-Some of the variables you may define within the Config.pp file hold a
-true or a false value by nature.  It is important to note that you
-indicate a variable is true by defining it to some nonempty string
-(e.g. "yes" or "1"), and false by defining it to nothing.  For
-example:
-
-  #define HAVE_DX9 1
-
-Indicates you have the DirectX SDK installed, while
-
-  #define HAVE_DX9
-
-Indicates you do not.  Do not be tempted to define HAVE_DX9 to no or 0;
-since these are both nonempty strings, they are considered to
-represent true!  Also, don't try to use a pair of quotation marks to
-represent the empty string, since the quotation marks become part of
-the string (which is thus nonempty).
-
-The comments within dtool/Config.pp describe a more complete list of
-the variables you may define.  The ones that you are most likely to
-find useful are:
-
-  INSTALL_DIR - this is the prefix of the directory hierarchy into
-    which Panda should be installed.  If this is not defined, the
-    default value is compiled into ppremake.  A full description on
-    setting this parameter is given below in the section describing
-    how to build ppremake.  On Unix systems this is taken from the
-    --prefix parameter to configure (usually /usr/local/panda); for
-    Windows users it is specified in config_msvc.h, and is set to
-    C:\Panda3d unless you modify it.
-
-  OPTIMIZE - define this to 1, 2, 3, or 4.  This is not the same thing
-    as compiler optimization level; our four levels of OPTIMIZE define
-    broad combinations of compiler optimizations and debug symbols:
-
-     1 - No compiler optimizations, full debug symbols
-           Windows: debug heap
-     2 - Full compiler optimizations, debug symbols
-           Windows: debug heap
-     3 - Full compiler optimizations,
-           Unix: no debug symbols
-           Windows: non-debug heap, debug symbols available in pdb files
-     4 - Full optimizations, no debug symbols, and asserts removed
-           Windows: non-debug heap
-
-    Usually OPTIMIZE 3 is the most appropriate choice for development
-    work.  We recommend OPTIMIZE 4 only for final QA and/or
-    distribution of a shippable product, never for any development or
-    alpha testing; and we recommend OPTIMIZE levels 1 and 2 only for
-    active development of the C++ code within Panda.
-
-  PYTHON_IPATH / PYTHON_LPATH / PYTHON_LIBS - the full pathname to
-    Python header files, if Python is installed on your system.  As of
-    Python version 2.0, compiling Python interfaces doesn't require
-    linking with any special libraries, so normally PYTHON_LPATH and
-    PYTHON_LIBS are left empty.  You definitely need to set
-    PYTHON_IPATH, however, if you wish to compile Panda so that it can
-    be used from Python.
-
-  NSPR_IPATH / NSPR_LPATH / NSPR_LIBS - the full pathname to NSPR
-    header and library files, and the name of the NSPR library, if
-    NSPR is installed on your system.
-
-  VRPN_IPATH / VRPN_LPATH / VRPN_LIBS - the full pathname to VRPN
-    header and library files, and the name of the VRPN libraries, if
-    VRPN is installed on your system.
-
-  DX9_IPATH / DX9_LPATH / DX9_LIBS - the full pathname to the
-    DirectX 9 SDK header and library files, if you have installed
-    this SDK. (You must currently install this SDK in order to
-    build DirectX9 support for Panda.)
-
-  GL_IPATH / GL_LPATH / GL_LIBS - You get the idea.  (Normally, OpenGL
-    is installed in the standard system directories, so you can leave
-    GL_IPATH and GL_LPATH empty.  But if they happen to be installed
-    somewhere else on your machine, you can fill in the pathnames
-    here.)
-
-  Similar *_IPATH / *_LPATH / *_LIBS variables for other optional
-    third-party libraries.
-
-
-
-HOW TO BUILD PANDA ON A UNIX SYSTEM
-
-First, make a subdirectory to hold the Panda sources.  This can be
-anywhere you like; in these examples, we'll assume you build
-everything within a directory called "panda3d" in your home directory.
-
-  mkdir ~/panda3d
-
-You should also create the directory into which panda should be
-installed.  The default installation directory is /usr/local/panda.
-You may choose an alternate installation directory by using the
---prefix parameter to the ppremake configure script, described below.
-We recommend giving yourself write permission to this directory, so
-that you can run 'make install' and similar scripts that will need to
-write to this installation directory, without having to be root.
-  su root
-  mkdir /usr/local/panda
-  chown <your-user-name> /usr/local/panda
-  exit
-
-Whatever you choose for your installation directory, you should make
-sure the bin directory (e.g. /usr/local/panda/bin) is included on your
-search path, and the lib directory (e.g. /usr/local/panda/lib) is on
-your LD_LIBRARY_PATH.  If you use a C-shell derivative like tcsh, the
-syntax for this is:
-
-  set path=(/usr/local/panda/bin $path)
-  setenv LD_LIBRARY_PATH /usr/local/panda/lib:$LD_LIBRARY_PATH
-
-If you have a Bourne-shell derivative, e.g. bash, the syntax is:
-
-  PATH=/usr/local/panda/bin:$PATH
-  LD_LIBRARY_PATH=/usr/local/panda/lib:$LD_LIBRARY_PATH
-  export LD_LIBRARY_PATH
-
-
-You must now compile ppremake before you can begin to compile Panda
-itself.  Generally, you do something like the following:
-
-  cd ~/panda3d/ppremake
-  ./configure
-  make
-  make install
-
-If the configure script does not already exist, read the document
-BUILD_FROM_CVS.txt in the ppremake source directory.
-
-As mentioned above, the default installation directory is
-/usr/local/panda.  Thus, ppremake will install itself into
-/usr/local/panda/bin.  If you prefer, you can install Panda into
-another directory by doing something like this:
-
-  ./configure --prefix=/my/install/directory
-  make
-  make install
-
-Now you should create your personal Config.pp file, as described
-above, and customize whatever variables are appropriate.  By default,
-ppremake will look for this file in the root of the install directory,
-e.g. /usr/local/panda/Config.pp.  If you want to put it somewhere
-else, for instance in your home directory, you must set the
-PPREMAKE_CONFIG environment variable to point to it:
-
-  setenv PPREMAKE_CONFIG ~/Config.pp
-
-In bash:
-
-  PPREMAKE_CONFIG=~/Config.pp
-  export PPREMAKE_CONFIG
-
-You may find it a good idea to make this and other environment
-settings in your .cshrc or .bashrc file so that they will remain set
-for future sessions.
-
-Now you can test the configuration settings in your Config.pp file:
-
-  cd ~/panda3d/dtool
-  ppremake
-
-When you run ppremake within the dtool directory, it will generate a
-file, dtool_config.h (as well as all of the Makefiles).  This file
-will be included by all of the Panda3D sources, and reveals the
-settings of many of the options you have configured.  You should
-examine this file now to ensure that your settings have been made the
-way you expect.
-
-Note that ppremake will also try to create several subdirectories in
-the install directory, so you must have write access to the install
-directory in order for ppremake to run completely successfully.  If
-you did not choose to give yourself write access to the install
-directory, you may run ppremake as root; in this case we recommend
-running ppremake first as a normal user in order to compile, and then
-running ppremake again as root just before running make install as
-root.
-
-Now that you have run ppremake, you can build the Panda3D sources.
-Begin with dtool (the current directory):
-
-  make
-  make install
-
-Once you have successfully built and installed dtool, you can then
-build and install panda:
-
-  cd ~/panda3d/panda
-  ppremake
-  make
-  make install
-
-After installing panda, you are almost ready to run the program
-"pview," which is a basic model viewer program that demonstrates some
-Panda functionality (see HOW TO RUN PANDA, below).  Successfully
-running pview proves that Panda is installed and configured correctly
-(at least as a C++ library).
-
-If you wish, you may also build direct.  You only need to build this
-if you intend to use the Python interfaces.
-
-  cd ~/panda3d/direct
-  ppremake
-  make
-  make install
-
-And you may build pandatool.  You only need to build this if you want
-to take advantage of model conversion utilities for Panda like
-maya2egg and egg2bam, or if you want to use other tools like pstats.
-
-  cd ~/panda3d/pandatool
-  ppremake
-  make
-  make install
-
-
-
-HOW TO BUILD PANDA ON A WINDOWS SYSTEM, USING CYGWIN
-
-Cygwin is a set of third-party libraries and tools that present a very
-Unix-like environment for Windows systems.  If you prefer to use a
-Unix environment, Cygwin is the way to go.  You can download Cygwin
-for free from http://www.cygwin.com.
-
-Panda can build and run within a Cygwin environment, but it does not
-require it.  Note that Cygwin is used strictly as a build environment;
-the Cygwin compiler is not used, so no dependency on Cygwin will be
-built into Panda.  The Panda DLL's that you will generate within a
-Cygwin environment will be exactly the same as those you would
-generate in a non-Cygwin environment; once built, Panda will run
-correctly on any Win32 machine, with or without Cygwin installed.
-
-If you do not wish to install Cygwin for your build environment, see
-the instructions below.
-
-If you wish to use Cygwin, there is one important point to keep in
-mind.  Panda internally uses a Unix-like filename convention; that is,
-forward slashes (instead of backslashes) separate directory
-components, and there is no leading drive letter on any filename.
-These Unix-like filenames are mapped to Windows filenames (with drive
-letters and backslashes) when system calls are made.
-
-Cygwin also uses a Unix-like filename convention, and uses a series of
-mount commands to control the mapping of Unix filenames to Windows
-filenames.  Panda is not itself a Cygwin program, and does not read
-the Cygwin mount definitions.
-
-That's important enough it's worth repeating.  Panda is not aware of
-the Cygwin mount points.  So a Unix-like filename that makes sense to
-a Cygwin command may not be accessible by the same filename from
-within Panda.
-
-However, you can set things up so that most of the time, Cygwin and
-Panda agree, which is convenient.  To do this, it is important to
-understand how Panda maps Unix-like filenames to Windows filenames.
-
-  * Any relative pathname (that is, a pathname that does not begin
-    with a leading slash) is left unchanged, except to reverse the
-    slashes.
-
-  * Any full pathname whose topmost directory component is *not* a
-    single letter is prepended with the contents of the environment
-    variable PANDA_ROOT.
-
-  * Any full pathname whose topmost directory component *is* a single
-    letter is turned into a drive letter and colon followed by the
-    remainder of the path.  For example, /c/windows/system is turned
-    into C:\windows\system.
-
-The expectation is that most of the files you will want to access
-within Panda will all be within one directory structure, which you
-identify by setting the PANDA_ROOT variable.  Generally, when you are
-using Cygwin, you will want to set this variable to be the same thing
-as the root of your Cygwin tree.
-
-For instance, typically Cygwin installs itself in C:\Cygwin.  This
-means that when you reference the directory /usr/local/bin within
-Cygwin, you are actually referring to C:\Cygwin\usr\local\bin.  You
-should therefore set PANDA_ROOT to C:\Cygwin, so that /usr/local/bin
-within Panda will also refer to C:\Cygwin\usr\local\bin.
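-
-If it helps to see these rules spelled out as code, here is a small
-illustrative sketch in Python of the mapping described above.  This is
-not Panda's actual implementation; it merely restates the three rules,
-using the C:\Cygwin value from the example above as the default root:
-
-  # Sketch only: restates the mapping rules above, not Panda's real code.
-  def panda_to_windows(path, panda_root='C:\\Cygwin'):
-      if not path.startswith('/'):
-          # Relative pathnames are left unchanged, except for the slashes.
-          return path.replace('/', '\\')
-      parts = path.lstrip('/').split('/')
-      if len(parts[0]) == 1:
-          # A single-letter top directory becomes a drive letter.
-          return parts[0].upper() + ':\\' + '\\'.join(parts[1:])
-      # Otherwise, prepend PANDA_ROOT.
-      return panda_root.rstrip('\\') + '\\' + '\\'.join(parts)
-
-  # panda_to_windows('/c/windows/system')  ->  'C:\\windows\\system'
-  # panda_to_windows('/usr/local/bin')     ->  'C:\\Cygwin\\usr\\local\\bin'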
-
-To sum up: to use Panda within a Cygwin environment,
-
-In tcsh:
-
-  setenv PANDA_ROOT 'C:\Cygwin'
-
-or in bash:
-
-  PANDA_ROOT='C:\Cygwin'
-  export PANDA_ROOT
-
-(In fact, you do not actually have to set PANDA_ROOT if Cygwin is
-installed into C:\Cygwin, since this is Panda's default behavior if
-C:\Cygwin exists.  But it's important to understand what Panda is
-doing to remap directories, and in particular that there is no
-relationship to any actual Cygwin mount points.)
-
-There is one additional point: you will need to ensure that the Visual
-Studio command-line utilities (like cl.exe) are available on your
-path.  Set your path appropriately to point to them, if necessary (or
-run vcvars32.bat to do it for you; see the paragraph below).
-
-Follow the instructions under HOW TO BUILD PANDA FOR A UNIX
-ENVIRONMENT, above.
-
-
-
-HOW TO BUILD PANDA ON A WINDOWS SYSTEM, WITHOUT CYGWIN
-
-You will have to make sure that you installed the command-line
-utilities on your system path when you installed Visual Studio, or you
-can run the batch file vcvars32.bat to put these utilities on your
-path for the current session (this batch file is in a directory like
-c:\Program Files\Microsoft Visual Studio .Net\Vc7\bin).
-
-Microsoft provides a command-line make utility with Visual Studio
-called nmake, although it's nowhere near as robust as the GNU make
-utility provided with Cygwin.  But Panda can generate Makefiles that
-follow the nmake convention, and will do so by default if your
-ppremake was not built with the Cygwin tools.  
-
-You will need a directory for holding the installed Panda.  This can
-be anywhere you like; the default is C:\Panda3d.  If you choose to
-specify otherwise you should modify the INSTALL_DIR line in
-ppremake\config_msvc.h before you build ppremake (below).
-(Alternatively, you can leave ppremake alone and simply redefine
-INSTALL_DIR in your Config.pp file, but then you will also need to
-define the environment variable PPREMAKE_CONFIG to point to your
-Config.pp.)
-
-  md C:\Panda3d
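-
-If you do choose a different install location (say, a hypothetical
-D:\MyPanda, used here only for illustration), the override in your
-Config.pp and the corresponding environment variable would look
-something like this:
-
-  #define INSTALL_DIR D:\MyPanda
-
-  set PPREMAKE_CONFIG=D:\MyPanda\Config.pp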
-
-You will first need to build a copy of ppremake.exe.  There is a
-Microsoft VC7 project file in the ppremake directory that will build
-this.  Once it is built, copy it to the Panda bin directory (which you
-will have to make yourself).  This will be a directory called "bin"
-below the root of the installed directory you created above; for
-instance, C:\Panda3d\bin.
-
-Make sure the Panda bin and lib directories are on your path, and set
-a few environment variables for building.  We suggest creating a file
-called PandaEnv.bat to hold these commands; then you may invoke this
-batch file before every Panda session to set up your environment
-properly.  Alternatively, you may make these definitions in the
-registry.
-
-  path C:\Panda3d\bin;C:\Panda3d\lib;%PATH%
-  set PANDA_ROOT=C:\
-
-Setting PANDA_ROOT is optional; it specifies the default drive Panda
-will search for file references.  (Panda internally uses a Unix-like
-filename convention, which does not use leading drive letters.  See
-the bullet points in the Cygwin section, above, describing the rules
-Panda uses to map its Unix-like filenames to Windows filenames.)
-
-Now make a directory for building Panda.  This may be different from
-the directory, above, that holds the installed Panda files; or it may
-be the same.  In this example we assume you will be building in the
-same directory, C:\Panda3d.
-
-Now set up your personal Config.pp file to control your local
-configuration settings, as described above.  By default, ppremake will
-look for this file in the root of the install directory,
-e.g. C:\Panda3d\Config.pp; if you want to put it somewhere else you
-should define the environment variable PPREMAKE_CONFIG to the full
-path to your Config.pp.
-
-Use your favorite text editor to add the appropriate lines to your
-Config.pp to define the correct paths to the various third-party
-packages you have installed on your system.  See HOW TO CONFIGURE
-PANDA FOR YOUR ENVIRONMENT, above.
-
-  edit C:\Panda3d\Config.pp
-
-
-Now you can test the configuration settings in your Config.pp file:
-
-  C:
-  cd \Panda3d\dtool
-  ppremake
-
-When you run ppremake within the dtool directory, it will generate a
-file, dtool_config.h (as well as all of the Makefiles).  This file
-will be included by all of the Panda3D sources, and reveals the
-settings of many of the options you have configured.  You should
-examine this file now to ensure that your settings have been made the
-way you expect.
-
-Now that you have run ppremake, you can build the Panda3D sources.
-Begin with dtool (the current directory):
-
-  nmake
-  nmake install
-
-Once you have successfully built and installed dtool, you can then
-build and install panda:
-
-  cd \Panda3d\panda
-  ppremake
-  nmake
-  nmake install
-
-After installing panda, you are almost ready to run the program
-"pview," which is a basic model viewer program that demonstrates some
-Panda functionality (see HOW TO RUN PANDA, below).  Successfully
-running pview proves that Panda is now installed and configured
-correctly (at least as a C++ library).
-
-If you wish, you may also build direct.  You only need to build this
-if you intend to use the Python interfaces.
-
-  cd \Panda3d\direct
-  ppremake
-  nmake
-  nmake install
-
-And you may build pandatool.  You only need to build this if you want
-to take advantage of model conversion utilities for Panda like
-maya2egg and egg2bam, or if you want to use other tools like pstats.
-
-  cd \Panda3d\pandatool
-  ppremake
-  nmake
-  nmake install
-
-
-
-
-
-HOW TO RUN PANDA
-
-Once Panda has been successfully built and installed, you should be
-able to run pview to test that everything is working (you might need
-to type rehash first if you use csh):
-
-  pview
-
-If you get an error about some shared library or libraries not being
-found, check that your LD_LIBRARY_PATH setting (on Unix) or your PATH
-(on Windows) includes the directory in which all of the Panda
-libraries have been installed.  (This is normally $INSTALL_DIR/lib, or
-whatever you set INSTALL_DIR to, followed by "lib".  On Unix, this
-defaults to /usr/local/panda/lib.  If you have redefined
-INSTALL_LIB_DIR in your Config.pp, for instance to define Panda as a
-native Python module, you should use that directory instead.)
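-
-For example, assuming the default Unix install location mentioned
-above, a bash user could add the library directory like this (adjust
-the path if you changed INSTALL_DIR or INSTALL_LIB_DIR):
-
-  LD_LIBRARY_PATH=/usr/local/panda/lib:$LD_LIBRARY_PATH
-  export LD_LIBRARY_PATH
-
-or, on Windows:
-
-  path C:\Panda3d\lib;%PATH%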
-
-If all goes well, pview should open up a window with a blue triangle.
-You can use the mouse to move the triangle around.  You can also pass
-on the command line the name of an egg or bam file, if you have one
-(look in the models directory for some sample egg files), and pview
-will load up and display the model.
-
-
-There are several files in the $INSTALL_DIR/etc directory with the
-filename extension .prc; these are Panda Runtime Configuration files.
-These are different from the Config.pp file, which controls the way
-Panda is compiled and is only used at build time.  The prc files are
-read in every time Panda is started and control the way Panda behaves
-at runtime.
-
-The system-defined prc files begin with digits, so that they sort to
-the top of the list and are read first (and so that you may define one
-or more additional files that are read afterwards and that will
-therefore override the values specified in these system files).  The
-digits also imply an ordering between the prc files.  We recommend
-that you name your own prc file(s) beginning with letters, unless for
-some reason you need a file to be loaded before one of the
-system-defined prc files.
-
-We suggest creating a file in $INSTALL_DIR/etc called Config.prc, into
-which you will put your own custom configuration options.  For
-instance, if you want to run using OpenGL instead of the Windows
-default of DirectX9, you can add the line:
-
-load-display pandagl
-
-to your Config.prc file.  If you choose not to do this at this time,
-you can just leave this file empty for now; however, we do recommend
-creating at least an empty Config.prc file as a placeholder into which
-you can add your custom configuration options later.
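-
-As a concrete (and purely illustrative) starting point, a minimal
-Config.prc might contain nothing more than lines like the following.
-The load-display line is described above; win-size and window-title
-are two other commonly used runtime variables, shown here only as
-examples:
-
-  load-display pandagl
-  win-size 800 600
-  window-title My Panda3D Program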
-
-The complete list of available configuration options is very large and
-is not fully documented; but there are other documents that list
-several particularly useful config variables.  These are sometimes
-referred to as "Configrc" variables because an older Panda convention
-named this file Configrc instead of Config.prc.
-
-If you want to load Config.prc from other than the compiled-in default
-directory of $INSTALL_DIR/etc, set the environment variable:
-
-  PRC_DIR=/my/home/directory
-  export PRC_DIR
-
-Where /my/home/directory is the name of your home directory (or
-wherever you put the Config.prc file).  Note that if you redefine
-PRC_DIR, you will no longer automatically load the standard prc files
-that were installed into $INSTALL_DIR/etc (so you should consider
-copying these files into your PRC_DIR directory).  It is possible to
-configure Panda to search for prc files in more than one directory,
-but that's a little more complicated and is outside the scope of this
-document.
-
-
-
-
-
-HOW TO BUILD THE PYTHON INTERFACES
-
-You may stop now if you only intend to use Panda as a C++ library.
-However, if you wish to use Panda from within Python, you must now
-generate the Python interfaces.
-
-There are two parts to the Python interface for Panda.  The first part
-is a series of wrapper functions that are compiled into the Panda
-libraries themselves, along with associated *.in files that describe
-the class hierarchy.  If you defined PYTHON_IPATH correctly in your
-Config.pp file, then Python should have been detected by ppremake, and
-it would have generated makefiles to build these wrappers
-automatically.  (You would have seen the program "interrogate" running
-within each directory as panda was building, and you will have a
-number of *.in files now installed into $INSTALL_DIR/etc.)
-
-If, for some reason, the interrogate program did not run, perhaps
-because you defined an invalid directory in PYTHON_IPATH, you can go
-back and fix this now, and simply re-run ppremake and make install
-again in each of dtool, panda, and direct.
-
-To make Panda accessible to Python, you will need to add
-$INSTALL_DIR/lib to your PYTHONPATH variable, e.g.:
-
-  setenv PYTHONPATH ${PYTHONPATH}:/usr/local/panda/lib
-
-Or, on Windows:
-
-  set PYTHONPATH=%PYTHONPATH%;C:\Panda3d\lib
-
-We recommend the PYTHONPATH approach for most users, since it keeps
-all of the Panda files within one directory and doesn't clutter up the
-Python distribution.  However, if you only intend to use Panda from
-Python, and especially if you want to make it accessible to multiple
-users, it may be more attractive to install the Panda libraries as a
-standard Python module, so that it is not necessary to modify your
-PYTHONPATH variable; see "Installing Panda as a standard Python
-module", below.
-
-The second part to the Python interface is a series of generated
-Python wrapper classes, for each C++ class detected by interrogate.
-These classes must be generated after all of the C++ code has been
-compiled and installed.  Execute the following command (you might need
-to type rehash first if you use csh):
-
-  genPyCode
-
-This is a script that was installed into $INSTALL_DIR/bin as part of
-the build of direct.  It invokes Python to read the *.in files
-generated by interrogate, and generates the appropriate wrapper
-functions, which are then written into $INSTALL_DIR/lib/pandac.
-(There will be several hundred generated Python modules, which are
-normally "squeezed" into a single file called PandaModules.pyz using
-PythonWare's SqueezeTool.  This squeeze step gives a significant
-load-time speedup, especially on Windows; but if it causes problems,
-you can use the option -n, e.g. 'genPyCode -n', to avoid it.)
-
-You will need to re-run this script only if the Panda interface
-changes, e.g. if a class is added or a method's parameters change.
-You should certainly re-run it any time you update and install a new
-version of Panda.
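-
-A quick way to check that genPyCode succeeded is to import one of the
-generated modules directly.  Assuming the standard pandac layout
-described above, and with PYTHONPATH and your library path set as
-described, something like this should run without error:
-
-  >>> from pandac.PandaModules import NodePath
-
-If the import completes without an ImportError, the generated
-wrappers are in place.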
-
-
-Installing Panda as a native Python module
-
-Panda can be optionally configured to install its run-time interfaces
-into the Python installation directory, instead of into the normal
-$INSTALL_DIR/lib directory.  This means you can run Panda from Python
-without having to set your PYTHONPATH variable, but it does clutter up
-your Python distribution a bit.
-
-To do this, simply add something like the following line to your
-Config.pp:
-
-  #define INSTALL_LIB_DIR /usr/lib/python2.2/site-packages
-
-Where you give the actual path to the site-packages directory for your
-particular installation of Python.  On Windows, this will probably
-look something like this:
-
-  #define INSTALL_LIB_DIR C:\Python22\Lib\site-packages
-
-Then go back and re-run ppremake and make install in each of dtool,
-panda, and direct, and then re-run genPyCode, to install the Panda
-libraries and Python files directly into the Python site-packages
-directory.
-
-You may also need to set your LD_LIBRARY_PATH (on Unix) or PATH (on
-Windows) to reference this new directory instead of $INSTALL_DIR/lib,
-especially if you want to be able to run any of the Panda standalone
-programs occasionally, like pview or any of the model converters.
-
-Unix users should note that you must have write permission to the
-site-packages directory in order to install files there.  You may
-choose to run these install steps (ppremake, make install, genPyCode)
-as root to avoid this problem.  If you encounter difficulty running
-genPyCode as root, make sure that you still have LD_LIBRARY_PATH
-defined appropriately once you have become root.
-
-
-Testing the Python interface
-
-Assuming that you have already set up your Config.prc file and tested
-that pview works, as described above in HOW TO RUN PANDA, you should
-now be ready to try to run Panda from within Python.  Start up a
-Python shell and type the following command:
-
-  Python 2.2.2 (#37, Feb 10 2003, 18:00:06) [MSC 32 bit (Intel)] on win32
-  Type "help", "copyright", "credits" or "license" for more information.
-  >>> import direct.directbase.DirectStart
-
-You should see a graphics window come up, very similar to the one you
-saw when you ran pview.  To load a particular model file into the
-scene, try something like this:
-
-  >>> m = loader.loadModel('/c/Panda3d/models/smiley.egg')
-  >>> m.reparentTo(render)
-  >>> run()
-
-Note that Panda expects a forward-slash convention for pathnames, with
-no leading drive letter, even on a Windows system.  See the full
-description of how Panda maps these pathnames to Windows pathnames in
-HOW TO BUILD PANDA ON A WINDOWS SYSTEM, USING CYGWIN, above.
-
-You can now move the scene around with the mouse, just as in pview
-(you may need to pull the camera back by dragging upwards while
-holding down the right mouse button in order to see the model).
-
-Congratulations!  Panda3D is now successfully installed.  See the
-online documentation available at http://www.etc.cmu.edu/panda3d/ for
-more help about where to go next.

+ 0 - 374
dtool/Config.Android.pp

@@ -1,374 +0,0 @@
-//
-// Config.Android.pp
-//
-// This file defines some custom config variables for the Android
-// platform.  It makes some initial guesses about compiler features,
-// etc.
-//
-
-// *******************************************************************
-// NOTE: you should not attempt to copy this file verbatim as your own
-// personal Config.pp file.  Instead, you should start with an empty
-// Config.pp file, and add lines to it when you wish to override
-// settings given in here.  In the normal ppremake system, this file
-// will always be read first, and then your personal Config.pp file
-// will be read later, which gives you a chance to override the
-// default settings found in this file.  However, if you start by
-// copying the entire file, it will be difficult to tell which
-// settings you have customized, and it will be difficult to upgrade
-// to a subsequent version of Panda.
-// *******************************************************************
-
-// Android is a Linux distribution.
-#define IS_LINUX 1
-
-// These libraries are provided by the Android NDK.
-#define ZLIB_IPATH
-#define ZLIB_LPATH
-#define ZLIB_LIBS z
-#define HAVE_ZLIB 1
-
-#define GLES_IPATH
-#define GLES_LPATH
-#define GLES_LIBS GLESv1_CM
-#define HAVE_GLES 1
-
-#define GLES2_IPATH
-#define GLES2_LPATH
-#define GLES2_LIBS GLESv2
-#define HAVE_GLES2 1
-
-#define EGL_IPATH
-#define EGL_LPATH
-#define EGL_LIBS EGL
-#define HAVE_EGL 1
-
-// We don't have these, of course, so let's disable
-// them for convenience in case they were autodetected.
-#define HAVE_DX9
-#define HAVE_CG
-
-// Compiler flags
-#defer TOOLCHAIN_PATH $[ANDROID_NDK_HOME]/toolchains/$[ANDROID_TOOLCHAIN]/prebuilt/windows/bin
-#defer TOOLCHAIN_PREFIX $[if $[eq $[ANDROID_ABI],x86],i686-linux-android,$[ANDROID_ABI]]
-#defer CC $[TOOLCHAIN_PATH]/$[TOOLCHAIN_PREFIX]-gcc
-#defer CXX $[TOOLCHAIN_PATH]/$[TOOLCHAIN_PREFIX]-g++
-#defer AR $[TOOLCHAIN_PATH]/$[TOOLCHAIN_PREFIX]-ar
-#define C++FLAGS_GEN -fno-exceptions -fno-rtti
-
-#defer SYSROOT $[ANDROID_NDK_HOME]/platforms/$[ANDROID_PLATFORM]/arch-$[ANDROID_ARCH]
-#defer SYSROOT_FLAGS --sysroot=$[subst \,/,$[osfilename $[SYSROOT]]]
-
-#defer EXTRA_IPATH $[ANDROID_NDK_HOME]/sources/android/native_app_glue $[SYSROOT]/usr/include
-#defer EXTRA_LPATH $[SYSROOT]/usr/lib
-#defer EXTRA_LIBS $[if $[eq $[BUILD_TYPE],android],,c m]
-
-// Define the CFLAGS and LDFLAGS settings for the various architectures.
-#defer ANDROID_arm_CFLAGS\
- -fpic\
- -ffunction-sections\
- -funwind-tables\
- -fstack-protector\
- -D__ARM_ARCH_5__ -D__ARM_ARCH_5T__\
- -D__ARM_ARCH_5E__ -D__ARM_ARCH_5TE__\
- $[if $[eq $[ANDROID_ABI],armeabi-v7a],-march=armv7-a -mfloat-abi=softfp -mfpu=vfpv3-d16,-march=armv5te -mtune=xscale -msoft-float]
-
-#defer ANDROID_arm_LDFLAGS -march=armv7-a -Wl,--fix-cortex-a8
-
-#define ANDROID_mips_CFLAGS\
- -fpic\
- -fno-strict-aliasing\
- -finline-functions\
- -ffunction-sections\
- -funwind-tables\
- -fmessage-length=0\
- -fno-inline-functions-called-once\
- -fgcse-after-reload\
- -frerun-cse-after-loop\
- -frename-registers
-
-#define ANDROID_mips_LDFLAGS
-
-#define ANDROID_x86_CFLAGS\
- -ffunction-sections\
- -funwind-tables\
- -fstack-protector
-
-#define ANDROID_x86_LDFLAGS
-
-// Select the flags for our architecture and add some common ones.
-#defer ANDROID_CFLAGS $[ANDROID_$[ANDROID_ARCH]_CFLAGS] -DANDROID -Wa,$[if $[ANDROID_DISABLE_NX],--execstack,--noexecstack]
-#defer ANDROID_LDFLAGS -Wl,--no-undefined\
- -Wl,-z,$[if $[ANDROID_DISABLE_NX],execstack,noexecstack]\
- -Wl,-z,$[if $[ANDROID_DISABLE_RELRO],norelro,relro]\
- -Wl,-z,$[if $[ANDROID_DISABLE_RELRO],lazy,now]
-
-// How to compile a C or C++ file into a .o file.  $[target] is the
-// name of the .o file, $[source] is the name of the source file,
-// $[ipath] is a space-separated list of directories to search for
-// include files, and $[flags] is a list of additional flags to pass
-// to the compiler.
-#defer os_ipath $[subst \,/,$[osfilename $[ipath]]]
-#defer COMPILE_C $[CC] $[SYSROOT_FLAGS] $[ANDROID_CFLAGS] $[CFLAGS_GEN] $[flags] $[os_ipath:%=-I%] -c $[source] -o $[target]
-#defer COMPILE_C++ $[CXX] $[SYSROOT_FLAGS] $[ANDROID_CFLAGS] $[C++FLAGS_GEN] $[flags] $[os_ipath:%=-I%] -c $[source] -o $[target]
-
-// What flags should be passed to both C and C++ compilers to enable
-// debug symbols?  This will be supplied when OPTIMIZE (above) is set
-// to 1, 2, or 3.
-#defer DEBUGFLAGS -g
-
-// What flags should be passed to both C and C++ compilers to enable
-// compiler optimizations?  This will be supplied when OPTIMIZE
-// (above) is set to 2, 3, or 4.
-#defer OPTFLAGS -O2
-
-// By convention, any source file that contains the string _no_opt_ in
-// its filename won't have the above compiler optimizations run for it.
-#defer no_opt $[findstring _no_opt_,$[source]]
-
-// What define variables should be passed to the compilers for each
-// value of OPTIMIZE?  We separate this so we can pass these same
-// options to interrogate, guaranteeing that the correct interfaces
-// are generated.  Do not include -D here; that will be supplied
-// automatically.
-#defer CDEFINES_OPT1 $[EXTRA_CDEFS]
-#defer CDEFINES_OPT2 $[EXTRA_CDEFS]
-#defer CDEFINES_OPT3 $[EXTRA_CDEFS]
-#defer CDEFINES_OPT4 $[EXTRA_CDEFS]
-
-// What additional flags should be passed for each value of OPTIMIZE
-// (above)?  We separate out the compiler-optimization flags, above,
-// so we can compile certain files that give optimizers trouble (like
-// the output of lex and yacc) without them, but with all the other
-// relevant flags.
-
-#define ANDROID_DEBUG_CFLAGS -fno-omit-frame-pointer -fno-strict-aliasing
-#define ANDROID_RELEASE_CFLAGS -fomit-frame-pointer -fstrict-aliasing -funswitch-loops -finline-limit=300
-
-#defer CFLAGS_OPT1 $[CDEFINES_OPT1:%=-D%] -Wall $[DEBUGFLAGS] $[ANDROID_DEBUG_CFLAGS]
-#defer CFLAGS_OPT2 $[CDEFINES_OPT2:%=-D%] -Wall $[DEBUGFLAGS] $[if $[no_opt],,$[OPTFLAGS]] $[ANDROID_DEBUG_CFLAGS]
-#defer CFLAGS_OPT3 $[CDEFINES_OPT3:%=-D%] $[DEBUGFLAGS] $[if $[no_opt],,$[OPTFLAGS]] $[ANDROID_RELEASE_CFLAGS]
-#defer CFLAGS_OPT4 $[CDEFINES_OPT4:%=-D%] $[if $[no_opt],,$[OPTFLAGS]] $[ANDROID_RELEASE_CFLAGS]
-
-// What additional flags should be passed to both compilers when
-// building shared (relocatable) sources?  Some architectures require
-// special support for this.
-#defer CFLAGS_SHARED -fPIC
-
-// How to generate a C or C++ executable from a collection of .o
-// files.  $[target] is the name of the binary to generate, and
-// $[sources] is the list of .o files.  $[libs] is a space-separated
-// list of dependent libraries, and $[lpath] is a space-separated list
-// of directories in which those libraries can be found.
-#defer os_lpath $[subst \,/,$[osfilename $[lpath]]]
-#defer LINK_BIN_C $[LINK_BIN_C++]
-#defer LINK_BIN_C++ $[cxx_ld]\
- -Wl,--gc-sections\
- -Wl,-z,nocopyreloc\
- $[SYSROOT_FLAGS]\
- $[sources]\
- $[flags]\
- $[os_lpath:%=-L%] $[libs:%=-l%]\
- -o $[target]
-
-// How to generate a static C or C++ library.  $[target] is the
-// name of the library to generate, and $[sources] is the list of .o
-// files that will go into the library.
-#defer STATIC_LIB_C $[AR] cru $[target] $[sources]
-#defer STATIC_LIB_C++ $[AR] cru $[target] $[sources]
-
-// How to run ranlib, if necessary, after generating a static library.
-// $[target] is the name of the library.  Set this to the empty string
-// if ranlib is not necessary on your platform.
-#defer RANLIB ranlib $[target]
-
-// Where to put the so_locations file, used by an Irix MIPSPro
-// compiler, to generate a map of shared library memory locations.
-#defer SO_LOCATIONS $[DTOOL_INSTALL]/etc/so_locations
-
-
-// How to generate a shared C or C++ library.  $[source] and $[target]
-// as above, and $[libs] is a space-separated list of dependent
-// libraries, and $[lpath] is a space-separated list of directories in
-// which those libraries can be found.
-#defer SHARED_LIB_C $[SHARED_LIB_C++]
-#defer SHARED_LIB_C++ $[cxx_ld]\
- -Wl,-soname,$[notdir $[target]]\
- -shared\
- $[SYSROOT_FLAGS]\
- $[sources]\
- $[flags]\
- $[os_lpath:%=-L%] $[libs:%=-l%]\
- -o $[target]
-#define BUNDLE_LIB_C++
-
-// How to install a data file or executable file.  $[local] is the
-// local name of the file to install, and $[dest] is the name of the
-// directory to put it in.
-
-// On Unix systems, we strongly prefer using the install program to
-// install files.  This has nice features like automatically setting
-// the permissions bits, and also is usually clever enough to install
-// a running program without crashing the running instance.  However,
-// it doesn't understand installing a program from a subdirectory,
-// so we have to cd into the source directory first.
-#defer install_dash_p $[if $[KEEP_TIMESTAMPS],-p,]
-#defer INSTALL $[if $[ne $[dir $[local]], ./],cd ./$[dir $[local]] &&] install -m $[INSTALL_UMASK_DATA] $[install_dash_p] $[notdir $[local]] $[dest]/
-#defer INSTALL_PROG $[if $[ne $[dir $[local]], ./],cd ./$[dir $[local]] &&] install -m $[INSTALL_UMASK_PROG] $[install_dash_p] $[notdir $[local]] $[dest]/
-
-#define SYSTEM_IGATE_FLAGS -D__const=const -Dvolatile -Dmutable
-
-// Posix thread support is provided by the Android NDK.
-#define HAVE_POSIX_THREADS 1
-#define THREADS_LIBS
-
-// Is the platform big-endian (like an SGI workstation) or
-// little-endian (like a PC)?  Define this to the empty string to
-// indicate little-endian, or nonempty to indicate big-endian.
-#define WORDS_BIGENDIAN
-
-// Does the C++ compiler support namespaces?
-#define HAVE_NAMESPACE 1
-
-// Does the C++ compiler support ios::binary?
-#define HAVE_IOS_BINARY 1
-
-// How about the typename keyword?
-#define HAVE_TYPENAME 1
-
-// Will the compiler avoid inserting extra bytes in structs between a
-// base struct and its derived structs?  It is safe to define this
-// false if you don't know, but if you know that you can get away with
-// this you may gain a tiny performance gain by defining this true.
-// If you define this true incorrectly, you will get lots of
-// assertion failures on execution.
-#define SIMPLE_STRUCT_POINTERS
-
-// Does gettimeofday() take only one parameter?
-#define GETTIMEOFDAY_ONE_PARAM
-
-// Do we have getopt() and/or getopt_long_only() built into the
-// system?
-#define HAVE_GETOPT 1
-#define HAVE_GETOPT_LONG_ONLY 1
-
-// Are the above getopt() functions defined in getopt.h, or somewhere else?
-#define PHAVE_GETOPT_H 1
-
-// Can we determine the terminal width by making an ioctl(TIOCGWINSZ) call?
-#define IOCTL_TERMINAL_WIDTH 1
-
-// Do the system headers define a "streamsize" typedef?  How about the
-// ios::binary enumerated value?  And other ios typedef symbols like
-// ios::openmode and ios::fmtflags?
-#define HAVE_STREAMSIZE 1
-#define HAVE_IOS_BINARY 1
-#define HAVE_IOS_TYPEDEFS 1
-
-// Can we safely call getenv() at static init time?
-#define STATIC_INIT_GETENV 1
-
-// Can we read the files /proc/self/* to determine our
-// environment variables at static init time?
-#define HAVE_PROC_SELF_EXE 1
-#define HAVE_PROC_SELF_MAPS 1
-#define HAVE_PROC_SELF_ENVIRON 1
-#define HAVE_PROC_SELF_CMDLINE 1
-
-// Do we have a global pair of argc/argv variables that we can read at
-// static init time?  Should we prototype them?  What are they called?
-#define HAVE_GLOBAL_ARGV
-#define PROTOTYPE_GLOBAL_ARGV
-#define GLOBAL_ARGV
-#define GLOBAL_ARGC
-
-// Should we include <iostream> or <iostream.h>?  Define PHAVE_IOSTREAM
-// to nonempty if we should use <iostream>, or empty if we should use
-// <iostream.h>.
-#define PHAVE_IOSTREAM 1
-
-// Do we have a true stringstream class defined in <sstream>?
-#define PHAVE_SSTREAM 1
-
-// Does fstream::open() require a third parameter, specifying the
-// umask?  Versions of gcc prior to 3.2 had this.
-#define HAVE_OPEN_MASK
-
-// Do we have the lockf() function available?
-#define HAVE_LOCKF
-
-// Do the compiler or system libraries define wchar_t for you?
-#define HAVE_WCHAR_T 1
-
-// Does <string> define the typedef wstring?  Most do, but for some
-// reason, versions of gcc before 3.0 didn't do this.
-#define HAVE_WSTRING 1
-
-// Do we have <new>?
-#define PHAVE_NEW 1
-
-// Do we have <io.h>?
-#define PHAVE_IO_H
-
-// Do we have <malloc.h>?
-#define PHAVE_MALLOC_H 1
-
-// Do we have <alloca.h>?
-#define PHAVE_ALLOCA_H 1
-
-// Do we have <locale.h>?
-#define PHAVE_LOCALE_H 1
-
-// Do we have <string.h>?
-#define PHAVE_STRING_H 1
-
-// Do we have <stdlib.h>?
-#define PHAVE_STDLIB_H 1
-
-// Do we have <limits.h>?
-#define PHAVE_LIMITS_H 1
-
-// Do we have <minmax.h>?
-#define PHAVE_MINMAX_H
-
-// Do we have <sys/types.h>?
-#define PHAVE_SYS_TYPES_H 1
-#define PHAVE_SYS_TIME_H 1
-
-// Do we have <unistd.h>?
-#define PHAVE_UNISTD_H 1
-
-// Do we have <utime.h>?
-#define PHAVE_UTIME_H 1
-
-// Do we have <dirent.h>?
-#define PHAVE_DIRENT_H 1
-
-// Do we have <glob.h> (and do we want to use it instead of dirent.h)?
-#define PHAVE_GLOB_H
-
-// Do we have <sys/soundcard.h> (and presumably a Linux-style audio
-// interface)?
-#define PHAVE_SYS_SOUNDCARD_H 1
-
-// Do we have <ucontext.h> (and therefore makecontext() / swapcontext())?
-#define PHAVE_UCONTEXT_H 1
-
-// Do we have <linux/input.h>?  This enables us to use raw mouse input.
-#define PHAVE_LINUX_INPUT_H 1
-
-// Do we have RTTI (and <typeinfo>)?
-// Technically, Android has RTTI support now,
-// but we keep it disabled for performance reasons.
-#define HAVE_RTTI
-
-// Do we have <stdint.h>?
-#define PHAVE_STDINT_H 1
-
-// We need 64-bit file i/o
-#define __USE_LARGEFILE64 1
-
-// The dynamic library file extension (usually .so .dll or .dylib):
-#define DYNAMIC_LIB_EXT .so
-#define STATIC_LIB_EXT .a
-#define BUNDLE_EXT

+ 0 - 12
dtool/Config.Cygwin.pp

@@ -1,12 +0,0 @@
-//
-// Config.Cygwin.pp
-//
-// This file defines some custom config variables for the Windows
-// platform, when ppremake has been compiled using Cygwin.  It
-// inherits most of its parameters from Config.Win32.pp.
-//
-
-// Note: if you are building for 64-bit Windows, you should configure
-// ppremake with the "Cygwin64" platform name instead of "Cygwin".
-
-#include $[THISDIRPREFIX]Config.Win32.pp

+ 0 - 10
dtool/Config.Cygwin64.pp

@@ -1,10 +0,0 @@
-//
-// Config.Cygwin64.pp
-//
-// This file defines some custom config variables for the Windows
-// platform, when ppremake has been compiled using Cygwin.  It
-// inherits most of its parameters from Config.Win64.pp.
-//
-
-// 64-bit
-#include $[THISDIRPREFIX]Config.Win64.pp

+ 0 - 303
dtool/Config.FreeBSD.pp

@@ -1,303 +0,0 @@
-//
-// Config.FreeBSD.pp
-//
-// This file defines some custom config variables for the FreeBSD
-// platform.  It makes some initial guesses about compiler features,
-// etc.
-//
-
-// *******************************************************************
-// NOTE: you should not attempt to copy this file verbatim as your own
-// personal Config.pp file.  Instead, you should start with an empty
-// Config.pp file, and add lines to it when you wish to override
-// settings given in here.  In the normal ppremake system, this file
-// will always be read first, and then your personal Config.pp file
-// will be read later, which gives you a chance to override the
-// default settings found in this file.  However, if you start by
-// copying the entire file, it will be difficult to tell which
-// settings you have customized, and it will be difficult to upgrade
-// to a subsequent version of Panda.
-// *******************************************************************
-
-#define IS_FREEBSD 1
-
-// Compiler flags
-
-// How to invoke the C and C++ compilers.
-#if $[eq $[USE_COMPILER], GCC]
-  #define CC gcc
-  #define CXX g++
-  #define AR ar
-
-  // gcc might run into template limits on some parts of Panda.
-  // I upped this from 25 to build on OS X (GCC 3.3) -- skyler.
-  #define C++FLAGS_GEN -ftemplate-depth-30
-#else
-  #define CC cc
-  #define CXX CC
-  #define AR ar
-#endif
-
-// FreeBSD doesn't (yet) have any funny architecture flags.
-#defer ARCH_FLAGS
-
-// How to compile a C or C++ file into a .o file.  $[target] is the
-// name of the .o file, $[source] is the name of the source file,
-// $[ipath] is a space-separated list of directories to search for
-// include files, and $[flags] is a list of additional flags to pass
-// to the compiler.
-#defer COMPILE_C $[CC] $[CFLAGS_GEN] $[ARCH_FLAGS] -c -o $[target] $[ipath:%=-I%] $[flags] $[source]
-#defer COMPILE_C++ $[CXX] $[C++FLAGS_GEN] $[ARCH_FLAGS] -c -o $[target] $[ipath:%=-I%] $[flags] $[source]
-
-// What flags should be passed to both C and C++ compilers to enable
-// debug symbols?  This will be supplied when OPTIMIZE (above) is set
-// to 1, 2, or 3.
-#defer DEBUGFLAGS -g
-
-// What flags should be passed to both C and C++ compilers to enable
-// compiler optimizations?  This will be supplied when OPTIMIZE
-// (above) is set to 2, 3, or 4.
-#defer OPTFLAGS -O2
-
-// By convention, any source file that contains the string _no_opt_ in
-// its filename won't have the above compiler optimizations run for it.
-#defer no_opt $[findstring _no_opt_,$[source]]
-
-// What define variables should be passed to the compilers for each
-// value of OPTIMIZE?  We separate this so we can pass these same
-// options to interrogate, guaranteeing that the correct interfaces
-// are generated.  Do not include -D here; that will be supplied
-// automatically.
-#defer CDEFINES_OPT1 $[EXTRA_CDEFS]
-#defer CDEFINES_OPT2 $[EXTRA_CDEFS]
-#defer CDEFINES_OPT3 $[EXTRA_CDEFS]
-#defer CDEFINES_OPT4 $[EXTRA_CDEFS]
-
-// What additional flags should be passed for each value of OPTIMIZE
-// (above)?  We separate out the compiler-optimization flags, above,
-// so we can compile certain files that give optimizers trouble (like
-// the output of lex and yacc) without them, but with all the other
-// relevant flags.
-#defer CFLAGS_OPT1 $[CDEFINES_OPT1:%=-D%] -Wall $[DEBUGFLAGS]
-#defer CFLAGS_OPT2 $[CDEFINES_OPT2:%=-D%] -Wall $[DEBUGFLAGS] $[if $[no_opt],,$[OPTFLAGS]]
-#defer CFLAGS_OPT3 $[CDEFINES_OPT3:%=-D%] $[DEBUGFLAGS] $[if $[no_opt],,$[OPTFLAGS]]
-#defer CFLAGS_OPT4 $[CDEFINES_OPT4:%=-D%] $[if $[no_opt],,$[OPTFLAGS]]
-
-// What additional flags should be passed to both compilers when
-// building shared (relocatable) sources?  Some architectures require
-// special support for this.
-#defer CFLAGS_SHARED -fPIC
-
-// How to generate a C or C++ executable from a collection of .o
-// files.  $[target] is the name of the binary to generate, and
-// $[sources] is the list of .o files.  $[libs] is a space-separated
-// list of dependent libraries, and $[lpath] is a space-separated list
-// of directories in which those libraries can be found.
-#defer LINK_BIN_C $[cc_ld] $[ARCH_FLAGS] -o $[target] $[sources] $[flags] $[lpath:%=-L%] $[libs:%=-l%]\
- $[fpath:%=-Wl,-F%] $[patsubst %,-framework %, $[bin_frameworks]]
-#defer LINK_BIN_C++ $[cxx_ld] $[ARCH_FLAGS] \
- -o $[target] $[sources]\
- $[flags]\
- $[lpath:%=-L%] $[libs:%=-l%]\
- $[fpath:%=-Wl,-F%] $[patsubst %,-framework %, $[bin_frameworks]]
-
-// How to generate a static C or C++ library.  $[target] is the
-// name of the library to generate, and $[sources] is the list of .o
-// files that will go into the library.
-#defer STATIC_LIB_C $[AR] cru $[target] $[sources]
-#defer STATIC_LIB_C++ $[AR] cru $[target] $[sources]
-
-// How to run ranlib, if necessary, after generating a static library.
-// $[target] is the name of the library.  Set this to the empty string
-// if ranlib is not necessary on your platform.
-#defer RANLIB ranlib $[target]
-
-// Where to put the so_locations file, used by an Irix MIPSPro
-// compiler, to generate a map of shared library memory locations.
-#defer SO_LOCATIONS $[DTOOL_INSTALL]/etc/so_locations
-
-
-// How to generate a shared C or C++ library.  $[source] and $[target]
-// as above, and $[libs] is a space-separated list of dependent
-// libraries, and $[lpath] is a space-separated list of directories in
-// which those libraries can be found.
-#defer SHARED_LIB_C $[cc_ld] -shared $[LFLAGS] -o $[target] $[sources] $[lpath:%=-L%] $[libs:%=-l%]
-#defer SHARED_LIB_C++ $[cxx_ld] -shared $[LFLAGS] -o $[target] $[sources] $[lpath:%=-L%] $[libs:%=-l%]
-#define BUNDLE_LIB_C++
-
-// How to install a data file or executable file.  $[local] is the
-// local name of the file to install, and $[dest] is the name of the
-// directory to put it in.
-
-// On Unix systems, we strongly prefer using the install program to
-// install files.  This has nice features like automatically setting
-// the permissions bits, and also is usually clever enough to install
-// a running program without crashing the running instance.  However,
-// it doesn't understand installing a program from a subdirectory,
-// so we have to cd into the source directory first.
-#defer install_dash_p $[if $[KEEP_TIMESTAMPS],-p,]
-#defer INSTALL $[if $[ne $[dir $[local]], ./],cd ./$[dir $[local]] &&] install -m $[INSTALL_UMASK_DATA] $[install_dash_p] $[notdir $[local]] $[dest]/
-#defer INSTALL_PROG $[if $[ne $[dir $[local]], ./],cd ./$[dir $[local]] &&] install -m $[INSTALL_UMASK_PROG] $[install_dash_p] $[notdir $[local]] $[dest]/
-
-// What additional flags should we pass to interrogate?
-#if $[eq $[shell uname -m], amd64] // if FreeBSD is 64bit
-  #define SYSTEM_IGATE_FLAGS -D_LP64 -D__const=const -Dvolatile -Dmutable
-#else
-  #define SYSTEM_IGATE_FLAGS -D__i386__ -D__const=const -Dvolatile -Dmutable
-#endif
-
-// Is the platform big-endian (like an SGI workstation) or
-// little-endian (like a PC)?  Define this to the empty string to
-// indicate little-endian, or nonempty to indicate big-endian.
-#define WORDS_BIGENDIAN
-
-// Does the C++ compiler support namespaces?
-#define HAVE_NAMESPACE 1
-
-// Does the C++ compiler support ios::binary?
-#define HAVE_IOS_BINARY 1
-
-// How about the typename keyword?
-#define HAVE_TYPENAME 1
-
-// Will the compiler avoid inserting extra bytes in structs between a
-// base struct and its derived structs?  It is safe to define this
-// false if you don't know, but if you know that you can get away with
-// this you may gain a tiny performance gain by defining this true.
-// If you define this true incorrectly, you will get lots of
-// assertion failures on execution.
-#define SIMPLE_STRUCT_POINTERS
-
-// Does gettimeofday() take only one parameter?
-#define GETTIMEOFDAY_ONE_PARAM
-
-// Do we have getopt() and/or getopt_long_only() built into the
-// system?
-#define HAVE_GETOPT 1
-#define HAVE_GETOPT_LONG_ONLY 1
-
-// Are the above getopt() functions defined in getopt.h, or somewhere else?
-#define PHAVE_GETOPT_H 1
-
-// Can we determine the terminal width by making an ioctl(TIOCGWINSZ) call?
-#define IOCTL_TERMINAL_WIDTH 1
-
-// Do the system headers define a "streamsize" typedef?  How about the
-// ios::binary enumerated value?  And other ios typedef symbols like
-// ios::openmode and ios::fmtflags?
-#define HAVE_STREAMSIZE 1
-#define HAVE_IOS_BINARY 1
-#define HAVE_IOS_TYPEDEFS 1
-
-// Can we safely call getenv() at static init time?
-#define STATIC_INIT_GETENV 1
-
-// Can we read the files /proc/self/* to determine our
-// environment variables at static init time?
-#define HAVE_PROC_CURPROC_FILE 1
-#define HAVE_PROC_CURPROC_MAP 1
-#define HAVE_PROC_CURPROC_CMDLINE 1
-
-// Do we have a global pair of argc/argv variables that we can read at
-// static init time?  Should we prototype them?  What are they called?
-#define HAVE_GLOBAL_ARGV
-#define PROTOTYPE_GLOBAL_ARGV
-#define GLOBAL_ARGV
-#define GLOBAL_ARGC
-
-// Should we include <iostream> or <iostream.h>?  Define PHAVE_IOSTREAM
-// to nonempty if we should use <iostream>, or empty if we should use
-// <iostream.h>.
-#define PHAVE_IOSTREAM 1
-
-// Do we have a true stringstream class defined in <sstream>?
-#define PHAVE_SSTREAM 1
-
-// Does fstream::open() require a third parameter, specifying the
-// umask?  Versions of gcc prior to 3.2 had this.
-#define HAVE_OPEN_MASK
-
-// Do we have the lockf() function available?
-#define HAVE_LOCKF 1
-
-// Do the compiler or system libraries define wchar_t for you?
-#define HAVE_WCHAR_T 1
-
-// Does <string> define the typedef wstring?  Most do, but for some
-// reason, versions of gcc before 3.0 didn't do this.
-#define HAVE_WSTRING 1
-
-// Do we have <new>?
-#define PHAVE_NEW 1
-
-// Do we have <io.h>?
-#define PHAVE_IO_H
-
-// Do we have <malloc.h>?
-#define PHAVE_MALLOC_H
-
-// Do we have <alloca.h>?
-#define PHAVE_ALLOCA_H
-
-// Do we have <locale.h>?
-#define PHAVE_LOCALE_H 1
-
-// Do we have <string.h>?
-#define PHAVE_STRING_H 1
-
-// Do we have <stdlib.h>?
-#define PHAVE_STDLIB_H 1
-
-// Do we have <limits.h>?
-#define PHAVE_LIMITS_H 1
-
-// Do we have <minmax.h>?
-#define PHAVE_MINMAX_H
-
-// Do we have <sys/types.h>?
-#define PHAVE_SYS_TYPES_H 1
-#define PHAVE_SYS_TIME_H 1
-
-// Do we have <unistd.h>?
-#define PHAVE_UNISTD_H 1
-
-// Do we have <utime.h>?
-#define PHAVE_UTIME_H 1
-
-// Do we have <dirent.h>?
-#define PHAVE_DIRENT_H 1
-
-// Do we have <glob.h> (and do we want to use it instead of dirent.h)?
-#define PHAVE_GLOB_H 1
-
-// Do we have <sys/soundcard.h> (and presumably a Linux-style audio
-// interface)?
-#define PHAVE_SYS_SOUNDCARD_H 1
-
-// Do we have <ucontext.h> (and therefore makecontext() / swapcontext())?
-#define PHAVE_UCONTEXT_H 1
-
-// Do we have <linux/input.h>?  This enables us to use raw mouse input.
-#define PHAVE_LINUX_INPUT_H
-
-// Do we have <stdint.h>?
-#define PHAVE_STDINT_H 1
-
-// Do we have RTTI (and <typeinfo>)?
-#define HAVE_RTTI 1
-
-// We need 64-bit file i/o
-#define __USE_LARGEFILE64 1
-
-// The dynamic library file extension (usually .so .dll or .dylib):
-#define DYNAMIC_LIB_EXT .so
-#define STATIC_LIB_EXT .a
-#define BUNDLE_EXT
-
-#if $[isdir /usr/PCBSD/local] // if we're running PC-BSD
-  #define EXTRA_IPATH /usr/PCBSD/local/include/
-  #define EXTRA_LPATH /usr/PCBSD/local/lib/
-#endif
-
-#define EXTRA_IPATH /usr/local/include/
-#define EXTRA_LPATH /usr/local/lib/

+ 0 - 151
dtool/Config.Irix.pp

@@ -1,151 +0,0 @@
-//
-// Config.Irix.pp
-//
-// This file defines some custom config variables for the SGI/Irix
-// platform.  It makes some initial guesses about compiler features,
-// etc.
-//
-
-// *******************************************************************
-// NOTE: you should not attempt to copy this file verbatim as your own
-// personal Config.pp file.  Instead, you should start with an empty
-// Config.pp file, and add lines to it when you wish to override
-// settings given in here.  In the normal ppremake system, this file
-// will always be read first, and then your personal Config.pp file
-// will be read later, which gives you a chance to override the
-// default settings found in this file.  However, if you start by
-// copying the entire file, it will be difficult to tell which
-// settings you have customized, and it will be difficult to upgrade
-// to a subsequent version of Panda.
-// *******************************************************************
-
-// What additional flags should we pass to interrogate?
-#define SYSTEM_IGATE_FLAGS -D__mips__ -D__MIPSEB__ -D_LANGUAGE_C_PLUS_PLUS -D_MIPS_SZINT=32 -D_MIPS_SZLONG=32 -D_MIPS_SZPTR=32
-
-// Is the platform big-endian (like an SGI workstation) or
-// little-endian (like a PC)?  Define this to the empty string to
-// indicate little-endian, or nonempty to indicate big-endian.
-#define WORDS_BIGENDIAN 1
-
-// Does the C++ compiler support namespaces?
-#define HAVE_NAMESPACE 1
-
-// How about the typename keyword?
-#define HAVE_TYPENAME 1
-
-// Will the compiler avoid inserting extra bytes in structs between a
-// base struct and its derived structs?  It is safe to define this
-// false if you don't know, but if you know that you can get away with
-// this you may gain a tiny performance gain by defining this true.
-// If you define this true incorrectly, you will get lots of
-// assertion failures on execution.
-#define SIMPLE_STRUCT_POINTERS
-
-// Does gettimeofday() take only one parameter?
-#define GETTIMEOFDAY_ONE_PARAM
-
-// Do we have getopt() and/or getopt_long_only() built into the
-// system?
-#define HAVE_GETOPT 1
-#define HAVE_GETOPT_LONG_ONLY
-
-// Are the above getopt() functions defined in getopt.h, or somewhere else?
-#define PHAVE_GETOPT_H 1
-
-// Can we determine the terminal width by making an ioctl(TIOCGWINSZ) call?
-#define IOCTL_TERMINAL_WIDTH 1
-
-// Do the system headers define a "streamsize" typedef?  How about the
-// ios::binary enumerated value?  And other ios typedef symbols like
-// ios::openmode and ios::fmtflags?
-#define HAVE_STREAMSIZE
-#define HAVE_IOS_BINARY
-#define HAVE_IOS_TYPEDEFS
-
-// Can we safely call getenv() at static init time?
-#define STATIC_INIT_GETENV 1
-
-// Can we read the file /proc/self/environ to determine our
-// environment variables at static init time?
-#define HAVE_PROC_SELF_ENVIRON
-
-// Do we have a global pair of argc/argv variables that we can read at
-// static init time?  Should we prototype them?  What are they called?
-#define HAVE_GLOBAL_ARGV 1
-#define PROTOTYPE_GLOBAL_ARGV 1
-#define GLOBAL_ARGV __Argv
-#define GLOBAL_ARGC __Argc
-
-// Can we read the file /proc/self/cmdline to determine our
-// command-line arguments at static init time?
-#define HAVE_PROC_SELF_CMDLINE
-
-// Should we include <iostream> or <iostream.h>?  Define PHAVE_IOSTREAM
-// to nonempty if we should use <iostream>, or empty if we should use
-// <iostream.h>.
-#define PHAVE_IOSTREAM
-
-// Do we have a true stringstream class defined in <sstream>?
-#define PHAVE_SSTREAM
-
-// Does fstream::open() require a third parameter, specifying the
-// umask?
-#define HAVE_OPEN_MASK 1
-
-// Do we have the lockf() function available?
-#define HAVE_LOCKF 1
-
-// Do the compiler or system libraries define wchar_t for you?
-#define HAVE_WCHAR_T 1
-
-// Does <string> define the typedef wstring?  Most do, but for some
-// reason, versions of gcc before 3.0 didn't do this.
-#define HAVE_WSTRING 1
-
-// Do we have <new>?
-#define PHAVE_NEW
-
-// Do we have <io.h>?
-#define PHAVE_IO_H
-
-// Do we have <malloc.h>?
-#define PHAVE_MALLOC_H 1
-
-// Do we have <alloca.h>?
-#define PHAVE_ALLOCA_H 1
-
-// Do we have <locale.h>?
-#define PHAVE_LOCALE_H 1
-
-// Do we have <minmax.h>?
-#define PHAVE_MINMAX_H
-
-// Do we have <sys/types.h>?
-#define PHAVE_SYS_TYPES_H 1
-#define PHAVE_SYS_TIME_H 1
-
-// Do we have <unistd.h>?
-#define PHAVE_UNISTD_H 1
-
-// Do we have <utime.h>?
-#define PHAVE_UTIME_H 1
-
-// Do we have <dirent.h>?
-#define PHAVE_DIRENT_H 1
-
-// Do we have <sys/soundcard.h> (and presumably a Linux-style audio
-// interface)?
-#define PHAVE_SYS_SOUNDCARD_H
-
-// Do we have <stdint.h>?
-#define PHAVE_STDINT_H
-
-// Do we have RTTI (and <typeinfo>)?
-#define HAVE_RTTI 1
-
-// The Irix compiler doesn't support the modern STL allocator.
-#define USE_STL_ALLOCATOR
-
-// The dynamic library file extension (usually .so .dll or .dylib):
-#define DYNAMIC_LIB_EXT .so
-#define BUNDLE_EXT

+ 0 - 339
dtool/Config.Linux.pp

@@ -1,339 +0,0 @@
-//
-// Config.Linux.pp
-//
-// This file defines some custom config variables for the Linux
-// platform.  It makes some initial guesses about compiler features,
-// etc.
-//
-
-// *******************************************************************
-// NOTE: you should not attempt to copy this file verbatim as your own
-// personal Config.pp file.  Instead, you should start with an empty
-// Config.pp file, and add lines to it when you wish to override
-// settings given in here.  In the normal ppremake system, this file
-// will always be read first, and then your personal Config.pp file
-// will be read later, which gives you a chance to override the
-// default settings found in this file.  However, if you start by
-// copying the entire file, it will be difficult to tell which
-// settings you have customized, and it will be difficult to upgrade
-// to a subsequent version of Panda.
-// *******************************************************************
-
-#define IS_LINUX 1
-
-// Compiler flags
-
-// How to invoke the C and C++ compilers.
-#if $[eq $[USE_COMPILER], GCC]
-  #define CC gcc
-  #define CXX g++
-  #define AR ar
-
-  // gcc might run into template limits on some parts of Panda.
-  // I upped this from 25 to build on OS X (GCC 3.3) -- skyler.
-  #define C++FLAGS_GEN -ftemplate-depth-30
-#else
-  #define CC cc
-  #define CXX CC
-  #define AR ar
-#endif
-
-// Linux doesn't (yet) have any funny architecture flags.
-#defer ARCH_FLAGS 
-
-// How to compile a C or C++ file into a .o file.  $[target] is the
-// name of the .o file, $[source] is the name of the source file,
-// $[ipath] is a space-separated list of directories to search for
-// include files, and $[flags] is a list of additional flags to pass
-// to the compiler.
-#defer COMPILE_C $[CC] $[CFLAGS_GEN] $[ARCH_FLAGS] -c -o $[target] $[ipath:%=-I%] $[flags] $[source]
-#defer COMPILE_C++ $[CXX] $[C++FLAGS_GEN] $[ARCH_FLAGS] -c -o $[target] $[ipath:%=-I%] $[flags] $[source]
-
-// What flags should be passed to both C and C++ compilers to enable
-// debug symbols?  This will be supplied when OPTIMIZE (above) is set
-// to 1, 2, or 3.
-#defer DEBUGFLAGS -g
-
-// What flags should be passed to both C and C++ compilers to enable
-// compiler optimizations?  This will be supplied when OPTIMIZE
-// (above) is set to 2, 3, or 4.
-#defer OPTFLAGS -O2
-
-// By convention, any source file that contains the string _no_opt_ in
-// its filename won't have the above compiler optimizations run for it.
-#defer no_opt $[findstring _no_opt_,$[source]]
-
-// What define variables should be passed to the compilers for each
-// value of OPTIMIZE?  We separate this so we can pass these same
-// options to interrogate, guaranteeing that the correct interfaces
-// are generated.  Do not include -D here; that will be supplied
-// automatically.
-#defer CDEFINES_OPT1 $[EXTRA_CDEFS]
-#defer CDEFINES_OPT2 $[EXTRA_CDEFS]
-#defer CDEFINES_OPT3 $[EXTRA_CDEFS]
-#defer CDEFINES_OPT4 $[EXTRA_CDEFS]
-
-// What additional flags should be passed for each value of OPTIMIZE
-// (above)?  We separate out the compiler-optimization flags, above,
-// so we can compile certain files that give optimizers trouble (like
-// the output of lex and yacc) without them, but with all the other
-// relevant flags.
-#defer CFLAGS_OPT1 $[CDEFINES_OPT1:%=-D%] -Wall $[DEBUGFLAGS]
-#defer CFLAGS_OPT2 $[CDEFINES_OPT2:%=-D%] -Wall $[DEBUGFLAGS] $[if $[no_opt],,$[OPTFLAGS]]
-#defer CFLAGS_OPT3 $[CDEFINES_OPT3:%=-D%] $[DEBUGFLAGS] $[if $[no_opt],,$[OPTFLAGS]]
-#defer CFLAGS_OPT4 $[CDEFINES_OPT4:%=-D%] $[if $[no_opt],,$[OPTFLAGS]]
-
-// What additional flags should be passed to both compilers when
-// building shared (relocatable) sources?  Some architectures require
-// special support for this.
-#defer CFLAGS_SHARED -fPIC
-
-// How to generate a C or C++ executable from a collection of .o
-// files.  $[target] is the name of the binary to generate, and
-// $[sources] is the list of .o files.  $[libs] is a space-separated
-// list of dependent libraries, and $[lpath] is a space-separated list
-// of directories in which those libraries can be found.
-#defer LINK_BIN_C $[cc_ld] $[ARCH_FLAGS] -o $[target] $[sources] $[flags] $[lpath:%=-L%] $[libs:%=-l%]\
- $[fpath:%=-Wl,-F%] $[patsubst %,-framework %, $[bin_frameworks]]
-#defer LINK_BIN_C++ $[cxx_ld] $[ARCH_FLAGS] \
- -o $[target] $[sources]\
- $[flags]\
- $[lpath:%=-L%] $[libs:%=-l%]\
- $[fpath:%=-Wl,-F%] $[patsubst %,-framework %, $[bin_frameworks]]
-
-// How to generate a static C or C++ library.  $[target] is the
-// name of the library to generate, and $[sources] is the list of .o
-// files that will go into the library.
-#defer STATIC_LIB_C $[AR] cru $[target] $[sources]
-#defer STATIC_LIB_C++ $[AR] cru $[target] $[sources]
-
-// How to run ranlib, if necessary, after generating a static library.
-// $[target] is the name of the library.  Set this to the empty string
-// if ranlib is not necessary on your platform.
-#defer RANLIB ranlib $[target]
-
-// Where to put the so_locations file, used by an Irix MIPSPro
-// compiler, to generate a map of shared library memory locations.
-#defer SO_LOCATIONS $[DTOOL_INSTALL]/etc/so_locations
-
-
-// How to generate a shared C or C++ library.  $[source] and $[target]
-// as above, and $[libs] is a space-separated list of dependent
-// libraries, and $[lpath] is a space-separated list of directories in
-// which those libraries can be found.
-#defer SHARED_LIB_C $[cc_ld] -shared $[LFLAGS] -o $[target] $[sources] $[lpath:%=-L%] $[libs:%=-l%]
-#defer SHARED_LIB_C++ $[cxx_ld] -shared $[LFLAGS] -o $[target] $[sources] $[lpath:%=-L%] $[libs:%=-l%]
-#define BUNDLE_LIB_C++
-
-// How to install a data file or executable file.  $[local] is the
-// local name of the file to install, and $[dest] is the name of the
-// directory to put it in.
-
-// On Unix systems, we strongly prefer using the install program to
-// install files.  This has nice features like automatically setting
-// the permissions bits, and also is usually clever enough to install
-// a running program without crashing the running instance.  However,
-// it doesn't understand installing a program from a subdirectory,
-// so we have to cd into the source directory first.
-#defer install_dash_p $[if $[KEEP_TIMESTAMPS],-p,]
-#defer INSTALL $[if $[ne $[dir $[local]], ./],cd ./$[dir $[local]] &&] install -m $[INSTALL_UMASK_DATA] $[install_dash_p] $[notdir $[local]] $[dest]/
-#defer INSTALL_PROG $[if $[ne $[dir $[local]], ./],cd ./$[dir $[local]] &&] install -m $[INSTALL_UMASK_PROG] $[install_dash_p] $[notdir $[local]] $[dest]/
-
-// Variable definitions for building with the Irix MIPSPro compiler.
-#if $[eq $[USE_COMPILER], MIPS]
-  #define CC cc -n32 -mips3
-  #define CXX CC -n32 -mips3
-
-  // Turn off a few annoying warning messages.
-  // 1174 - function 'blah' was declared but never used
-  // 1201 - trailing comma is nonstandard.
-  // 1209 - controlling expression is constant, e.g. if (0) { ... }
-  // 1234 - access control not specified, 'public' by default
-  // 1355 - extra ";" ignored
-  // 1375 - destructor for base class is not virtual.
-  //    this one actually is bad.  But we got a lot of them from the classes
-  //    that we've derived from STL collections.  Beware of this.
-  // 3322 - omission of explicit type is nonstandard ("int" assumed)
-  #define WOFF_LIST -woff 1174,1201,1209,1234,1355,1375,3322
-
-  // Linker warnings
-  // 85 - definition of SOMESYMBOL in SOMELIB preempts that of definition in
-  //      SOMEOTHERLIB.
-  #define WOFF_LIST $[WOFF_LIST] -Wl,-LD_MSG:off=85
-
-  #defer OPTFLAGS -O2 -OPT:Olimit=2500
-
-  #defer CFLAGS_OPT1 $[CDEFINES_OPT1:%=-D%] $[WOFF_LIST] -g
-  #defer CFLAGS_OPT2 $[CDEFINES_OPT2:%=-D%] $[WOFF_LIST]
-  #defer CFLAGS_OPT3 $[CDEFINES_OPT3:%=-D%] $[WOFF_LIST]
-  #defer CFLAGS_OPT4 $[CDEFINES_OPT4:%=-D%] $[WOFF_LIST]
-
-  #defer CFLAGS_SHARED
-
-  #defer STATIC_LIB_C $[CC] -ar -o $[target] $[sources]
-  #defer STATIC_LIB_C++ $[CXX] -ar -o $[target] $[sources]
-  #defer RANLIB
-
-  #defer SHARED_FLAGS -Wl,-none -Wl,-update_registry,$[SO_LOCATIONS]
-  #defer SHARED_LIB_C $[cc_ld] -shared $[SHARED_FLAGS] -o $[target] $[sources] $[lpath:%=-L%] $[libs:%=-l%]
-  #defer SHARED_LIB_C++ $[cxx_ld] -shared $[SHARED_FLAGS] -o $[target] $[sources] $[lpath:%=-L%] $[libs:%=-l%]
-#endif
-
-
-
-// What additional flags should we pass to interrogate?
-#if $[eq $[shell uname -m], x86_64] // if Linux is 64bit
-  #define SYSTEM_IGATE_FLAGS -D_LP64 -D__const=const -Dvolatile -Dmutable
-#else
-  #define SYSTEM_IGATE_FLAGS -D__i386__ -D__const=const -Dvolatile -Dmutable
-#endif
-
-// Is the platform big-endian (like an SGI workstation) or
-// little-endian (like a PC)?  Define this to the empty string to
-// indicate little-endian, or nonempty to indicate big-endian.
-#define WORDS_BIGENDIAN
-
-// Does the C++ compiler support namespaces?
-#define HAVE_NAMESPACE 1
-
-// Does the C++ compiler support ios::binary?
-#define HAVE_IOS_BINARY 1
-
-// How about the typename keyword?
-#define HAVE_TYPENAME 1
-
-// Will the compiler avoid inserting extra bytes in structs between a
-// base struct and its derived structs?  It is safe to define this
-// false if you don't know, but if you know that you can get away with
-// this you may gain a tiny performance gain by defining this true.
-// If you define this true incorrectly, you will get lots of
-// assertion failures on execution.
-#define SIMPLE_STRUCT_POINTERS
-
-// Does gettimeofday() take only one parameter?
-#define GETTIMEOFDAY_ONE_PARAM
-
-// Do we have getopt() and/or getopt_long_only() built into the
-// system?
-#define HAVE_GETOPT 1
-#define HAVE_GETOPT_LONG_ONLY 1
-
-// Are the above getopt() functions defined in getopt.h, or somewhere else?
-#define PHAVE_GETOPT_H 1
-
-// Can we determine the terminal width by making an ioctl(TIOCGWINSZ) call?
-#define IOCTL_TERMINAL_WIDTH 1
-
-// Do the system headers define a "streamsize" typedef?  How about the
-// ios::binary enumerated value?  And other ios typedef symbols like
-// ios::openmode and ios::fmtflags?
-#define HAVE_STREAMSIZE 1
-#define HAVE_IOS_BINARY 1
-#define HAVE_IOS_TYPEDEFS 1
-
-// Can we safely call getenv() at static init time?
-#define STATIC_INIT_GETENV 1
-
-// Can we read the files /proc/self/* to determine our
-// environment variables at static init time?
-#define HAVE_PROC_SELF_EXE 1
-#define HAVE_PROC_SELF_MAPS 1
-#define HAVE_PROC_SELF_ENVIRON 1
-#define HAVE_PROC_SELF_CMDLINE 1
-
-// Do we have a global pair of argc/argv variables that we can read at
-// static init time?  Should we prototype them?  What are they called?
-#define HAVE_GLOBAL_ARGV
-#define PROTOTYPE_GLOBAL_ARGV
-#define GLOBAL_ARGV
-#define GLOBAL_ARGC
-
-// Should we include <iostream> or <iostream.h>?  Define PHAVE_IOSTREAM
-// to nonempty if we should use <iostream>, or empty if we should use
-// <iostream.h>.
-#define PHAVE_IOSTREAM 1
-
-// Do we have a true stringstream class defined in <sstream>?
-#define PHAVE_SSTREAM 1
-
-// Does fstream::open() require a third parameter, specifying the
-// umask?  Versions of gcc prior to 3.2 had this.
-#define HAVE_OPEN_MASK
-
-// Do we have the lockf() function available?
-#define HAVE_LOCKF 1
-
-// Do the compiler or system libraries define wchar_t for you?
-#define HAVE_WCHAR_T 1
-
-// Does <string> define the typedef wstring?  Most do, but for some
-// reason, versions of gcc before 3.0 didn't do this.
-#define HAVE_WSTRING 1
-
-// Do we have <new>?
-#define PHAVE_NEW 1
-
-
-// Do we have <io.h>?
-#define PHAVE_IO_H
-
-// Do we have <malloc.h>?
-#define PHAVE_MALLOC_H 1
-
-// Do we have <alloca.h>?
-#define PHAVE_ALLOCA_H 1
-
-// Do we have <locale.h>?
-#define PHAVE_LOCALE_H 1
-
-// Do we have <string.h>?
-#define PHAVE_STRING_H 1
-
-// Do we have <stdlib.h>?
-#define PHAVE_STDLIB_H 1
-
-// Do we have <limits.h>?
-#define PHAVE_LIMITS_H 1
-
-// Do we have <minmax.h>?
-#define PHAVE_MINMAX_H
-
-// Do we have <sys/types.h>?
-#define PHAVE_SYS_TYPES_H 1
-#define PHAVE_SYS_TIME_H 1
-
-// Do we have <unistd.h>?
-#define PHAVE_UNISTD_H 1
-
-// Do we have <utime.h>?
-#define PHAVE_UTIME_H 1
-
-// Do we have <dirent.h>?
-#define PHAVE_DIRENT_H 1
-
-// Do we have <glob.h> (and do we want to use it instead of dirent.h)?
-#define PHAVE_GLOB_H 1
-
-// Do we have <sys/soundcard.h> (and presumably a Linux-style audio
-// interface)?
-#define PHAVE_SYS_SOUNDCARD_H 1
-
-// Do we have <ucontext.h> (and therefore makecontext() / swapcontext())?
-#define PHAVE_UCONTEXT_H 1
-
-// Do we have <linux/input.h> ? This enables us to use raw mouse input.
-#define PHAVE_LINUX_INPUT_H 1
-
-// Do we have RTTI (and <typeinfo>)?
-#define HAVE_RTTI 1
-
-// Do we have <stdint.h>?
-#define PHAVE_STDINT_H 1
-
-// We need 64-bit file i/o
-#define __USE_LARGEFILE64 1
-
-// The dynamic library file extension (usually .so .dll or .dylib):
-#define DYNAMIC_LIB_EXT .so
-#define STATIC_LIB_EXT .a
-#define BUNDLE_EXT

+ 0 - 295
dtool/Config.OSX.pp

@@ -1,295 +0,0 @@
-//
-// Config.OSX.pp
-//
-// This file defines some custom config variables for the osx
-// platform.  It makes some initial guesses about compiler features,
-// etc.
-//
-
-// *******************************************************************
-// NOTE: you should not attempt to copy this file verbatim as your own
-// personal Config.pp file.  Instead, you should start with an empty
-// Config.pp file, and add lines to it when you wish to override
-// settings given in here.  In the normal ppremake system, this file
-// will always be read first, and then your personal Config.pp file
-// will be read later, which gives you a chance to override the
-// default settings found in this file.  However, if you start by
-// copying the entire file, it will be difficult to tell which
-// settings you have customized, and it will be difficult to upgrade
-// to a subsequent version of Panda.
-// *******************************************************************
-
-#define IS_OSX 1
-
-// Compiler flags
-
-#define CC gcc
-#define CXX g++
-#define C++FLAGS_GEN -ftemplate-depth-30
-
-// Configure for universal binaries on OSX.
-#defer ARCH_FLAGS $[if $[UNIVERSAL_BINARIES],-arch i386 -arch ppc -arch x86_64,]
-#define OSX_CDEFS
-#define OSX_CFLAGS -Wno-deprecated-declarations
-
-// Whether to build for Cocoa, Carbon, or both.  64-bit systems do not
-// have Carbon.  We also disable it for universal and 64-bit builds.
-#define HAVE_COCOA 1
-#defer HAVE_CARBON $[not $[or $[eq $[shell uname -m], x86_64],$[UNIVERSAL_BINARIES]]]
-
-// How to compile a C or C++ file into a .o file.  $[target] is the
-// name of the .o file, $[source] is the name of the source file,
-// $[ipath] is a space-separated list of directories to search for
-// include files, and $[flags] is a list of additional flags to pass
-// to the compiler.
-
-#defer COMPILE_C $[CC] $[CFLAGS_GEN] $[ARCH_FLAGS] $[OSX_CFLAGS] -c -o $[target] $[ipath:%=-I%] $[flags] $[source]
-#defer COMPILE_C++ $[CXX] $[C++FLAGS_GEN] $[ARCH_FLAGS] $[OSX_CFLAGS] -c -o $[target] $[ipath:%=-I%] $[flags] $[source]
-
-// What flags should be passed to both C and C++ compilers to enable
-// debug symbols?  This will be supplied when OPTIMIZE (above) is set
-// to 1, 2, or 3.
-#defer DEBUGFLAGS -g
-
-// What flags should be passed to both C and C++ compilers to enable
-// compiler optimizations?  This will be supplied when OPTIMIZE
-// (above) is set to 2, 3, or 4.
-#defer OPTFLAGS -O2
-
-// By convention, any source file that contains the string _no_opt_ in
-// its filename won't have the above compiler optimizations run for it.
-#defer no_opt $[findstring _no_opt_,$[source]]
-
-// What define variables should be passed to the compilers for each
-// value of OPTIMIZE?  We separate this so we can pass these same
-// options to interrogate, guaranteeing that the correct interfaces
-// are generated.  Do not include -D here; that will be supplied
-// automatically.
-#defer CDEFINES_OPT1 $[EXTRA_CDEFS] $[OSX_CDEFS]
-#defer CDEFINES_OPT2 $[EXTRA_CDEFS] $[OSX_CDEFS]
-#defer CDEFINES_OPT3 $[EXTRA_CDEFS] $[OSX_CDEFS]
-#defer CDEFINES_OPT4 $[EXTRA_CDEFS] $[OSX_CDEFS]
-
-// What additional flags should be passed for each value of OPTIMIZE
-// (above)?  We separate out the compiler-optimization flags, above,
-// so we can compile certain files that give optimizers trouble (like
-// the output of lex and yacc) without them, but with all the other
-// relevant flags.
-#defer CFLAGS_OPT1 $[CDEFINES_OPT1:%=-D%] -Wall $[DEBUGFLAGS]
-#defer CFLAGS_OPT2 $[CDEFINES_OPT2:%=-D%] -Wall $[DEBUGFLAGS] $[if $[no_opt],,$[OPTFLAGS]]
-#defer CFLAGS_OPT3 $[CDEFINES_OPT3:%=-D%] $[DEBUGFLAGS] $[if $[no_opt],,$[OPTFLAGS]]
-#defer CFLAGS_OPT4 $[CDEFINES_OPT4:%=-D%] $[if $[no_opt],,$[OPTFLAGS]]
-
-// What additional flags should be passed to both compilers when
-// building shared (relocatable) sources?  Some architectures require
-// special support for this.
-#defer CFLAGS_SHARED -fPIC
-
-// How to generate a C or C++ executable from a collection of .o
-// files.  $[target] is the name of the binary to generate, and
-// $[sources] is the list of .o files.  $[libs] is a space-separated
-// list of dependent libraries, and $[lpath] is a space-separated list
-// of directories in which those libraries can be found.
-#defer link_bin_opts $[ARCH_FLAGS] $[OSX_CFLAGS] \
- $[if $[not $[LINK_ALL_STATIC]],-undefined dynamic_lookup] \
- -o $[target] $[sources]\
- $[flags]\
- $[lpath:%=-L%] $[libs:%=-l%]\
- $[fpath:%=-Wl,-F%] $[patsubst %,-framework %, $[bin_frameworks]]
-
-#defer LINK_BIN_C $[cc_ld] $[link_bin_opts]
-#defer LINK_BIN_C++ $[cxx_ld] $[link_bin_opts]
-
-// How to generate a static C or C++ library.  $[target] is the
-// name of the library to generate, and $[sources] is the list of .o
-// files that will go into the library.
-#defer STATIC_LIB_C libtool -static -o $[target] $[sources]
-#defer STATIC_LIB_C++ libtool -static -o $[target] $[sources]
-
-// How to run ranlib, if necessary, after generating a static library.
-// $[target] is the name of the library.  Set this to the empty string
-// if ranlib is not necessary on your platform.
-#defer RANLIB ranlib $[target]
-
-// Where to put the so_locations file, used by an Irix MIPSPro
-// compiler, to generate a map of shared library memory locations.
-#defer SO_LOCATIONS $[DTOOL_INSTALL]/etc/so_locations
-
-
-// How to generate a shared C or C++ library.  $[source] and $[target]
-// as above, and $[libs] is a space-separated list of dependent
-// libraries, and $[lpath] is a space-separated list of directories in
-// which those libraries can be found.
-#defer SHARED_LIB_C $[cc_ld] $[ARCH_FLAGS] $[OSX_CFLAGS] -o $[target] -dynamiclib -install_name $[notdir $[target]] $[sources] $[lpath:%=-L%] $[libs:%=-l%] $[patsubst %,-framework %, $[frameworks]]
-#defer SHARED_LIB_C++ $[cxx_ld] $[ARCH_FLAGS] $[OSX_CFLAGS] -undefined dynamic_lookup -dynamic -dynamiclib -o $[target] -dynamiclib -install_name $[notdir $[target]] $[sources] $[lpath:%=-L%] $[libs:%=-l%] $[patsubst %,-framework %, $[frameworks]]
-#defer BUNDLE_LIB_C++ $[cxx_ld] $[ARCH_FLAGS] $[OSX_CFLAGS] -undefined dynamic_lookup -bundle -o $[target] $[sources] $[lpath:%=-L%] $[libs:%=-l%] $[patsubst %,-framework %, $[frameworks]]
-
-// How to install a data file or executable file.  $[local] is the
-// local name of the file to install, and $[dest] is the name of the
-// directory to put it in.
-
-// On Unix systems, we strongly prefer using the install program to
-// install files.  This has nice features like automatically setting
-// the permissions bits, and also is usually clever enough to install
-// a running program without crashing the running instance.  However,
-// it doesn't understand installing a program from a subdirectory,
-// so we have to cd into the source directory first.
-#defer install_dash_p $[if $[KEEP_TIMESTAMPS],-p,]
-#defer INSTALL $[if $[ne $[dir $[local]], ./],cd ./$[dir $[local]] &&] install -m $[INSTALL_UMASK_DATA] $[install_dash_p] $[notdir $[local]] $[dest]/
-#defer INSTALL_PROG $[if $[ne $[dir $[local]], ./],cd ./$[dir $[local]] &&] install -m $[INSTALL_UMASK_PROG] $[install_dash_p] $[notdir $[local]] $[dest]/
-
-
-// Assume that OSX has OpenGL available.
-#define HAVE_GL 1
-
-// What additional flags should we pass to interrogate?
-#define BASE_IGATE_FLAGS -D__FLT_EVAL_METHOD__=0  -D__const=const -Dvolatile -Dmutable -D__LITTLE_ENDIAN__ -D__inline__=inline -D__GNUC__
-#define IGATE_ARCH -D__i386__
-#defer SYSTEM_IGATE_FLAGS $[BASE_IGATE_FLAGS] $[IGATE_ARCH]
-
-// We don't need to worry about defining WORDS_BIGENDIAN (and we
-// shouldn't anyway, since PPC and Intel are different).  We rely on
-// dtoolbase.h to determine this at compilation time.
-#define WORDS_BIGENDIAN 
-
-// Does the C++ compiler support namespaces?
-#define HAVE_NAMESPACE 1
-
-// Does the C++ compiler support ios::binary?
-#define HAVE_IOS_BINARY 1
-
-// How about the typename keyword?
-#define HAVE_TYPENAME 1
-
-// Will the compiler avoid inserting extra bytes in structs between a
-// base struct and its derived structs?  It is safe to define this
-// false if you don't know, but if you know that you can get away with
-// this you may get a tiny performance gain by defining this true.
-// If you define this true incorrectly, you will get lots of
-// assertion failures on execution.
-#define SIMPLE_STRUCT_POINTERS
-
-// Does gettimeofday() take only one parameter?
-#define GETTIMEOFDAY_ONE_PARAM
-
-// Do we have getopt() and/or getopt_long_only() built into the
-// system?
-#define HAVE_GETOPT 1
-#define HAVE_GETOPT_LONG_ONLY 
-
-// Are the above getopt() functions defined in getopt.h, or somewhere else?
-#define PHAVE_GETOPT_H 1
-
-// Can we determine the terminal width by making an ioctl(TIOCGWINSZ) call?
-#define IOCTL_TERMINAL_WIDTH 1
-
-// Do the system headers define a "streamsize" typedef?  How about the
-// ios::binary enumerated value?  And other ios typedef symbols like
-// ios::openmode and ios::fmtflags?
-#define HAVE_STREAMSIZE 1
-#define HAVE_IOS_BINARY 1
-#define HAVE_IOS_TYPEDEFS 1
-
-// Can we safely call getenv() at static init time?
-#define STATIC_INIT_GETENV 1
-
-// Can we read the file /proc/self/* to determine our
-// environment variables at static init time?
-#define HAVE_PROC_SELF_EXE
-#define HAVE_PROC_SELF_MAPS
-#define HAVE_PROC_SELF_ENVIRON
-#define HAVE_PROC_SELF_CMDLINE
-
-// Do we have a global pair of argc/argv variables that we can read at
-// static init time?  Should we prototype them?  What are they called?
-#define HAVE_GLOBAL_ARGV
-#define PROTOTYPE_GLOBAL_ARGV
-#define GLOBAL_ARGV __Argv
-#define GLOBAL_ARGC __Argc
-
-// Should we include <iostream> or <iostream.h>?  Define PHAVE_IOSTREAM
-// to nonempty if we should use <iostream>, or empty if we should use
-// <iostream.h>.
-#define PHAVE_IOSTREAM 1
-
-// Do we have a true stringstream class defined in <sstream>?
-#define PHAVE_SSTREAM 1
-
-// Does fstream::open() require a third parameter, specifying the
-// umask?  Versions of gcc prior to 3.2 had this.
-#define HAVE_OPEN_MASK
-
-// Do we have the lockf() function available?
-#define HAVE_LOCKF 1
-
-// Do the compiler or system libraries define wchar_t for you?
-#define HAVE_WCHAR_T 1
-
-// Does <string> define the typedef wstring?  Most do, but for some
-// reason, versions of gcc before 3.0 didn't do this.
-#define HAVE_WSTRING 1
-
-// Do we have <new>?
-#define PHAVE_NEW 1
-
-// Do we have <io.h>?
-#define PHAVE_IO_H
-
-// Do we have <malloc.h>?
-#define PHAVE_MALLOC_H
-
-// Do we have <alloca.h>?
-#define PHAVE_ALLOCA_H 1
-
-// Do we have <locale.h>?
-#define PHAVE_LOCALE_H 1
-
-// Do we have <string.h>?
-#define PHAVE_STRING_H 1
-
-// Do we have <stdlib.h>?
-#define PHAVE_STDLIB_H 1
-
-// Do we have <limits.h>?
-#define PHAVE_LIMITS_H 1
-
-// Do we have <minmax.h>?
-#define PHAVE_MINMAX_H
-
-// Do we have <sys/types.h>?
-#define PHAVE_SYS_TYPES_H 1
-#define PHAVE_SYS_TIME_H 1
-
-// Do we have <unistd.h>?
-#define PHAVE_UNISTD_H 1
-
-// Do we have <utime.h>?
-#define PHAVE_UTIME_H 1
-
-// Do we have <dirent.h>?
-#define PHAVE_DIRENT_H 1
-
-// Do we have <glob.h> (and do we want to use it instead of dirent.h)?
-#define PHAVE_GLOB_H 1
-
-// Do we have <sys/soundcard.h> (and presumably a Linux-style audio
-// interface)?
-#define PHAVE_SYS_SOUNDCARD_H 1
-
-// Do we have <ucontext.h> (and therefore makecontext() / swapcontext())?
-#define PHAVE_UCONTEXT_H 1
-
-// Do we have RTTI (and <typeinfo>)?
-#define HAVE_RTTI 1
-
-// Do we have <stdint.h>?
-#define PHAVE_STDINT_H 1
-
-// The dynamic library file extension (usually .so .dll or .dylib):
-#define DYNAMIC_LIB_EXT .dylib
-#define STATIC_LIB_EXT .a
-
-// If you need to build .so files in addition to .dylibs, declare this
-// too.  Python 2.4 on OSX 10.4 seems to require this (it won't import
-// a .dylib file directly).
-//#define BUNDLE_EXT .so

+ 0 - 171
dtool/Config.Win32.pp

@@ -1,171 +0,0 @@
-//
-// Config.Win32.pp
-//
-// This file defines some custom config variables for the Windows
-// platform, using MS VC++.  It makes some initial guesses about
-// compiler features, etc.
-//
-
-// *******************************************************************
-// NOTE: you should not attempt to copy this file verbatim as your own
-// personal Config.pp file.  Instead, you should start with an empty
-// Config.pp file, and add lines to it when you wish to override
-// settings given in here.  In the normal ppremake system, this file
-// will always be read first, and then your personal Config.pp file
-// will be read later, which gives you a chance to override the
-// default settings found in this file.  However, if you start by
-// copying the entire file, it will be difficult to tell which
-// settings you have customized, and it will be difficult to upgrade
-// to a subsequent version of Panda.
-// *******************************************************************
-
-// What additional flags should we pass to interrogate?
-#define SYSTEM_IGATE_FLAGS -longlong __int64 -D_X86_ -DWIN32_VC -D"_declspec(param)=" -D"__declspec(param)=" -D_near  -D_far -D__near  -D__far -D_WIN32 -D__stdcall -Dvolatile -Dmutable -DWIN32
-
-// Additional flags to pass to the Tau instrumentor.
-#define TAU_INSTRUMENTOR_FLAGS -DTAU_USE_C_API -DPROFILING_ON -DWIN32_VC -D_WIN32 -D__cdecl= -D__stdcall= -D__fastcall= -D__i386 -D_MSC_VER=1310 -D_W64=  -D_INTEGRAL_MAX_BITS=64 --exceptions --late_tiebreaker --no_class_name_injection --no_warnings --restrict --microsoft --new_for_init
-
-// Is the platform big-endian (like an SGI workstation) or
-// little-endian (like a PC)?  Define this to the empty string to
-// indicate little-endian, or nonempty to indicate big-endian.
-#define WORDS_BIGENDIAN
-
-// Does the C++ compiler support namespaces?
-#define HAVE_NAMESPACE 1
-
-// Does the C++ compiler support ios::binary?
-#define HAVE_IOS_BINARY 1
-
-// How about the typename keyword?
-#define HAVE_TYPENAME 1
-
-// Will the compiler avoid inserting extra bytes in structs between a
-// base struct and its derived structs?  It is safe to define this
-// false if you don't know, but if you know that you can get away with
-// this you may get a tiny performance gain by defining this true.
-// If you define this true incorrectly, you will get lots of
-// assertion failures on execution.
-#define SIMPLE_STRUCT_POINTERS 1
-
-// Does gettimeofday() take only one parameter?
-#define GETTIMEOFDAY_ONE_PARAM
-
-// Do we have getopt() and/or getopt_long_only() built into the
-// system?
-#define HAVE_GETOPT
-#define HAVE_GETOPT_LONG_ONLY
-
-// Are the above getopt() functions defined in getopt.h, or somewhere else?
-#define PHAVE_GETOPT_H
-
-// Can we determine the terminal width by making an ioctl(TIOCGWINSZ) call?
-#define IOCTL_TERMINAL_WIDTH
-
-// Do the system headers define a "streamsize" typedef?  How about the
-// ios::binary enumerated value?  And other ios typedef symbols like
-// ios::openmode and ios::fmtflags?
-#define HAVE_STREAMSIZE 1
-#define HAVE_IOS_BINARY 1
-#define HAVE_IOS_TYPEDEFS 1
-
-// Can we safely call getenv() at static init time?
-#define STATIC_INIT_GETENV 1
-
-// Can we read the file /proc/self/* to determine our
-// environment variables at static init time?
-#define HAVE_PROC_SELF_EXE
-#define HAVE_PROC_SELF_MAPS
-#define HAVE_PROC_SELF_ENVIRON
-#define HAVE_PROC_SELF_CMDLINE
-
-// Do we have a global pair of argc/argv variables that we can read at
-// static init time?  Should we prototype them?  What are they called?
-#define HAVE_GLOBAL_ARGV 1
-#define PROTOTYPE_GLOBAL_ARGV
-#define GLOBAL_ARGV __argv
-#define GLOBAL_ARGC __argc
-
-// Should we include <iostream> or <iostream.h>?  Define PHAVE_IOSTREAM
-// to nonempty if we should use <iostream>, or empty if we should use
-// <iostream.h>.
-#define PHAVE_IOSTREAM 1
-
-// Do we have a true stringstream class defined in <sstream>?
-#define PHAVE_SSTREAM 1
-
-// Does fstream::open() require a third parameter, specifying the
-// umask?
-#define HAVE_OPEN_MASK
-
-// Do we have the lockf() function available?
-#define HAVE_LOCKF 1
-
-// Do the compiler or system libraries define wchar_t for you?
-#define HAVE_WCHAR_T 1
-
-// Does <string> define the typedef wstring?  Most do, but for some
-// reason, versions of gcc before 3.0 didn't do this.
-#define HAVE_WSTRING 1
-
-// Do we have <new>?
-#define PHAVE_NEW 1
-
-// Do we have <io.h>?
-#define PHAVE_IO_H 1
-
-// Do we have <malloc.h>?
-#define PHAVE_MALLOC_H 1
-
-// Do we have <alloca.h>?
-#define PHAVE_ALLOCA_H
-
-// Do we have <locale.h>?
-#define PHAVE_LOCALE_H
-
-// Do we have <string.h>?
-#define PHAVE_STRING_H 1
-
-// Do we have <stdlib.h>?
-#define PHAVE_STDLIB_H
-
-// Do we have <limits.h>?
-#define PHAVE_LIMITS_H
-
-// Do we have <minmax.h>?
-#define PHAVE_MINMAX_H 1
-
-// Do we have <sys/types.h>?
-#define PHAVE_SYS_TYPES_H 1
-#define PHAVE_SYS_TIME_H
-
-// Do we have <unistd.h>?
-#define PHAVE_UNISTD_H
-
-// Do we have <utime.h>?
-#define PHAVE_UTIME_H
-
-// Do we have <dirent.h>?
-#define PHAVE_DIRENT_H
-
-// Do we have <sys/soundcard.h> (and presumably a Linux-style audio
-// interface)?
-#define PHAVE_SYS_SOUNDCARD_H
-
-// Do we have <ucontext.h> (and therefore makecontext() / swapcontext())?
-#define PHAVE_UCONTEXT_H
-
-// Do we have RTTI (and <typeinfo>)?
-#define HAVE_RTTI 1
-
-// Do we have <stdint.h>?
-#define PHAVE_STDINT_H
-
-// can Intel C++ build this directory successfully (if not, change CC to msvc)
-#define NOT_INTEL_BUILDABLE false
-
-// The dynamic library file extension (usually .so .dll or .dylib):
-#define DYNAMIC_LIB_EXT .dll
-#define STATIC_LIB_EXT .lib
-#define BUNDLE_EXT
-  
-

+ 0 - 171
dtool/Config.Win64.pp

@@ -1,171 +0,0 @@
-//
-// Config.Win64.pp
-//
-// This file defines some custom config variables for the Windows
-// platform, using MS VC++.  It makes some initial guesses about
-// compiler features, etc.
-//
-
-// *******************************************************************
-// NOTE: you should not attempt to copy this file verbatim as your own
-// personal Config.pp file.  Instead, you should start with an empty
-// Config.pp file, and add lines to it when you wish to override
-// settings given in here.  In the normal ppremake system, this file
-// will always be read first, and then your personal Config.pp file
-// will be read later, which gives you a chance to override the
-// default settings found in this file.  However, if you start by
-// copying the entire file, it will be difficult to tell which
-// settings you have customized, and it will be difficult to upgrade
-// to a subsequent version of Panda.
-// *******************************************************************
-
-// What additional flags should we pass to interrogate?
-#define SYSTEM_IGATE_FLAGS -longlong __int64 -D_X64_ -DWIN64_VC -D"_declspec(param)=" -D"__declspec(param)=" -D_near  -D_far -D__near  -D__far -D_WIN32 -D_WIN64 -D__stdcall -Dvolatile -Dmutable -DWIN64
-
-// Additional flags to pass to the Tau instrumentor.
-#define TAU_INSTRUMENTOR_FLAGS -DTAU_USE_C_API -DPROFILING_ON -DWIN64_VC -D_WIN32 -D_WIN64 -D__cdecl= -D__stdcall= -D__fastcall= -D__i386 -D_MSC_VER=1310 -D_W64=  -D_INTEGRAL_MAX_BITS=64 --exceptions --late_tiebreaker --no_class_name_injection --no_warnings --restrict --microsoft --new_for_init
-
-// Is the platform big-endian (like an SGI workstation) or
-// little-endian (like a PC)?  Define this to the empty string to
-// indicate little-endian, or nonempty to indicate big-endian.
-#define WORDS_BIGENDIAN
-
-// Does the C++ compiler support namespaces?
-#define HAVE_NAMESPACE 1
-
-// Does the C++ compiler support ios::binary?
-#define HAVE_IOS_BINARY 1
-
-// How about the typename keyword?
-#define HAVE_TYPENAME 1
-
-// Will the compiler avoid inserting extra bytes in structs between a
-// base struct and its derived structs?  It is safe to define this
-// false if you don't know, but if you know that you can get away with
-// this you may get a tiny performance gain by defining this true.
-// If you define this true incorrectly, you will get lots of
-// assertion failures on execution.
-#define SIMPLE_STRUCT_POINTERS 1
-
-// Does gettimeofday() take only one parameter?
-#define GETTIMEOFDAY_ONE_PARAM
-
-// Do we have getopt() and/or getopt_long_only() built into the
-// system?
-#define HAVE_GETOPT
-#define HAVE_GETOPT_LONG_ONLY
-
-// Are the above getopt() functions defined in getopt.h, or somewhere else?
-#define PHAVE_GETOPT_H
-
-// Can we determine the terminal width by making an ioctl(TIOCGWINSZ) call?
-#define IOCTL_TERMINAL_WIDTH
-
-// Do the system headers define a "streamsize" typedef?  How about the
-// ios::binary enumerated value?  And other ios typedef symbols like
-// ios::openmode and ios::fmtflags?
-#define HAVE_STREAMSIZE 1
-#define HAVE_IOS_BINARY 1
-#define HAVE_IOS_TYPEDEFS 1
-
-// Can we safely call getenv() at static init time?
-#define STATIC_INIT_GETENV 1
-
-// Can we read the file /proc/self/* to determine our
-// environment variables at static init time?
-#define HAVE_PROC_SELF_EXE
-#define HAVE_PROC_SELF_MAPS
-#define HAVE_PROC_SELF_ENVIRON
-#define HAVE_PROC_SELF_CMDLINE
-
-// Do we have a global pair of argc/argv variables that we can read at
-// static init time?  Should we prototype them?  What are they called?
-#define HAVE_GLOBAL_ARGV 1
-#define PROTOTYPE_GLOBAL_ARGV
-#define GLOBAL_ARGV __argv
-#define GLOBAL_ARGC __argc
-
-// Should we include <iostream> or <iostream.h>?  Define PHAVE_IOSTREAM
-// to nonempty if we should use <iostream>, or empty if we should use
-// <iostream.h>.
-#define PHAVE_IOSTREAM 1
-
-// Do we have a true stringstream class defined in <sstream>?
-#define PHAVE_SSTREAM 1
-
-// Does fstream::open() require a third parameter, specifying the
-// umask?
-#define HAVE_OPEN_MASK
-
-// Do we have the lockf() function available?
-#define HAVE_LOCKF 1
-
-// Do the compiler or system libraries define wchar_t for you?
-#define HAVE_WCHAR_T 1
-
-// Does <string> define the typedef wstring?  Most do, but for some
-// reason, versions of gcc before 3.0 didn't do this.
-#define HAVE_WSTRING 1
-
-// Do we have <new>?
-#define PHAVE_NEW 1
-
-// Do we have <io.h>?
-#define PHAVE_IO_H 1
-
-// Do we have <malloc.h>?
-#define PHAVE_MALLOC_H 1
-
-// Do we have <alloca.h>?
-#define PHAVE_ALLOCA_H
-
-// Do we have <locale.h>?
-#define PHAVE_LOCALE_H
-
-// Do we have <string.h>?
-#define PHAVE_STRING_H 1
-
-// Do we have <stdlib.h>?
-#define PHAVE_STDLIB_H
-
-// Do we have <limits.h>?
-#define PHAVE_LIMITS_H
-
-// Do we have <minmax.h>?
-#define PHAVE_MINMAX_H 1
-
-// Do we have <sys/types.h>?
-#define PHAVE_SYS_TYPES_H 1
-#define PHAVE_SYS_TIME_H
-
-// Do we have <unistd.h>?
-#define PHAVE_UNISTD_H
-
-// Do we have <utime.h>?
-#define PHAVE_UTIME_H
-
-// Do we have <dirent.h>?
-#define PHAVE_DIRENT_H
-
-// Do we have <sys/soundcard.h> (and presumably a Linux-style audio
-// interface)?
-#define PHAVE_SYS_SOUNDCARD_H
-
-// Do we have <ucontext.h> (and therefore makecontext() / swapcontext())?
-#define PHAVE_UCONTEXT_H
-
-// Do we have RTTI (and <typeinfo>)?
-#define HAVE_RTTI 1
-
-// Do we have <stdint.h>?
-#define PHAVE_STDINT_H
-
-// can Intel C++ build this directory successfully (if not, change CC to msvc)
-#define NOT_INTEL_BUILDABLE false
-
-// The dynamic library file extension (usually .so .dll or .dylib):
-#define DYNAMIC_LIB_EXT .dll
-#define STATIC_LIB_EXT .lib
-#define BUNDLE_EXT
-  
-

+ 0 - 1237
dtool/Config.pp

@@ -1,1237 +0,0 @@
-//
-// dtool/Config.pp
-//
-// This file defines certain configuration variables that are written
-// into the various make scripts.  It is processed by ppremake (along
-// with the Sources.pp files in each of the various directories) to
-// generate build scripts appropriate to each environment.
-//
-
-// *******************************************************************
-// NOTE: you should not attempt to copy this file verbatim as your own
-// personal Config.pp file.  Instead, you should start with an empty
-// Config.pp file, and add lines to it when you wish to override
-// settings given in here.  In the normal ppremake system, this file
-// will always be read first, and then your personal Config.pp file
-// will be read later, which gives you a chance to override the
-// default settings found in this file.  However, if you start by
-// copying the entire file, it will be difficult to tell which
-// settings you have customized, and it will be difficult to upgrade
-// to a subsequent version of Panda.
-// *******************************************************************
-
-//
-// ppremake is capable of generating makefiles for Unix compilers such
-// as gcc or SGI's MipsPRO compiler, as well as for Windows
-// environments like Microsoft's Visual C++.  It can also,
-// potentially, generate Microsoft Developer's Studio project files
-// directly, although we haven't written the scripts to do this yet.
-// In principle, it can be extended to generate suitable build script
-// files for any number of different build environments.
-//
-// All of these build scripts can be tuned for a particular
-// environment via this file.  This is the place for the user to
-// specify which external packages are installed and where, or to
-// enable or disable certain optional features.  However, it is
-// suggested that rather than modify this file directly, you create a
-// custom file in your home directory and there redefine whatever
-// variables are appropriate, and set the environment variable
-// PPREMAKE_CONFIG to refer to it.  In this way, you can easily get an
-// updated source tree (including a new Config.pp) without risking
-// accidentally losing your customizations.  This also avoids having
-// to redefine the same variables in different packages (for instance,
-// in dtool and in panda).
-//
-// The syntax in this file resembles some hybrid between C++
-// preprocessor declarations and GNU make variables.  This is the same
-// syntax used in the various ppremake system configure files; it's
-// designed to be easy to use as a macro language to generate
-// makefiles and their ilk.
-//
-// Some of the variables below are defined using the #define command,
-// and others are defined using #defer.  The two are very similar in
-// their purpose; the difference is that, if the variable definition
-// includes references to other variables (e.g. $[varname]), then
-// #define will evaluate all of the other variable references
-// immediately and store the resulting expansion, while #defer will
-// store only the variable references themselves, and expand them when
-// the variable is later referenced.  It is very similar to the
-// relationship between := and = in GNU Make.
-
-// In general, #defer is used in this file, to allow the user to
-// redefine critical variables in his or her own Config.pp file.
-
-
-
-// What kind of build scripts are we generating?  This selects a
-// suitable template file from the ppremake system files.  The
-// allowable choices, at present, are:
-//
-//  unix      - Generate makefiles suitable for most Unix platforms.
-//  msvc      - Generate Visual C++ project files (still a work in progress)
-//  nmake     - Generate makefiles for Microsoft Visual C++, using
-//              Microsoft's nmake utility.
-//  gmsvc     - Generate makefiles similar to the above, using Microsoft
-//              Visual C++, but uses the Cygwin-supplied GNU make
-//              instead of Microsoft nmake.  This is potentially
-//              faster if you have multiple CPU's, since it supports
-//              distributed make.  It's a tiny bit slower if you're
-//              not taking advantage of distributed make, because of
-//              the overhead associated with Cygwin fork() calls.
-
-#if $[eq $[PLATFORM], Win32]
-  #define BUILD_TYPE nmake
-#elif $[eq $[PLATFORM], Cygwin]
-  #define BUILD_TYPE gmsvc
-#elif $[OSX_PLATFORM]
-  #define BUILD_TYPE unix
-#else
-  #define BUILD_TYPE unix
-#endif
-
-// What is the default install directory for all trees in the Panda
-// suite?  The default value for this variable is provided by
-// ppremake; on Unix machines it is the value of --prefix passed in to
-// the configure script, and on Windows machines the default is
-// hardcoded in config_msvc.h to C:\Panda3d.
-
-// You may also override this for a particular tree by defining a
-// variable name like DTOOL_INSTALL or PANDA_INSTALL.  (The
-// INSTALL_DIR variable will have no effect if you are using the
-// ctattach tools to control your attachment to the trees; but this
-// will be the case only if you are a member of the VR Studio.)
-
-// #define INSTALL_DIR /usr/local/panda
-
-// If you intend to use Panda only as a Python module, you may find
-// the following define useful (but you should put in the correct path
-// to site-packages within your own installed Python).  This will
-// install the Panda libraries into the standard Python search space
-// so that they can be accessed as Python modules.  (Also see the
-// PYTHON_IPATH variable, below.)
-
-// If you don't do this, you can still use Panda as a Python module,
-// but you must put /usr/local/panda/lib (or $INSTALL_DIR/lib) on
-// your PYTHONPATH.
-
-// #define INSTALL_LIB_DIR /usr/lib/python2.6/site-packages
-
-
-// The character used to separate components of an OS-specific
-// directory name depends on the platform (it is '/' on Unix, '\' on
-// Windows).  That character selection is hardcoded into Panda and
-// cannot be changed here.  (Note that an internal Panda filename
-// always uses the forward slash, '/', to separate the components of a
-// directory name.)
-
-// There's a different character used to separate the complete
-// directory names in a search path specification.  On Unix, the
-// normal convention is ':', on Windows, it has to be ';', because the
-// colon is already used to mark the drive letter.  This character is
-// selectable here.  Most users won't want to change this.  If
-// multiple characters are placed in this string, any one of them may
-// be used as a separator character.
-#define DEFAULT_PATHSEP $[if $[WINDOWS_PLATFORM],;,:]
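
As a concrete illustration (the directory names are hypothetical), a search path written with the default separator would look like this on each platform:

    // Unix:     /usr/local/panda/etc:/home/user/prc
    // Windows:  C:\Panda3D\etc;C:\mygame\etc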
-
-// What level of compiler optimization/debug symbols should we build?
-// The various optimize levels are defined as follows:
-//
-//   1 - No compiler optimizations, debug symbols, debug heap, lots of checks
-//   2 - Full compiler optimizations, debug symbols, debug heap, lots of checks
-//   3 - Full compiler optimizations, full debug symbols, fewer checks
-//   4 - Full optimizations, no debug symbols, and asserts removed
-//
-#define OPTIMIZE 3
-
-// On OSX, you may or may not want to compile universal binaries.
-
-// Turning this option on allows your compiled version of Panda to run
-// on any version of OSX (PPC or Intel-based), but it will also
-// increase the compilation time, as well as the resulting binary
-// size.  I believe you have to be building on an Intel-based platform
-// to generate universal binaries using this technique.  This option
-// has no effect on non-OSX platforms.
-#define UNIVERSAL_BINARIES
-
-// Panda uses prc files for runtime configuration.  There are many
-// compiled-in options to customize the behavior of the prc config
-// system; most users won't need to change any of them.  Feel free to
-// skip over all of the PRC_* variables defined here.
-
-// The default behavior is to search for files names *.prc in the
-// directory specified by the PRC_DIR environment variable, and then
-// to search along all of the directories named by the PRC_PATH
-// environment variable.  Either of these variables might be
-// undefined; if both of them are undefined, the default is to search
-// in the directory named here by DEFAULT_PRC_DIR.
-
-// By default, we specify the install/etc dir, which is where the
-// system-provided PRC files get copied to.
-#defer DEFAULT_PRC_DIR $[INSTALL_DIR]/etc
-
-// You can specify the names of the environment variables that are
-// used to specify the search location(s) for prc files at runtime.
-// These are space-separated lists of environment variable names.
-// Specify empty string for either one of these to disable the
-// feature.  For instance, redefining PRC_DIR_ENVVARS here to
-// PANDA_PRC_DIR would cause the environment variable $PANDA_PRC_DIR
-// to be consulted at startup instead of the default value of
-// $PRC_DIR.
-#define PRC_DIR_ENVVARS PRC_DIR
-#define PRC_PATH_ENVVARS PRC_PATH
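
A minimal sketch of the override described above, as it might appear in a personal Config.pp:

    // Consult $PANDA_PRC_DIR instead of $PRC_DIR at startup:
    #define PRC_DIR_ENVVARS PANDA_PRC_DIR
    // Or disable the environment-variable lookup entirely:
    // #define PRC_DIR_ENVVARS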
-
-// You can specify the name of the file(s) to search for in the above
-// paths to be considered a config file.  This should be a
-// space-separated list of filename patterns.  This is *.prc by
-// default; normally there's no reason to change this.
-#define PRC_PATTERNS *.prc
-
-// You can optionally encrypt your prc file(s) to help protect them
-// from curious eyes.  You have to specify the encryption key, which
-// gets hard-coded into the executable.  (This feature provides mere
-// obfuscation, not real security, since the encryption key can
-// potentially be extracted by a hacker.)  This requires building with
-// OpenSSL (see below).
-#define PRC_ENCRYPTED_PATTERNS *.prc.pe
-#define PRC_ENCRYPTION_KEY ""
-
-// One unusual feature of config is the ability to execute one or more
-// of the files it discovers as if it were a program, and then treat
-// the output of this program as a prc file.  If you want to use this
-// feature, define this variable to the filename pattern or patterns
-// for such executable-style config programs (e.g. *prc.exe).  This
-// can be the same as the above if you like this sort of ambiguity; in
-// that case, config will execute the file if it appears to be
-// executable; otherwise, it will simply read it.
-#define PRC_EXECUTABLE_PATTERNS
-
-// If you do use the above feature, you'll need another environment
-// variable that specifies additional arguments to pass to the
-// executable programs.  The default definition, given here, makes
-// that variable be $PRC_EXECUTABLE_ARGS.  Sorry, the same arguments
-// must be supplied to all executables in a given runtime session.
-#define PRC_EXECUTABLE_ARGS_ENVVAR PRC_EXECUTABLE_ARGS
-
-// You can implement signed prc files, if you require this advanced
-// feature.  This allows certain config variables to be set only by a
-// prc file that has been provided by a trusted source.  To do this,
-// first install and compile Dtool with OpenSSL (below) and run the
-// program make-prc-key, and then specify here the output filename
-// generated by that program, and then recompile Dtool (ppremake; make
-// install).
-#define PRC_PUBLIC_KEYS_FILENAME
-
-// By default, the signed-prc feature, above, is enabled only for a
-// release build (OPTIMIZE = 4).  In a normal development environment
-// (OPTIMIZE < 4), any prc file can set any config variable, whether
-// or not it is signed.  Set this variable true (nonempty) or false
-// (empty) to explicitly enable or disable this feature.
-#defer PRC_RESPECT_TRUST_LEVEL $[= $[OPTIMIZE],4]
-
-// If trust level is in effect, this specifies the default trust level
-// for any legacy (Dconfig) config variables (that is, variables
-// created using the config.GetBool(), etc. interface, rather than the
-// newer ConfigVariableBool interface).
-#defer PRC_DCONFIG_TRUST_LEVEL 0
-
-// If trust level is in effect, you may globally increment the
-// (mis)trust level of all variables by the specified amount.
-// Incrementing this value by 1 will cause all variables to require at
-// least a level 1 signature.
-#define PRC_INC_TRUST_LEVEL 0
-
-// Similarly, the descriptions are normally saved only in a
-// development build, not in a release build.  Set this value true to
-// explicitly save them anyway.
-#defer PRC_SAVE_DESCRIPTIONS $[< $[OPTIMIZE],4]
-
-// This is the end of the PRC variable customization section.  The
-// remaining variables are of general interest to everyone.
-
-
-// You may define this to build or develop the plugin.
-//#define HAVE_P3D_PLUGIN 1
-
-// You may define both of these to build or develop the Panda3D
-// rtdist, the environment packaged up for distribution with the
-// plugin.
-//#define PANDA_PACKAGE_VERSION local_dev
-//#define PANDA_PACKAGE_HOST_URL http://some.url/
-#defer HAVE_P3D_RTDIST $[PANDA_PACKAGE_HOST_URL]
-
-
-
-// NOTE: In the following, to indicate "yes" to a yes/no question,
-// define the variable to be a nonempty string.  To indicate "no",
-// define the variable to be an empty string.
-
-// Many of the HAVE_* variables are defined in terms of expressions
-// based on the paths and library names, etc., defined above.  These
-// are defined using the "defer" command, so that they are not
-// evaluated right away, giving the user an opportunity to redefine
-// the variables they depend on, or to redefine the HAVE_* variables
-// themselves (you can explicitly define a HAVE_* variable to some
-// nonempty string to force the package to be marked as installed).
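
For instance (the paths are hypothetical), a personal Config.pp could point a package at a nonstandard location and explicitly force its HAVE_ flag on:

    #define JPEG_IPATH /opt/libjpeg/include
    #define JPEG_LPATH /opt/libjpeg/lib
    // Mark the package as installed regardless of the libtest probe:
    #define HAVE_JPEG 1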
-
-
-// Do you want to generate a Python-callable interrogate interface?
-// This is only necessary if you plan to make calls into Panda from a
-// program written in Python.  This is done only if HAVE_PYTHON,
-// below, is also true.
-#define INTERROGATE_PYTHON_INTERFACE 1
-
-// Define this true to use the new interrogate feature to generate
-// Python-native objects directly, rather than requiring a separate
-// FFI step.  This loads and runs much more quickly than the original
-// mechanism.  Define this false (that is, empty) to use the original
-// interfaces.
-#define PYTHON_NATIVE 1
-
-// Do you want to generate a C-callable interrogate interface?  This
-// generates an interface similar to the Python interface above, with
-// a C calling convention.  It should be useful for most other kinds
-// of scripting language; the VR Studio used to use this to make calls
-// into Panda from Squeak.  This is not presently used by any VR
-// Studio code.
-#define INTERROGATE_C_INTERFACE
-
-// Do you even want to build interrogate at all?  This is the program
-// that reads our C++ source files and generates one of the above
-// interfaces.  If you won't be building the interfaces, you don't
-// need the program.
-#defer HAVE_INTERROGATE $[or $[INTERROGATE_PYTHON_INTERFACE],$[INTERROGATE_C_INTERFACE]]
-
-// What additional options should be passed to interrogate when
-// generating either of the above two interfaces?  Generally, you
-// probably don't want to mess with this.
-#define INTERROGATE_OPTIONS -fnames -string -refcount -assert
-
-// What's the name of the interrogate binary to run?  The default
-// specified is the one that is built as part of DTOOL.  If you have a
-// prebuilt binary standing by (for instance, one built opt4), specify
-// its name instead.
-#define INTERROGATE interrogate
-#define INTERROGATE_MODULE interrogate_module
-
-// What is the name of the C# compiler binary?
-#define CSHARP csc
-
-// This defines the include path to the Eigen linear algebra library.
-// If this is provided, Panda will use this library as the fundamental
-// implementation of its own linmath library; otherwise, it will use
-// its own internal implementation.  The primary advantage of using
-// Eigen is SSE2 support, which is only activated if LINMATH_ALIGN
-// is also enabled.  (However, activating LINMATH_ALIGN does
-// constrain most objects in Panda to 16-byte alignment, which could
-// impact memory usage on very-low-memory platforms.)  Currently
-// experimental.
-#define EIGEN_IPATH 
-#defer EIGEN_CFLAGS $[if $[WINDOWS_PLATFORM],/arch:SSE2,-msse2]
-#defer HAVE_EIGEN $[isdir $[EIGEN_IPATH]/Eigen]
-#define LINMATH_ALIGN 1
-
-// Is Python installed, and should Python interfaces be generated?  If
-// Python is installed, which directory is it in?
-#define PYTHON_IPATH /usr/include/python2.6
-#define PYTHON_LPATH
-#define PYTHON_FPATH
-#define PYTHON_COMMAND python
-#defer PYTHON_DEBUG_COMMAND $[PYTHON_COMMAND]$[if $[WINDOWS_PLATFORM],_d]
-#define PYTHON_FRAMEWORK
-#defer HAVE_PYTHON $[or $[PYTHON_FRAMEWORK],$[isdir $[PYTHON_IPATH]]]
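
For example, a personal Config.pp might point these at a different Python installation (the version and paths here are hypothetical):

    #define PYTHON_IPATH /usr/include/python2.7
    #define PYTHON_COMMAND python2.7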
-
-// By default, we'll assume the user only wants to run with Debug
-// python if he has to--that is, on Windows when building a debug build.
-#defer USE_DEBUG_PYTHON $[and $[< $[OPTIMIZE],3],$[WINDOWS_PLATFORM]]
-
-// Define the default set of libraries to be instrumented by
-// genPyCode.  You may wish to add to this list to add your own
-// libraries, or if you want to use some of the more obscure
-// interfaces like libpandaegg and libpandafx.
-#defer GENPYCODE_LIBS libpandaexpress libpanda libpandaphysics libp3direct libpandafx libp3vision $[if $[HAVE_ODE],libpandaode] $[if $[HAVE_VRPN],libp3vrpn]
-
-// Normally, Python source files are copied into the INSTALL_LIB_DIR
-// defined above, along with the compiled C++ library objects, when
-// you make install.  If you prefer not to copy these Python source
-// files, but would rather run them directly out of the source
-// directory (presumably so you can develop them and make changes
-// without having to reinstall), comment out this definition and put
-// your source directory on your PYTHONPATH.
-#define INSTALL_PYTHON_SOURCE 1
-
-// Do you want to compile in support for tracking memory usage?  This
-// enables you to define the variable "track-memory-usage" at runtime
-// to help track memory leaks, and also report total memory usage on
-// PStats.  There is some small overhead for having this ability
-// available, even if it is unused.
-#defer DO_MEMORY_USAGE $[<= $[OPTIMIZE], 3]
-
-// This option compiles in support for simulating network delay via
-// the min-lag and max-lag prc variables.  It adds a tiny bit of
-// overhead even when it is not activated, so it is typically enabled
-// only in a development build.
-#defer SIMULATE_NETWORK_DELAY $[<= $[OPTIMIZE], 3]
-
-// This option compiles in support for immediate-mode OpenGL
-// rendering.  Since this is normally useful only for researching
-// buggy drivers, and since there is a tiny bit of per-primitive
-// overhead to have this option available even if it is unused, it is
-// by default enabled only in a development build.  This has no effect
-// on DirectX rendering.
-#defer SUPPORT_IMMEDIATE_MODE $[<= $[OPTIMIZE], 3]
-
-// These are two optional alternative memory-allocation schemes
-// available within Panda.  You can experiment with either of them to
-// see if they give better performance than the system malloc(), but
-// at the time of this writing, it doesn't appear that they do.
-#define USE_MEMORY_DLMALLOC
-#define USE_MEMORY_PTMALLOC2
-
-// Set this true if you prefer to use the system malloc library even
-// if 16-byte alignment must be performed on top of it, wasting up to
-// 30% of memory usage.  If you do not set this, and 16-byte alignment
-// is required and not provided by the system malloc library, then an
-// alternative malloc system (above) will be used instead.
-#define MEMORY_HOOK_DO_ALIGN
-
-// Panda contains some experimental code to compile for IPhone.  This
-// requires the Apple IPhone SDK, which is currently only available
-// for OS X platforms.  Set this to either "iPhoneSimulator" or
-// "iPhoneOS".  Note that this is still *experimental* and incomplete!
-// Don't enable this unless you know what you're doing!
-#define BUILD_IPHONE
-
-// Panda contains some experimental code to compile for Android.  This
-// requires the Google Android NDK.
-// Besides BUILD_ANDROID, you'll also have to set ANDROID_NDK_HOME
-// to the location of the Android NDK directory.  ANDROID_NDK_HOME may
-// not contain any spaces.
-// Furthermore, ANDROID_ABI can be set to armeabi, armeabi-v7a, x86,
-// or mips, depending on which architecture should be targeted.
-#define ANDROID_NDK_HOME
-#define ANDROID_ABI armeabi
-#define ANDROID_STL gnustl_shared
-#define ANDROID_PLATFORM android-9
-#define ANDROID_ARCH arm
-#defer ANDROID_TOOLCHAIN $[if $[eq $[ANDROID_ARCH],arm],arm-linux-androideabi]
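
A minimal sketch of these Android overrides in a personal Config.pp, assuming a hypothetical NDK location (which must not contain spaces):

    // Along with defining BUILD_ANDROID as described above:
    #define ANDROID_NDK_HOME /opt/android-ndk
    #define ANDROID_ABI armeabi-v7a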
-
-// Do you want to use one of the alternative malloc implementations?
-// This is almost always a good idea on Windows, where the standard
-// malloc implementation appears to be pretty poor, but probably
-// doesn't matter much on Linux (which is likely to implement
-// ptmalloc2 anyway).  We always define this by default on Windows; on
-// Linux, we define it by default only when DO_MEMORY_USAGE is enabled
-// (since in that case, we'll be paying the overhead for the extra
-// call anyway) or when HAVE_THREADS is not defined (since the
-// non-thread-safe dlmalloc is a tiny bit faster than the system
-// library).
-
-// In hindsight, let's not enable this at all.  It just causes
-// problems.
-//#defer ALTERNATIVE_MALLOC $[or $[WINDOWS_PLATFORM],$[DO_MEMORY_USAGE],$[not $[HAVE_THREADS]]]
-#define ALTERNATIVE_MALLOC
-
-// Define this true to use the DELETED_CHAIN macros, which support
-// fast re-use of existing allocated blocks, minimizing the low-level
-// calls to malloc() and free() for frequently-created and -deleted
-// objects.  There's usually no reason to set this false, unless you
-// suspect a bug in Panda's memory management code.
-#define USE_DELETED_CHAIN 1
-
-// Define this if you are building on Windows 7 or better, and you
-// want your Panda build to run only on Windows 7 or better, and you
-// need to use the Windows touchinput interfaces.
-#define HAVE_WIN_TOUCHINPUT
-
-// Define this true to build the low-level native network
-// implementation.  Normally this should be set true.
-#define WANT_NATIVE_NET 1
-#define NATIVE_NET_IPATH
-#define NATIVE_NET_LPATH
-#define NATIVE_NET_LIBS $[if $[WINDOWS_PLATFORM],wsock32.lib]
-
-// Do you want to build the high-level network interface?  This layers
-// on top of the low-level native_net interface, specified above.
-// Normally, if you build NATIVE_NET, you will also build NET.
-#defer HAVE_NET $[WANT_NATIVE_NET]
-
-// Do you want to build the egg loader?  Usually there's no reason to
-// avoid building this, unless you really want to make a low-footprint
-// build (such as, for instance, for the iPhone).
-#define HAVE_EGG 1
-
-// Is a third-party STL library installed, and where?  This is only
-// necessary if the default include and link lines that come with the
-// compiler don't provide adequate STL support.  At least some form of
-// STL is absolutely required in order to build Panda.
-#define STL_IPATH
-#define STL_LPATH
-#define STL_CFLAGS
-#define STL_LIBS
-
-// Does your STL library provide hashed associative containers like
-// hash_map and hash_set?  Define this true if you have a nonstandard
-// STL library that provides these, like Visual Studio .NET's.  (These
-// hashtable containers are not part of the C++ standard yet, but the
-// Dinkum STL library that VC7 ships with includes a preliminary
-// implementation that Panda can optionally use.)  For now, we assume
-// you have this by default only on a Windows platform.
-
-// On second thought, it turns out that this API is still too
-// volatile.  The interface seems to have changed with the next
-// version of .NET, and it didn't present any measurable performance
-// gain anyway.  Never mind.
-#define HAVE_STL_HASH
-
-// Is OpenSSL installed, and where?
-#define OPENSSL_IPATH
-#define OPENSSL_LPATH
-#define OPENSSL_LIBS ssl crypto
-#defer HAVE_OPENSSL $[libtest $[OPENSSL_LPATH],$[OPENSSL_LIBS]]
-
-// Define this true to include the OpenSSL code to report verbose
-// error messages when they occur.
-#defer REPORT_OPENSSL_ERRORS $[< $[OPTIMIZE], 4]
-
-// Is libjpeg installed, and where?
-#define JPEG_IPATH
-#define JPEG_LPATH
-#define JPEG_LIBS jpeg
-#defer HAVE_JPEG $[libtest $[JPEG_LPATH],$[JPEG_LIBS]]
-
-// Some versions of libjpeg did not provide jpegint.h.  Redefine this
-// to empty if you lack this header file.
-#define PHAVE_JPEGINT_H 1
-
-// Do you want to compile video-for-linux?  If you have an older Linux
-// system with incompatible headers, define this to the empty string.
-#defer HAVE_VIDEO4LINUX $[IS_LINUX]
-
-// Is libpng installed, and where?
-#define PNG_IPATH
-#define PNG_LPATH
-#define PNG_LIBS png
-#defer HAVE_PNG $[libtest $[PNG_LPATH],$[PNG_LIBS]]
-
-// Is libtiff installed, and where?
-#define TIFF_IPATH
-#define TIFF_LPATH
-#define TIFF_LIBS tiff z
-#defer HAVE_TIFF $[libtest $[TIFF_LPATH],$[TIFF_LIBS]]
-
-// These image file formats don't require the assistance of a
-// third-party library to read and write, so there's normally no
-// reason to disable them in the build, unless you are looking to
-// reduce the memory footprint.
-#define HAVE_SGI_RGB 1
-#define HAVE_TGA 1
-#define HAVE_IMG 1
-#define HAVE_SOFTIMAGE_PIC 1
-#define HAVE_BMP 1
-#define HAVE_PNM 1
-
-// Is libtar installed, and where?  This is used to optimize patch
-// generation against tar files.
-#define TAR_IPATH
-#define TAR_LPATH
-#define TAR_LIBS tar
-#defer HAVE_TAR $[libtest $[TAR_LPATH],$[TAR_LIBS]]
-
-
-// Is libfftw installed, and where?
-#define FFTW_IPATH /opt/local/include
-#define FFTW_LPATH /opt/local/lib
-#define FFTW_LIBS rfftw fftw
-#defer HAVE_FFTW $[libtest $[FFTW_LPATH],$[FFTW_LIBS]]
-// This is because darwinport's version of the fftw lib is called
-// drfftw instead of rfftw.
-#defer PHAVE_DRFFTW_H $[libtest $[FFTW_LPATH],drfftw]
-
-// Is libsquish installed, and where?
-#define SQUISH_IPATH /usr/local/include
-#define SQUISH_LPATH /usr/local/lib
-#define SQUISH_LIBS squish
-#defer HAVE_SQUISH $[libtest $[SQUISH_LPATH],$[SQUISH_LIBS]]
-
-
-// Is Berkeley DB installed, and where?  Presently, this is only used
-// for some applications (egg-optchar in particular) in Pandatool, and
-// it is completely optional there.  If available, egg-optchar takes
-// advantage of it to allow the optimization of very large numbers of
-// models in one pass, that might otherwise exceed available memory.
-
-// Actually, this isn't even true anymore.  At the time of this writing,
-// no system in Panda makes use of Berkeley DB.  So don't bother to
-// define this.
-#define BDB_IPATH
-#define BDB_LPATH
-#define BDB_LIBS db db_cxx
-#defer HAVE_BDB $[libtest $[BDB_LPATH],$[BDB_LIBS]]
-
-// Is Cg installed, and where?
-#if $[WINDOWS_PLATFORM]
-  #define CG_IPATH
-  #define CG_LPATH
-  #define CG_LIBS cg.lib
-#else
-  #define CG_IPATH
-  #define CG_LPATH
-  #define CG_LIBS Cg
-#endif
-#define CG_FRAMEWORK
-#defer HAVE_CG $[or $[CG_FRAMEWORK],$[libtest $[CG_LPATH],$[CG_LIBS]]]
-
-// Is CgGL installed, and where?
-#defer CGGL_IPATH $[CG_IPATH]
-#defer CGGL_LPATH $[CG_LPATH]
-#define CGGL_LIBS $[if $[WINDOWS_PLATFORM],cgGL.lib,CgGL]
-#defer HAVE_CGGL $[or $[CGGL_FRAMEWORK],$[and $[HAVE_CG],$[libtest $[CGGL_LPATH],$[CGGL_LIBS]]]]
-
-// Is CgDX9 installed, and where?
-#defer CGDX9_IPATH $[CG_IPATH]
-#defer CGDX9_LPATH $[CG_LPATH]
-#define CGDX9_LIBS $[if $[WINDOWS_PLATFORM],cgD3D9.lib,CgDX9]
-#defer HAVE_CGDX9 $[and $[HAVE_CG],$[libtest $[CGDX9_LPATH],$[CGDX9_LIBS]]]
-
-// Is CgDX10 installed, and where?
-#defer CGDX10_IPATH $[CG_IPATH]
-#defer CGDX10_LPATH $[CG_LPATH]
-#define CGDX10_LIBS $[if $[WINDOWS_PLATFORM],cgD3D10.lib,CgDX10]
-#defer HAVE_CGDX10 $[and $[HAVE_CG],$[libtest $[CGDX10_LPATH],$[CGDX10_LIBS]]]
-
-// Is VRPN installed, and where?
-#define VRPN_IPATH
-#define VRPN_LPATH
-#define VRPN_LIBS
-#defer HAVE_VRPN $[libtest $[VRPN_LPATH],$[VRPN_LIBS]]
-
-// Is HELIX installed, and where?
-#define HELIX_IPATH
-#define HELIX_LPATH
-#define HELIX_LIBS
-#defer HAVE_HELIX $[libtest $[HELIX_LPATH],$[HELIX_LIBS]]
-
-// Is ZLIB installed, and where?
-#define ZLIB_IPATH
-#define ZLIB_LPATH
-#define ZLIB_LIBS z
-#defer HAVE_ZLIB $[libtest $[ZLIB_LPATH],$[ZLIB_LIBS]]
-
-// Is OpenGL installed, and where?
-#defer GL_IPATH /usr/include
-#defer GL_LPATH
-#defer GL_LIBS
-#if $[WINDOWS_PLATFORM]
-  #define GL_LIBS opengl32.lib
-#elif $[OSX_PLATFORM]
-  #defer GL_FRAMEWORK OpenGL
-#else
-  #defer GL_LPATH /usr/X11R6/lib
-  #defer GL_LIBS GL
-#endif
-#defer HAVE_GL $[libtest $[GL_LPATH],$[GL_LIBS]]
-
-// If you are having trouble linking in OpenGL extension functions at
-// runtime for some reason, you can set this variable.  This defines
-// the minimum runtime version of OpenGL that Panda will require.
-// Setting it to a higher version will compile in hard references to
-// the extension functions provided by that OpenGL version and below,
-// which may reduce runtime portability to other systems, but it will
-// avoid issues with getting extension function pointers.  It also, of
-// course, requires you to install the OpenGL header files and
-// compile-time libraries appropriate to the version you want to
-// compile against.
-
-// The variable is the major, minor version of OpenGL, separated by a
-// space (instead of a dot).  Thus, "1 1" means OpenGL version 1.1.
-#define MIN_GL_VERSION 1 1
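
Following that convention, a hypothetical override requiring at least OpenGL 2.1 would read:

    // Major and minor version separated by a space, so "2 1" means OpenGL 2.1:
    #define MIN_GL_VERSION 2 1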
-
-// Do you want to build tinydisplay, a light and fast software
-// renderer built into Panda, based on TinyGL?  This isn't as
-// full-featured as Mesa, but it is many times faster, and in fact
-// competes favorably with hardware-accelerated integrated graphics
-// cards for raw speed (though the hardware-accelerated output looks
-// better).
-#define HAVE_TINYDISPLAY 1
-
-// Is OpenGL ES 1.x installed, and where? This is a minimal subset of
-// OpenGL for mobile devices.
-#define GLES_IPATH
-#define GLES_LPATH
-#define GLES_LIBS GLES_cm
-#defer HAVE_GLES $[libtest $[GLES_LPATH],$[GLES_LIBS]]
-
-// OpenGL ES 2.x is a version of OpenGL ES without the fixed-function
-// pipeline; everything is programmable there.
-#define GLES2_IPATH
-#define GLES2_LPATH
-#define GLES2_LIBS GLESv2
-#defer HAVE_GLES2 $[libtest $[GLES2_LPATH],$[GLES2_LIBS]]
-
-// EGL is like GLX, but for OpenGL ES.
-#defer EGL_IPATH
-#defer EGL_LPATH
-#defer EGL_LIBS EGL
-#defer HAVE_EGL $[libtest $[EGL_LPATH],$[EGL_LIBS]]
-
-// The SDL library is useful only for tinydisplay, and is not even
-// required for that, as tinydisplay is also supported natively on
-// each supported platform.
-#define SDL_IPATH
-#define SDL_LPATH
-#define SDL_LIBS
-#defer HAVE_SDL $[libtest $[SDL_LPATH],$[SDL_LIBS]]
-
-// X11 may need to be linked against for tinydisplay, but probably
-// only on a Linux platform.
-#define X11_IPATH
-#define X11_LPATH /usr/X11R6/lib
-#define X11_LIBS X11
-#defer HAVE_X11 $[and $[UNIX_PLATFORM],$[libtest $[X11_LPATH],$[X11_LIBS]]]
-
-// This defines if we have XF86DGA installed. This enables smooth
-// FPS-style mouse input in x11display when mouse mode M_relative is used.
-#define XF86DGA_IPATH /usr/include/X11/extensions
-#define XF86DGA_LPATH /usr/lib
-#define XF86DGA_LIBS Xxf86dga
-#defer HAVE_XF86DGA $[libtest $[XF86DGA_LPATH],$[XF86DGA_LIBS]]
-
-// This defines if we have XRANDR installed. This
-// enables resolution switching in x11display.
-#define XRANDR_IPATH /usr/include/X11/extensions
-#define XRANDR_LPATH /usr/lib
-#define XRANDR_LIBS Xrandr
-#defer HAVE_XRANDR $[libtest $[XRANDR_LPATH],$[XRANDR_LIBS]]
-
-// This defines if we have XCURSOR installed. This
-// enables custom cursor support in x11display.
-#define XCURSOR_IPATH /usr/include/X11/extensions
-#define XCURSOR_LPATH /usr/lib
-#define XCURSOR_LIBS Xcursor
-#defer HAVE_XCURSOR $[libtest $[XCURSOR_LPATH],$[XCURSOR_LIBS]]
-
-// How about GLX?
-#define GLX_IPATH
-#define GLX_LPATH
-#defer HAVE_GLX $[and $[HAVE_GL],$[HAVE_X11]]
-
-// glXGetProcAddress() is the function used to query OpenGL extensions
-// under X.  However, this function is itself an extension function,
-// leading to a chicken-and-egg problem.  One approach is to compile
-// in a hard reference to the function, another is to pull the
-// function address from the dynamic runtime.  Each has its share of
-// problems.  Panda's default behavior is to pull it from the dynamic
-// runtime; define this to compile in a reference to the function.
-// This is only relevant on platforms using OpenGL under X (for
-// instance, Linux).
-#define LINK_IN_GLXGETPROCADDRESS
-
-// Should we try to build the WGL interface?
-#defer HAVE_WGL $[and $[HAVE_GL],$[WINDOWS_PLATFORM]]
-
-// These interfaces are for OSX only.
-#define HAVE_COCOA
-#define HAVE_CARBON
-
-// Is DirectX9 available, and should we try to build with it?
-#define DX9_IPATH
-#define DX9_LPATH
-#define DX9_LIBS d3d9.lib d3dx9.lib dxerr9.lib
-#defer HAVE_DX9 $[libtest $[DX9_LPATH],$[DX9_LIBS]]
-
-// Set this nonempty to use <dxerr.h> instead of <dxerr9.h>.  The
-// choice between the two is largely based on which version of the
-// DirectX SDK(s) you might have installed.  The generic library is
-// the default for 64-bit Windows.
-#defer USE_GENERIC_DXERR_LIBRARY $[WIN64_PLATFORM]
-
-// Do we have at least OpenCV 2.3?
-#define OPENCV_VER_23 1
-
-// Is OpenCV installed, and where?
-#define OPENCV_IPATH
-#define OPENCV_LPATH
-#defer OPENCV_LIBS $[if $[OPENCV_VER_23], opencv_highgui opencv_core, cv highgui cxcore]
-#defer HAVE_OPENCV $[libtest $[OPENCV_LPATH],$[OPENCV_LIBS]]
-
-// Is FFMPEG installed, and where?
-#define FFMPEG_IPATH /usr/include/ffmpeg
-#define FFMPEG_LPATH
-#define FFMPEG_LIBS $[if $[WINDOWS_PLATFORM],avcodec.lib avformat.lib avutil.lib swscale.lib swresample.lib,avcodec avformat avutil swscale swresample]
-#defer HAVE_FFMPEG $[libtest $[FFMPEG_LPATH],$[FFMPEG_LIBS]]
-// Define this if you compiled ffmpeg with libswscale enabled.
-#define HAVE_SWSCALE 1
-#define HAVE_SWRESAMPLE 1
-
-// Is ODE installed, and where?
-#define ODE_IPATH
-#define ODE_LPATH
-#define ODE_LIBS $[if $[WINDOWS_PLATFORM],ode.lib,ode]
-#define ODE_CFLAGS
-#defer HAVE_ODE $[libtest $[ODE_LPATH],$[ODE_LIBS]]
-
-// Is Awesomium installed, and where?
-#define AWESOMIUM_IPATH
-#define AWESOMIUM_LPATH
-#if $[OSX_PLATFORM]
-  #define AWESOMIUM_LIBS
-#else
-  #define AWESOMIUM_LIBS $[if $[WINDOWS_PLATFORM],awesomium.lib,awesomium]
-#endif
-#define AWESOMIUM_FRAMEWORK
-#defer HAVE_AWESOMIUM $[libtest $[AWESOMIUM_LPATH],$[AWESOMIUM_LIBS]]
-
-// Mozilla's so-called Gecko SDK, a.k.a. Xulrunner SDK, implements
-// NPAPI.  So does the OSX WebKit framework.  Either implementation
-// can be used to build a web plugin for Firefox, Safari, Chrome, and
-// other non-Microsoft browsers.
-#define NPAPI_IPATH
-#define NPAPI_LPATH
-#define NPAPI_LIBS
-#define NPAPI_FRAMEWORK
-#define HAVE_NPAPI
-
-#define HAVE_ACTIVEX $[WINDOWS_PLATFORM]
-
-// Do you want to build the DirectD tools for starting Panda clients
-// remotely?  This only affects the direct tree.  Enabling this may
-// cause libdirect.dll to fail to load on Win98 clients.
-#define HAVE_DIRECTD
-
-// If your system supports the Posix threads interface
-// (pthread_create(), etc.), define this true.
-#define HAVE_POSIX_THREADS $[and $[isfile /usr/include/pthread.h],$[not $[WINDOWS_PLATFORM]]]
-
-// Do you want to build in support for threading (multiprocessing)?
-// Building in support for threading will enable Panda to take
-// advantage of multiple CPU's if you have them (and if the OS
-// supports kernel threads running on different CPU's), but it will
-// slightly slow down Panda for the single CPU case, so this is not
-// enabled by default.
-#define HAVE_THREADS 1
-#define THREADS_LIBS $[if $[not $[WINDOWS_PLATFORM]],pthread]
-
-// If you have enabled threading support with HAVE_THREADS, the
-// default is to use OS-provided threading constructs, which usually
-// allows for full multiprogramming support (i.e. the program can take
-// advantage of multiple CPU's).  On the other hand, compiling in this
-// full OS-provided support can impose some substantial runtime
-// overhead, making the application run slower on a single-CPU
-// machine.  To avoid this overhead, but still gain some of the basic
-// functionality of threads (such as support for asynchronous model
-// loads), define SIMPLE_THREADS true in addition to HAVE_THREADS.
-// This will compile in a homespun cooperative threading
-// implementation that runs strictly on one CPU, adding very little
-// overhead over plain single-threaded code.
-#define SIMPLE_THREADS
-
-// If this is defined true, then OS threading constructs will be used
-// (if available) to perform context switches in the SIMPLE_THREADS
-// model, instead of strictly user-space calls like setjmp/longjmp.  A
-// mutex is used to ensure that only one thread runs at a time, so the
-// normal SIMPLE_THREADS optimizations still apply, and the normal
-// SIMPLE_THREADS scheduler is used to switch between threads (instead
-// of the OS scheduler).  This may be more portable and more reliable,
-// but it is a weird hybrid between user-space threads and OS-provided
-// threads.  This has meaning only if SIMPLE_THREADS is also defined.
-#define OS_SIMPLE_THREADS 1
-
-// Whether threading is defined or not, you might want to validate the
-// thread and synchronization operations.  With threading enabled,
-// defining this will also enable deadlock detection and logging.
-// Without threading enabled, defining this will simply verify that a
-// mutex is not recursively locked.  There is, of course, additional
-// run-time overhead for these tests.
-#defer DEBUG_THREADS $[<= $[OPTIMIZE], 2]
-
-// Do you want to compile in support for pipelining?  This adds code
-// to maintain a different copy of the scene graph for each thread in
-// the render pipeline, so that app, cull, and draw may each safely
-// run in a separate thread, allowing maximum parallelization of CPU
-// processing for the frame.  Enabling this option does not *require*
-// you to use separate threads for rendering, but makes it possible.
-// However, compiling this option in does add some additional runtime
-// overhead even if it is not used.  By default, we enable pipelining
-// whenever threads are enabled, assuming that if you have threads,
-// you also want to use pipelining.  We also enable it at OPTIMIZE
-// level 1, since that enables additional runtime checks.
-#defer DO_PIPELINING $[or $[<= $[OPTIMIZE], 1],$[HAVE_THREADS]]
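-// For example, with the defaults above (HAVE_THREADS is set to 1),
-// this expression comes out true at every OPTIMIZE level; clearing
-// HAVE_THREADS would limit pipelining to OPTIMIZE 1 builds.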
-
-// Define this true to implement mutexes and condition variables via
-// user-space spinlocks, instead of via OS-provided constructs.  This
-// is almost never a good idea, except possibly in very specialized
-// cases when you are building Panda for a particular application, on
-// a particular platform, and you are sure you won't have more threads
-// than CPU's.  Even then, OS-based locking is probably better.
-#define MUTEX_SPINLOCK
-
-// Define this to use the PandaFileStream interface for pifstream,
-// pofstream, and pfstream.  This is a customized file buffer that may
-// have slightly better newline handling, but its primary benefit is
-// that it supports SIMPLE_THREADS better by blocking just the active
-// "thread" when I/O is delayed, instead of blocking the entire
-// process.  Normally, there's no reason to turn this off, unless you
-// suspect a bug in Panda.
-#define USE_PANDAFILESTREAM 1
-
-// Do you want to build the PStats interface, for graphical run-time
-// performance statistics?  This requires NET to be available.  By
-// default, we don't build PStats when OPTIMIZE = 4, although this is
-// possible.
-#defer DO_PSTATS $[or $[and $[HAVE_NET],$[< $[OPTIMIZE], 4]], $[DO_PSTATS]]
-
-// Do you want to type-check downcasts?  This is a good idea during
-// development, but does impose some run-time overhead.
-#defer DO_DCAST $[< $[OPTIMIZE], 3]
-
-// Do you want to build the debugging tools for recording and
-// visualizing intersection tests by the collision system?  Enabling
-// this increases runtime collision overhead just a tiny bit.
-#defer DO_COLLISION_RECORDING $[< $[OPTIMIZE], 4]
-
-// Do you want to include the "debug" and "spam" Notify messages?
-// Normally, these are stripped out when we build with OPTIMIZE = 4, but
-// sometimes it's useful to keep them around.  Redefine this in your
-// own Config.pp to achieve that.
-#defer NOTIFY_DEBUG $[< $[OPTIMIZE], 4]
-
-// Do you want to build the audio interface?
-#define HAVE_AUDIO 1
-
-// The Tau profiler provides a multiplatform, thread-aware profiler.
-// To use it, define USE_TAU to 1, and set TAU_MAKEFILE to the
-// filename that contains the Tau-provided Makefile for your platform.
-// Then rebuild the code with ppremake; make install.  Alternatively,
-// instead of setting TAU_MAKEFILE, you can also define TAU_ROOT and
-// PDT_ROOT, to point to the root directory of the tau and pdtoolkit
-// installations, respectively; then the individual Tau components
-// will be invoked directly.  This is especially useful on Windows,
-// where there is no Tau Makefile.
-#define TAU_MAKEFILE
-#define TAU_ROOT
-#define PDT_ROOT
-#define TAU_OPTS -optKeepFiles -optRevert
-#define TAU_CFLAGS
-#define USE_TAU
-
-// Info for the RAD game tools, Miles Sound System
-// Note that this may be overwritten in the wintools Config.pp.
-#define RAD_MSS_IPATH /usr/include/Miles6/include
-#define RAD_MSS_LPATH /usr/lib/Miles6/lib/win
-#define RAD_MSS_LIBS Mss32
-#defer HAVE_RAD_MSS $[libtest $[RAD_MSS_LPATH],$[RAD_MSS_LIBS]]
-
-// Info for the Fmod audio engine
-#define FMODEX_IPATH /usr/local/fmod/api/inc
-#define FMODEX_LPATH /usr/local/fmod/api/lib
-#define FMODEX_LIBS $[if $[libtest $[FMODEX_LPATH],fmodex64],fmodex64,fmodex]
-#defer HAVE_FMODEX $[libtest $[FMODEX_LPATH],$[FMODEX_LIBS]]
-
-// Info for the OpenAL audio engine
-#define OPENAL_IPATH
-#define OPENAL_LPATH
-#if $[OSX_PLATFORM]
-  #define OPENAL_LIBS
-  #define OPENAL_FRAMEWORK OpenAL
-#else
-  #define OPENAL_LIBS openal
-  #define OPENAL_FRAMEWORK
-#endif
-#defer HAVE_OPENAL $[or $[OPENAL_FRAMEWORK],$[libtest $[OPENAL_LPATH],$[OPENAL_LIBS]]]
-
-// Info for the NVIDIA PhysX SDK
-#define PHYSX_IPATH /usr/include/PhysX/v2.8.3/SDKs/Cooking/include /usr/include/PhysX/v2.8.3/SDKs/Foundation/include /usr/include/PhysX/v2.8.3/SDKs/NxCharacter/include /usr/include/PhysX/v2.8.3/SDKs/Physics/include /usr/include/PhysX/v2.8.3/SDKs/PhysXLoader/include
-#define PHYSX_LPATH /usr/lib/PhysX/v2.8.3
-#define PHYSX_LIBS $[if $[WINDOWS_PLATFORM],PhysXLoader.lib NxCharacter.lib NxCooking.lib NxExtensions.lib,PhysXLoader NxCharacter NxCooking]
-#defer HAVE_PHYSX $[libtest $[PHYSX_LPATH],$[PHYSX_LIBS]]
-
-// Info for the SpeedTree tree and terrain rendering library.  This is
-// a commercial library that specializes in rendering trees and other
-// foliage.
-
-// This may be either "OpenGL" or "DirectX9".  Case is important, due
-// to the naming of the SpeedTree libraries.
-#define SPEEDTREE_API OpenGL
-// The local directory in which the SpeedTree SDK has been installed.
-#define SPEEDTREE_SDK_DIR
-// The default directory in which to find the SpeedTree installation at runtime.
-#defer SPEEDTREE_BIN_DIR $[SPEEDTREE_SDK_DIR]/Bin
-
-#defer SPEEDTREE_IPATH $[SPEEDTREE_SDK_DIR]/Include
-#defer SPEEDTREE_LPATH $[SPEEDTREE_SDK_DIR]/Lib/Windows/VC9$[if $[WIN64_PLATFORM],.x64]
-#defer SPEEDTREE_DEBUG $[if $[< $[OPTIMIZE], 3],_d]
-#defer SPEEDTREE_64 $[if $[WIN64_PLATFORM],64]
-
-// These names are used to build up the names of the SpeedTree libraries.
-#defer SPEEDTREE_VERSION 5.1
-#defer SPEEDTREE_LIB_SUFFIX _v$[SPEEDTREE_VERSION]_VC90MT$[SPEEDTREE_64]_Static$[SPEEDTREE_DEBUG].lib
-#if $[WINDOWS_PLATFORM]
-#defer SPEEDTREE_LIBS SpeedTreeCore$[SPEEDTREE_LIB_SUFFIX] SpeedTreeForest$[SPEEDTREE_LIB_SUFFIX] SpeedTree$[SPEEDTREE_API]Renderer$[SPEEDTREE_LIB_SUFFIX] SpeedTreeRenderInterface$[SPEEDTREE_LIB_SUFFIX] $[if $[eq $[SPEEDTREE_API],OpenGL],glew32.lib glu32.lib]
-#else
-#defer SPEEDTREE_LIBS
-#endif
-#defer HAVE_SPEEDTREE $[isdir $[SPEEDTREE_SDK_DIR]]
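-// As a worked example (assuming the default SPEEDTREE_VERSION of 5.1
-// above): on 64-bit Windows with OPTIMIZE < 3, SPEEDTREE_LIB_SUFFIX
-// expands to _v5.1_VC90MT64_Static_d.lib, so the first library linked
-// would be SpeedTreeCore_v5.1_VC90MT64_Static_d.lib.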
-
-// Is gtk+-2 installed?  This is needed to build the pstats program on
-// Unix (or non-Windows) platforms.  It is also used to provide
-// support for XEmbed for the web plugin system, which is necessary to
-// support Chromium on Linux.
-#define PKG_CONFIG pkg-config
-#define HAVE_GTK
-
-// Do we have Freetype 2.0 (or better)?  If available, this package is
-// used to generate dynamic in-the-world text from font files.
-
-// On Unix, freetype comes with the freetype-config executable, which
-// tells us where to look for the various files.  On Windows, we need to
-// supply this information explicitly.
-#defer FREETYPE_CONFIG $[if $[not $[WINDOWS_PLATFORM]],freetype-config]
-#defer HAVE_FREETYPE $[or $[libtest $[FREETYPE_LPATH],$[FREETYPE_LIBS]],$[bintest $[FREETYPE_CONFIG]]]
-
-#define FREETYPE_CFLAGS
-#define FREETYPE_IPATH
-#define FREETYPE_LPATH
-#define FREETYPE_LIBS
-
-// Define this true to compile in a default font, so every TextNode
-// will always have a font available without requiring the user to
-// specify one.  Define it empty not to do this, saving a few
-// kilobytes on the generated library.  Sorry, you can't pick a
-// particular font to be the default; it's hardcoded in the source
-// (although you can use the text-default-font prc variable to specify
-// a particular font file to load as the default, overriding the
-// compiled-in font).
-#define COMPILE_IN_DEFAULT_FONT 1
-
-// Define this true to compile a special version of Panda to use a
-// "double" floating-precision type for most internal values, such as
-// positions and transforms, instead of the standard single-precision
-// "float" type.  This does not affect the default numeric type of
-// vertices, which is controlled by the runtime config variable
-// vertices-float64.
-#define STDFLOAT_DOUBLE
-
-// We use wxWidgets--the C++ library, not the Python library--for
-// building the application p3dcert, which is needed only when
-// building the plugin/runtime system.  This uses a wx-config program,
-// similar to freetype, above.
-#defer WX_CONFIG $[if $[not $[WINDOWS_PLATFORM]],wx-config]
-#defer HAVE_WX $[or $[libtest $[WX_LPATH],$[WX_LIBS]],$[bintest $[WX_CONFIG]]]
-
-#define WX_CFLAGS
-#define WX_IPATH
-#define WX_LPATH
-#define WX_LIBS
-
-// We use FLTK--the C++ library, not the Python library--for
-// building the application p3dcert, which is needed only when
-// building the plugin/runtime system.  This uses a fltk-config program,
-// similar to freetype, above.
-#defer FLTK_CONFIG $[if $[not $[WINDOWS_PLATFORM]],fltk-config]
-#defer HAVE_FLTK $[or $[libtest $[FLTK_LPATH],$[FLTK_LIBS]],$[bintest $[FLTK_CONFIG]]]
-
-#define FLTK_CFLAGS
-#define FLTK_IPATH
-#define FLTK_LPATH
-#define FLTK_LIBS
-
-// Is Maya installed?  This matters only to programs in PANDATOOL.
-
-// Also, as of Maya 5.0 it seems the Maya library will not compile
-// properly with optimize level 4 set (we get link errors with ostream).
-
-#define MAYA_LOCATION /usr/aw/maya
-#defer MAYA_LIBS $[if $[WINDOWS_PLATFORM],Foundation.lib OpenMaya.lib OpenMayaAnim.lib OpenMayaUI.lib,Foundation OpenMaya OpenMayaAnim OpenMayaUI]
-// Optionally define this to the value of LM_LICENSE_FILE that should
-// be set before invoking Maya.
-#define MAYA_LICENSE_FILE
-#defer HAVE_MAYA $[and $[<= $[OPTIMIZE], 3],$[isdir $[MAYA_LOCATION]/include/maya]]
-// Define this if your version of Maya is earlier than 5.0 (e.g. Maya 4.5).
-#define MAYA_PRE_5_0
-
-#define MAYA2EGG maya2egg
-
-// In the same fashion as the Maya-to-egg converter above, set up the SoftImage-to-egg converter as well.
-#define SOFTIMAGE_LOCATION /c/Softimage/sdk_18sp2/SDK_1.8SP2/SAAPHIRE
-#defer SOFTIMAGE_LIBS SAA.lib
-#defer HAVE_SOFTIMAGE $[isdir $[SOFTIMAGE_LOCATION]/h]
-
-// Is FCollada installed? This is for the daeegg converter.
-#define FCOLLADA_IPATH /usr/local/include/fcollada
-#define FCOLLADA_LPATH /usr/local/lib
-#define FCOLLADA_LIBS FColladaSD
-#defer HAVE_FCOLLADA $[libtest $[FCOLLADA_LPATH],$[FCOLLADA_LIBS]]
-
-// Is the COLLADA DOM installed? This is for the native COLLADA loader.
-// This defines the versions that your copy of COLLADA DOM supports.
-#define COLLADA14DOM_IPATH /usr/local/include/collada-dom /usr/local/include/collada-dom/1.4
-#define COLLADA14DOM_LPATH /usr/local/lib
-#define COLLADA14DOM_LIBS collada14dom xml2 boost_filesystem
-#defer HAVE_COLLADA14DOM $[libtest $[COLLADA14DOM_LPATH],$[COLLADA14DOM_LIBS]]
-
-#define COLLADA15DOM_IPATH /usr/local/include/collada-dom /usr/local/include/collada-dom/1.5
-#define COLLADA15DOM_LPATH /usr/local/lib
-#define COLLADA15DOM_LIBS collada15dom xml2 boost_filesystem
-#defer HAVE_COLLADA15DOM $[libtest $[COLLADA15DOM_LPATH],$[COLLADA15DOM_LIBS]]
-
-// The Assimp library loads various model formats.
-#define ASSIMP_IPATH /usr/local/include/assimp
-#define ASSIMP_LPATH /usr/local/lib
-#define ASSIMP_LIBS assimp
-#define HAVE_ASSIMP $[libtest $[ASSIMP_LPATH],$[ASSIMP_LIBS]]
-
-// Also for the ARToolKit library, for augmented reality
-#define ARTOOLKIT_IPATH
-#define ARTOOLKIT_LPATH
-#define ARTOOLKIT_LIBS $[if $[WINDOWS_PLATFORM],libAR.lib,AR]
-#defer HAVE_ARTOOLKIT $[libtest $[ARTOOLKIT_LPATH],$[ARTOOLKIT_LIBS]]
-
-// libRocket is a GUI library
-#define ROCKET_IPATH /usr/local/include
-#define ROCKET_LPATH /usr/local/lib
-#define ROCKET_LIBS RocketCore RocketDebugger boost_python
-#defer HAVE_ROCKET $[libtest $[ROCKET_LPATH],$[ROCKET_LIBS]]
-#defer HAVE_ROCKET_DEBUGGER $[< $[OPTIMIZE],4]
-// Unset this if you built libRocket without Python bindings
-#defer HAVE_ROCKET_PYTHON $[and $[HAVE_ROCKET],$[HAVE_PYTHON]]
-
-// Bullet is a physics engine
-#define BULLET_IPATH /usr/local/include/bullet
-#define BULLET_LPATH /usr/local/lib
-#if $[WIN64_PLATFORM]
-#define BULLET_LIBS BulletSoftBody_x64.lib BulletDynamics_x64.lib BulletCollision_x64.lib LinearMath_x64.lib
-#elif $[WINDOWS_PLATFORM]
-#define BULLET_LIBS BulletSoftBody.lib BulletDynamics.lib BulletCollision.lib LinearMath.lib
-#else
-#define BULLET_LIBS BulletSoftBody BulletDynamics BulletCollision LinearMath
-#endif
-#defer HAVE_BULLET $[libtest $[BULLET_LPATH],$[BULLET_LIBS]]
-
-// libvorbisfile is used for reading Ogg Vorbis audio files (.ogg).
-#define VORBIS_IPATH
-#define VORBIS_LPATH
-#define VORBIS_LIBS $[if $[WINDOWS_PLATFORM],libogg_static.lib libvorbis_static.lib libvorbisfile_static.lib,ogg vorbis vorbisfile]
-#defer HAVE_VORBIS $[libtest $[VORBIS_LPATH],$[VORBIS_LIBS]]
-
-// Define this to explicitly indicate the given platform string within
-// the resulting Panda runtime.  Normally it is best to leave this
-// undefined, in which case Panda will determine the best value
-// automatically.
-#define DTOOL_PLATFORM
-
-// Define this to generate static libraries and executables, rather than
-// dynamic libraries.
-//#define LINK_ALL_STATIC yes
-
-// The panda source tree is made up of a bunch of component libraries
-// (e.g. express, downloader, pgraph, egg) which are ultimately
-// combined into a smaller group of meta libraries or metalibs
-// (e.g. libpandaexpress, libpanda, libpandaegg).  Depending on your
-// build configuration, these component libraries might have their own
-// existence, or they might disappear completely and be contained
-// entirely within their metalibs.  The former is more convenient for
-// rapid development, while the latter might be more convenient for
-// distribution.
-
-// Define this variable to compile and link each component as a
-// separate library so that the resulting metalibs are small and there
-// are many separate component libraries; leave it undefined to link
-// component object files directly into their containing metalibs so
-// that the resulting metalib files are large and component libraries
-// don't actually exist.  The Windows build has traditionally been built
-// with this cleared (because of the original Win32 STL requirements),
-// while the Unix build has traditionally been built with it set.
-// Changing this from the traditional platform-specific setting is not
-// 100% supported yet.
-#define BUILD_COMPONENTS $[not $[WINDOWS_PLATFORM]]
-
-// Define this to export the templates from the DLL.  This is only
-// meaningful if LINK_ALL_STATIC is not defined, and we are building
-// on Windows.  Some Windows compilers may not support this syntax.
-#defer EXPORT_TEMPLATES yes
-
-// Define this to generate .bat files when a Sources.pp makes a
-// script; leave it clear to generate Unix-style sh scripts.
-#defer MAKE_BAT_SCRIPTS $[eq $[PLATFORM],Win32]
-
-// Define USE_COMPILER to switch the particular compiler that should
-// be used.  A handful of tokens are recognized, depending on BUILD_TYPE.
-// This may also be further customized within Global.$[BUILD_TYPE].pp.
-
-// If BUILD_TYPE is "unix", this may be one of:
-//    GCC    (gcc/g++)
-//    MIPS   (Irix MIPSPro compiler)
-//
-// If BUILD_TYPE is "msvc" or "gmsvc", this may be one of:
-//    MSVC   (Microsoft Visual C++ 6.0)
-//    MSVC7  (Microsoft Visual C++ 7.0)
-//    BOUNDS (BoundsChecker)
-//    INTEL  (Intel C/C++ compiler)
-
-#if $[WINDOWS_PLATFORM]
-  #if $[eq $[USE_COMPILER],]
-    #define USE_COMPILER MSVC7
-  #endif
-#elif $[eq $[PLATFORM], Irix]
-  #define USE_COMPILER MIPS
-#elif $[eq $[PLATFORM], Linux]
-  #define USE_COMPILER GCC
-#elif $[OSX_PLATFORM]
-  #define USE_COMPILER GCC
-#elif $[eq $[PLATFORM], FreeBSD]
-  #define USE_COMPILER GCC
-#endif
-
-// Permission masks to install data and executable files,
-// respectively.  This is only meaningful for Unix systems.
-#define INSTALL_UMASK_DATA 644
-#define INSTALL_UMASK_PROG 755
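-// (In other words, data files are installed rw-r--r-- and programs
-// are installed rwxr-xr-x.)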
-
-// How to invoke bison and flex.  Panda takes advantage of some
-// bison/flex features, and therefore specifically requires bison and
-// flex, not some other versions of yacc and lex.  However, you only
-// need to have these programs if you need to make changes to the
-// bison or flex sources (see the next point, below).
-#defer BISON bison
-#defer FLEX flex
-
-// You may not even have bison and flex installed.  If you don't, no
-// sweat; Panda ships with the pre-generated output of these programs,
-// so you don't need them unless you want to make changes to the
-// grammars themselves (files named *.yxx or *.lxx).
-#defer HAVE_BISON $[bintest $[BISON]]
-
-// How to invoke sed.  A handful of make rules use this.  Since some
-// platforms (specifically, non-Unix platforms like Windows) don't
-// have any kind of sed, ppremake performs some limited sed-like
-// functions.  The default is to use ppremake in this capacity.  In
-// this variable, $[source] is the name of the file to read, $[target]
-// is the name of the file to generate, and $[script] is the one-line
-// sed script to run.
-#defer SED ppremake -s "$[script]" <$[source] >$[target]
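-// For instance, with a hypothetical $[script] of "s/@FOO@/bar/", a
-// $[source] of input.txt, and a $[target] of output.txt, the expanded
-// command line would be:
-//   ppremake -s "s/@FOO@/bar/" <input.txt >output.txt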
-
-// What directory name (within each source directory) should the .o
-// (or .obj) files be written to?  This can be any name, and it can be
-// used to differentiate different builds within the same tree.
-// However, don't define this to be '.', or you will be very sad the
-// next time you run 'make clean'.
-//#defer ODIR Opt$[OPTIMIZE]-$[PLATFORM]$[USE_COMPILER]
-// ODIR_SUFFIX is optional, usually empty
-#defer ODIR Opt$[OPTIMIZE]-$[PLATFORM]$[ODIR_SUFFIX]
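-// For example, a build on Linux at OPTIMIZE 3 (with ODIR_SUFFIX left
-// empty) writes its object files into Opt3-Linux within each source
-// directory.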
-
-
-// What is the normal extension of a compiled object file?
-#if $[WINDOWS_PLATFORM]
-  #define OBJ .obj
-#else
-  #define OBJ .o
-#endif
-
-
-//////////////////////////////////////////////////////////////////////
-// There are also some additional variables that control specific
-// compiler/platform features or characteristics, defined in the
-// platform specific file Config.platform.pp.  Be sure to inspect
-// these variables for correctness too.
-//////////////////////////////////////////////////////////////////////

+ 0 - 722
dtool/LocalSetup.pp

@@ -1,722 +0,0 @@
-//
-// LocalSetup.pp
-//
-// This file contains further instructions to set up the DTOOL package
-// when using ppremake.  In particular, it creates the dtool_config.h
-// file based on the user's selected configure variables.  This script
-// need not execute when BUILD_TYPE is "autoconf"; in this case, the
-// dtool_config.h file will automatically be correctly generated by
-// configure.
-//
-
-#print
-#print Configuring support for the following optional third-party packages:
-#if $[HAVE_EIGEN]
-#print + Eigen linear algebra library
-#if $[LINMATH_ALIGN]
-#print +   (vectorization enabled in build)
-#else
-#print -   (vectorization NOT enabled in build)
-#endif
-#else
-#print - Did not find Eigen linear algebra library
-#endif
-#if $[HAVE_OPENSSL]
-#print + OpenSSL
-#else
-#print - Did not find OpenSSL
-#endif
-#if $[HAVE_JPEG]
-#print + libjpeg
-#else
-#print - Did not find libjpeg
-#endif
-#if $[HAVE_PNG]
-#print + libpng
-#else
-#print - Did not find libpng
-#endif
-#if $[HAVE_TIFF]
-#print + libtiff
-#else
-#print - Did not find libtiff
-#endif
-#if $[HAVE_TAR]
-#print + libtar
-#else
-#print - Did not find libtar
-#endif
-#if $[HAVE_FFTW]
-#print + fftw
-#else
-#print - Did not find fftw
-#endif
-#if $[HAVE_SQUISH]
-#print + squish
-#else
-#print - Did not find squish
-#endif
-#if $[HAVE_CG]
-#print + Nvidia Cg High Level Shading Language
-#else
-#print - Did not find Nvidia Cg High Level Shading Language
-#endif
-#if $[HAVE_CGGL]
-#print + Cg OpenGL API
-#else
-#print - Did not find Cg OpenGL API
-#endif
-#if $[HAVE_CGDX9]
-#print + Cg DX9 API
-#else
-#print - Did not find Cg DX9 API
-#endif
-#if $[HAVE_CGDX10]
-#print + Cg DX10 API
-#else
-#print - Did not find Cg DX10 API
-#endif
-#if $[HAVE_VRPN]
-#print + VRPN
-#else
-#print - Did not find VRPN
-#endif
-#if $[HAVE_ZLIB]
-#print + zlib
-#else
-#print - Did not find zlib
-#endif
-#if $[HAVE_RAD_MSS]
-#print + Miles Sound System
-#else
-#print - Did not find Miles Sound System
-#endif
-#if $[HAVE_FMODEX]
-#print + FMOD Ex sound library
-#else
-#print - Did not find FMOD Ex sound library
-#endif
-#if $[HAVE_OPENAL]
-#print + OpenAL sound library
-#else
-#print - Did not find OpenAL sound library
-#endif
-#if $[HAVE_PHYSX]
-#print + Ageia PhysX
-#else
-#print - Did not find Ageia PhysX
-#endif
-#if $[HAVE_SPEEDTREE]
-#print + SpeedTree
-#else
-#print - Did not find SpeedTree
-#endif
-#if $[HAVE_GTK]
-#print + gtk+-2
-#else
-#print - Did not find gtk+-2
-#endif
-#if $[HAVE_FREETYPE]
-#print + Freetype
-#else
-#print - Did not find Freetype
-#endif
-#if $[HAVE_WX]
-#print + WxWidgets
-#else
-#print - Did not find WxWidgets
-#endif
-#if $[HAVE_FLTK]
-#print + FLTK
-#else
-#print - Did not find FLTK
-#endif
-#if $[HAVE_GL]
-#print + OpenGL
-#else
-#print - Did not find OpenGL
-#endif
-#if $[HAVE_GLES]
-#print + OpenGL ES 1
-#else
-#print - Did not find OpenGL ES 1
-#endif
-#if $[HAVE_GLES2]
-#print + OpenGL ES 2
-#else
-#print - Did not find OpenGL ES 2
-#endif
-#if $[HAVE_DX9]
-#print + DirectX9
-#else
-#print - Did not find DirectX9
-#endif
-#if $[HAVE_TINYDISPLAY]
-#print + Tinydisplay
-#else
-#print - Not building Tinydisplay
-#endif
-//#if $[HAVE_SDL]
-//#print + SDL
-//#else
-//#print - Did not find SDL
-//#endif
-#if $[HAVE_X11]
-#print + X11
-#else
-#print - Did not find X11
-#endif
-#if $[HAVE_OPENCV]
-#print + OpenCV
-#else
-#print - Did not find OpenCV
-#endif
-#if $[HAVE_FFMPEG]
-#print + FFMPEG
-#else
-#print - Did not find FFMPEG
-#endif
-#if $[HAVE_ODE]
-#print + ODE
-#else
-#print - Did not find ODE
-#endif
-#if $[HAVE_AWESOMIUM]
-#print + AWESOMIUM
-#else
-#print - Did not find AWESOMIUM
-#endif
-#if $[HAVE_MAYA]
-#print + OpenMaya
-#else
-#print - Did not find OpenMaya
-#endif
-#if $[HAVE_FCOLLADA]
-#print + FCollada
-#else
-#print - Did not find FCollada
-#endif
-#if $[or $[HAVE_COLLADA14DOM],$[HAVE_COLLADA15DOM]]
-#print + COLLADA DOM
-#else
-#print - Did not find COLLADA DOM
-#endif
-#if $[HAVE_ASSIMP]
-#print + Assimp
-#else
-#print - Did not find Assimp
-#endif
-#if $[HAVE_ARTOOLKIT]
-#print + ARToolKit
-#else
-#print - Did not find ARToolKit
-#endif
-#if $[HAVE_ROCKET]
-#if $[HAVE_ROCKET_PYTHON]
-#print + libRocket with Python bindings
-#else
-#print + libRocket without Python bindings
-#endif
-#else
-#print - Did not find libRocket
-#endif
-#if $[HAVE_BULLET]
-#print + Bullet Physics
-#else
-#print - Did not find Bullet Physics
-#endif
-#if $[HAVE_VORBIS]
-#print + libvorbis (Ogg Vorbis Decoder)
-#else
-#print - Did not find libvorbis (Ogg Vorbis Decoder)
-#endif
-
-#print
-#if $[and $[HAVE_INTERROGATE],$[HAVE_PYTHON]]
-#print Compilation will generate Python interfaces.
-#else
-#print Configuring Panda WITHOUT Python interfaces.
-#endif
-#if $[HAVE_THREADS]
-#if $[SIMPLE_THREADS]
-#print Compilation will include simulated threading support.
-#else
-#if $[DO_PIPELINING]
-#print Compilation will include full, pipelined threading support.
-#else
-#print Compilation will include nonpipelined threading support.
-#endif
-#endif
-#else
-#print Configuring Panda without threading support.
-#endif
-
-#if $[OSX_PLATFORM]
-#if $[UNIVERSAL_BINARIES]
-#print Compilation will create universal binaries.
-#else
-#print Compilation will not create universal binaries.
-#endif
-#endif
-
-#print
-#print See dtool_config.h for more details about the specified configuration.
-
-#print
-
-// We don't include the ppremake version in the first comment line of
-// the output to dtool_config.h, below, to minimize unnecessary
-// complete rebuilds due to an updated ppremake version.
-#output dtool_config.h notouch
-#format straight
-/* dtool_config.h.  Generated automatically by $[PPREMAKE] from $[SOURCEFILE]. */
-
-/* Debug / non-debug symbols.  OPTIMIZE = $[OPTIMIZE] */
-#if $[<= $[OPTIMIZE],2]
-#define _DEBUG 1
-#elif $[= $[OPTIMIZE],4]
-#define NDEBUG 1
-#endif
-$[cdefine _DEBUG]
-$[cdefine NDEBUG]
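-/* For example, a build at OPTIMIZE 2 or below sets _DEBUG here, a build
-   at OPTIMIZE 4 sets NDEBUG instead, and an OPTIMIZE 3 build leaves both
-   unset. */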
-
-/* Define if we have Eigen available. */
-$[cdefine HAVE_EIGEN]
-$[cdefine LINMATH_ALIGN]
-
-/* Define if we have Python installed.  */
-$[cdefine HAVE_PYTHON]
-$[cdefine USE_DEBUG_PYTHON]
-/* Define if we have Python as a framework (Mac OS X).  */
-$[cdefine PYTHON_FRAMEWORK]
-
-/* Define if we have RAD game tools, Miles Sound System installed.  */
-$[cdefine HAVE_RAD_MSS]
-
-/* Define if we have Freetype 2.0 or better available. */
-$[cdefine HAVE_FREETYPE]
-
-/* Define if we want to compile in a default font. */
-$[cdefine COMPILE_IN_DEFAULT_FONT]
-
-/* Define to use doubles for most numbers, instead of single-precision floats. */
-$[cdefine STDFLOAT_DOUBLE]
-
-/* Define if we have Maya available. */
-$[cdefine HAVE_MAYA]
-$[cdefine MAYA_PRE_5_0]
-
-/* Define if we have libRocket available and built with the Rocket Debugger. */
-$[cdefine HAVE_ROCKET_DEBUGGER]
-
-/* Define if we have libRocket available and built with Python support. */
-$[cdefine HAVE_ROCKET_PYTHON]
-
-/* Define if we have SoftImage available. */
-$[cdefine HAVE_SOFTIMAGE]
-
-/* Define if we have FCollada available. */
-$[cdefine HAVE_FCOLLADA]
-
-/* Define if we have ARToolKit available. */
-$[cdefine HAVE_ARTOOLKIT]
-
-/* Define if we have libvorbisfile available. */
-$[cdefine HAVE_VORBIS]
-
-/* Define if we have OpenSSL installed.  */
-$[cdefine HAVE_OPENSSL]
-$[cdefine REPORT_OPENSSL_ERRORS]
-
-/* Define if we have libjpeg installed.  */
-$[cdefine HAVE_JPEG]
-$[cdefine PHAVE_JPEGINT_H]
-
-/* Define to build video-for-linux. */
-$[cdefine HAVE_VIDEO4LINUX]
-
-/* Define if we have libpng installed.  */
-$[cdefine HAVE_PNG]
-
-/* Define if we have libtiff installed.  */
-$[cdefine HAVE_TIFF]
-
-/* Define if we want to build these other image file formats. */
-$[cdefine HAVE_SGI_RGB]
-$[cdefine HAVE_TGA]
-$[cdefine HAVE_IMG]
-$[cdefine HAVE_SOFTIMAGE_PIC]
-$[cdefine HAVE_BMP]
-$[cdefine HAVE_PNM]
-
-/* Define if we have libtar installed.  */
-$[cdefine HAVE_TAR]
-
-/* Define if we have libfftw installed.  */
-$[cdefine HAVE_FFTW]
-
-/* Define if we have libsquish installed.  */
-$[cdefine HAVE_SQUISH]
-
-/* Define if we have Berkeley DB installed.  */
-$[cdefine HAVE_BDB]
-
-/* Define if we have HELIX installed.  */
-$[cdefine HAVE_HELIX]
-
-/* Define if we have CG installed.  */
-$[cdefine HAVE_CG]
-
-/* Define if we have CGGL installed.  */
-$[cdefine HAVE_CGGL]
-
-/* Define if we have CGDX9 installed.  */
-$[cdefine HAVE_CGDX9]
-
-/* Define if we have CGDX10 installed.  */
-$[cdefine HAVE_CGDX10]
-
-/* Define for dxerr.h instead of dxerr9.h. */
-$[cdefine USE_GENERIC_DXERR_LIBRARY]
-
-/* Define if we have zlib installed.  */
-$[cdefine HAVE_ZLIB]
-
-/* Define the preconfigured minimum GL version number.  */
-#if HAVE_GL
-# define MIN_GL_VERSION_MAJOR $[word 1,$[MIN_GL_VERSION]]
-# define MIN_GL_VERSION_MINOR $[word 2,$[MIN_GL_VERSION]]
-#endif
-
-/* Define if we have OpenCV installed and want to build for OpenCV.  */
-$[cdefine HAVE_OPENCV]
-$[cdefine OPENCV_VER_23]
-
-/* Define if we have FFMPEG installed and want to build for FFMPEG.  */
-$[cdefine HAVE_FFMPEG]
-$[cdefine HAVE_SWSCALE]
-$[cdefine HAVE_SWRESAMPLE]
-
-/* Define if we have AWESOMIUM installed and want to build for AWESOMIUM.  */
-$[cdefine HAVE_AWESOMIUM]
-
-/* Define if we have GLX installed and want to build for GLX.  */
-$[cdefine HAVE_GLX]
-
-/* Define if we have EGL installed and want to build for EGL.  */
-$[cdefine HAVE_EGL]
-
-/* Define if we have Windows-GL installed and want to build for Wgl.  */
-$[cdefine HAVE_WGL]
-
-/* Define if we have DirectX installed and want to build for DX.  */
-$[cdefine HAVE_DX9]
-
-/* The choice of generic vs. the specific dxerr library largely
-   depends on which SDK you have installed. */
-$[cdefine USE_GENERIC_DXERR_LIBRARY]
-
-/* Define if we want to build tinydisplay. */
-$[cdefine HAVE_TINYDISPLAY]
-
-/* Define if we have the SDL library. */
-$[cdefine HAVE_SDL]
-
-/* Define if we have X11. */
-$[cdefine HAVE_X11]
-
-/* Define if we have the XFree86-DGA extension. */
-$[cdefine HAVE_XF86DGA]
-
-/* Define if we have the XRandR extension. */
-$[cdefine HAVE_XRANDR]
-
-/* Define if we have the XCursor extension. */
-$[cdefine HAVE_XCURSOR]
-
-/* Define if we want to compile the threading code.  */
-$[cdefine HAVE_THREADS]
-
-/* Define if we want to use fast, user-space simulated threads.  */
-$[cdefine SIMPLE_THREADS]
-
-/* Define if SIMPLE_THREADS should be implemented with the OS-provided
-   threading layer (if available). */
-$[cdefine OS_SIMPLE_THREADS]
-
-/* Define to enable deadlock detection, mutex recursion checks, etc. */
-$[cdefine DEBUG_THREADS]
-
-/* Define to implement mutexes and condition variables via a user-space spinlock. */
-$[cdefine MUTEX_SPINLOCK]
-
-/* Define to enable the PandaFileStream implementation of pfstream etc. */
-$[cdefine USE_PANDAFILESTREAM]
-
-/* Define if we want to compile the net code.  */
-$[cdefine HAVE_NET]
-
-/* Define if we want to compile the egg code.  */
-$[cdefine HAVE_EGG]
-
-/* Define if we want to compile the audio code.  */
-$[cdefine HAVE_AUDIO]
-
-/* Define if we have bison and flex available. */
-$[cdefine HAVE_BISON]
-
-/* Define if we want to use PStats.  */
-$[cdefine DO_PSTATS]
-
-/* Define if we want to type-check downcasts.  */
-$[cdefine DO_DCAST]
-
-/* Define if we want to provide collision system recording and
-   visualization tools. */
-$[cdefine DO_COLLISION_RECORDING]
-
-/* Define if we want to enable track-memory-usage.  */
-$[cdefine DO_MEMORY_USAGE]
-
-/* Define if we want to enable min-lag and max-lag.  */
-$[cdefine SIMULATE_NETWORK_DELAY]
-
-/* Define if we want to allow immediate mode OpenGL rendering.  */
-$[cdefine SUPPORT_IMMEDIATE_MODE]
-
-/* Define for either of the alternative malloc schemes. */
-$[cdefine USE_MEMORY_DLMALLOC]
-$[cdefine USE_MEMORY_PTMALLOC2]
-
-/* Define if we want to compile in support for pipelining.  */
-$[cdefine DO_PIPELINING]
-
-/* Define if we want to keep Notify debug messages around, or undefine
-   to compile them out.  */
-$[cdefine NOTIFY_DEBUG]
-
-/* Define if we want to export template classes from the DLL.  Only
-   makes sense to MSVC++. */
-$[cdefine EXPORT_TEMPLATES]
-
-/* Define if we are linking PANDAPHYSX in with PANDA. */
-$[cdefine LINK_IN_PHYSX]
-
-/* The compiled-in character(s) to expect to separate different
-   components of a path list (e.g. $PRC_PATH). */
-# define DEFAULT_PATHSEP "$[DEFAULT_PATHSEP]"
-
-/* Many of the prc variables are exported by
-   dtool/src/prc/prc_parameters.h.pp, instead of here.  Only those prc
-   variables that must be visible outside of the prc directory are
-   exported here. */
-
-/* The filename that specifies the public keys to import into
-   config. */
-# define PRC_PUBLIC_KEYS_FILENAME "$[unixfilename $[PRC_PUBLIC_KEYS_FILENAME]]"
-#if $[PRC_PUBLIC_KEYS_FILENAME]
-# define PRC_PUBLIC_KEYS_INCLUDE "$[osfilename $[PRC_PUBLIC_KEYS_FILENAME]]"
-#endif
-
-/* Define if you want to save the descriptions for ConfigVariables. */
-$[cdefine PRC_SAVE_DESCRIPTIONS]
-
-
-/* Define if your processor stores words with the most significant
-   byte first (like Motorola and SPARC, unlike Intel and VAX).  */
-$[cdefine WORDS_BIGENDIAN]
-
-/* Define if the C++ compiler uses namespaces.  */
-$[cdefine HAVE_NAMESPACE]
-
-/* Define if fstream::open() accepts a third parameter for umask. */
-$[cdefine HAVE_OPEN_MASK]
-
-/* Define if we have a lockf() function. */
-$[cdefine HAVE_LOCKF]
-
-/* Define if some header file defines wchar_t. */
-$[cdefine HAVE_WCHAR_T]
-
-/* Define if the <string> header file defines wstring. */
-$[cdefine HAVE_WSTRING]
-
-/* Define if the C++ compiler supports the typename keyword.  */
-$[cdefine HAVE_TYPENAME]
-
-/* Define if we can trust the compiler not to insert extra bytes in
-   structs between base structs and derived structs. */
-$[cdefine SIMPLE_STRUCT_POINTERS]
-
-/* Define if we have Dinkumware STL installed.  */
-$[cdefine HAVE_DINKUM]
-
-/* Define if we have STL hash_map etc. available  */
-$[cdefine HAVE_STL_HASH]
-
-/* Define if gettimeofday() takes only one parameter. */
-$[cdefine GETTIMEOFDAY_ONE_PARAM]
-
-/* Define if you have the getopt function.  */
-$[cdefine HAVE_GETOPT]
-
-/* Define if you have the getopt_long_only function.  */
-$[cdefine HAVE_GETOPT_LONG_ONLY]
-
-/* Define if getopt appears in getopt.h.  */
-$[cdefine PHAVE_GETOPT_H]
-
-/* Define if you have ioctl(TIOCGWINSZ) to determine terminal width. */
-$[cdefine IOCTL_TERMINAL_WIDTH]
-
-/* Do the system headers define a "streamsize" typedef? */
-$[cdefine HAVE_STREAMSIZE]
-
-/* Do the system headers define key ios typedefs like ios::openmode
-   and ios::fmtflags? */
-$[cdefine HAVE_IOS_TYPEDEFS]
-
-/* Define if the C++ iostream library defines ios::binary.  */
-$[cdefine HAVE_IOS_BINARY]
-
-/* Can we safely call getenv() at static init time? */
-$[cdefine STATIC_INIT_GETENV]
-
-/* Can we read the file /proc/self/[*] to determine our
-   environment variables at static init time? */
-$[cdefine HAVE_PROC_SELF_EXE]
-$[cdefine HAVE_PROC_SELF_MAPS]
-$[cdefine HAVE_PROC_SELF_ENVIRON]
-$[cdefine HAVE_PROC_SELF_CMDLINE]
-$[cdefine HAVE_PROC_CURPROC_FILE]
-$[cdefine HAVE_PROC_CURPROC_MAP]
-$[cdefine HAVE_PROC_CURPROC_CMDLINE]
-
-/* Do we have a global pair of argc/argv variables that we can read at
-   static init time?  Should we prototype them?  What are they called? */
-$[cdefine HAVE_GLOBAL_ARGV]
-$[cdefine PROTOTYPE_GLOBAL_ARGV]
-$[cdefine GLOBAL_ARGV]
-$[cdefine GLOBAL_ARGC]
-
-/* Define if you have the <io.h> header file.  */
-$[cdefine PHAVE_IO_H]
-
-/* Define if you have the <iostream> header file.  */
-$[cdefine PHAVE_IOSTREAM]
-
-/* Define if you have the <malloc.h> header file.  */
-$[cdefine PHAVE_MALLOC_H]
-
-/* Define if you have the <sys/malloc.h> header file.  */
-$[cdefine PHAVE_SYS_MALLOC_H]
-
-/* Define if you have the <alloca.h> header file.  */
-$[cdefine PHAVE_ALLOCA_H]
-
-/* Define if you have the <locale.h> header file.  */
-$[cdefine PHAVE_LOCALE_H]
-
-/* Define if you have the <string.h> header file.  */
-$[cdefine PHAVE_STRING_H]
-
-/* Define if you have the <stdlib.h> header file.  */
-$[cdefine PHAVE_STDLIB_H]
-
-/* Define if you have the <limits.h> header file.  */
-$[cdefine PHAVE_LIMITS_H]
-
-/* Define if you have the <minmax.h> header file.  */
-$[cdefine PHAVE_MINMAX_H]
-
-/* Define if you have the <sstream> header file.  */
-$[cdefine PHAVE_SSTREAM]
-
-/* Define if you have the <new> header file.  */
-$[cdefine PHAVE_NEW]
-
-/* Define if you have the <sys/types.h> header file.  */
-$[cdefine PHAVE_SYS_TYPES_H]
-
-/* Define if you have the <sys/time.h> header file.  */
-$[cdefine PHAVE_SYS_TIME_H]
-
-/* Define if you have the <unistd.h> header file.  */
-$[cdefine PHAVE_UNISTD_H]
-
-/* Define if you have the <utime.h> header file.  */
-$[cdefine PHAVE_UTIME_H]
-
-/* Define if you have the <glob.h> header file.  */
-$[cdefine PHAVE_GLOB_H]
-
-/* Define if you have the <dirent.h> header file.  */
-$[cdefine PHAVE_DIRENT_H]
-
-/* Define if you have the <drfftw.h> header file.  */
-$[cdefine PHAVE_DRFFTW_H]
-
-/* Do we have <sys/soundcard.h> (and presumably a Linux-style audio
-   interface)? */
-$[cdefine PHAVE_SYS_SOUNDCARD_H]
-
-/* Do we have <ucontext.h> (and therefore makecontext() /
-   swapcontext())? */
-$[cdefine PHAVE_UCONTEXT_H]
-
-/* Do we have <linux/input.h> ? This enables us to use raw mouse input. */
-$[cdefine PHAVE_LINUX_INPUT_H]
-
-/* Do we have <stdint.h>? */
-$[cdefine PHAVE_STDINT_H]
-
-/* Do we have RTTI (and <typeinfo>)? */
-$[cdefine HAVE_RTTI]
-
-/* Do we have Posix threads? */
-$[cdefine HAVE_POSIX_THREADS]
-
-/* Is the code being compiled with the Tau profiler's instrumentor? */
-$[cdefine USE_TAU]
-
-/* Define if needed to have 64-bit file i/o */
-$[cdefine __USE_LARGEFILE64]
-
-// To activate the DELETED_CHAIN macros.
-$[cdefine USE_DELETED_CHAIN]
-
-// To build the Windows TOUCHINPUT interfaces (requires Windows 7).
-$[cdefine HAVE_WIN_TOUCHINPUT]
-
-// If we are to build the native net interfaces.
-$[cdefine WANT_NATIVE_NET]
-
-/* Turn off warnings for using scanf and such */
-#if $[or $[eq $[USE_COMPILER],MSVC9], $[eq $[USE_COMPILER],MSVC9x64]]
-        #print Will ignore CRT_SECURE warnings for MSVC9
-        $[cdefine _CRT_SECURE_NO_WARNINGS]
-        # pragma warning( disable : 4996 4275 4267 4099 4049 4013 4005 )
-#endif
-
-/* Static linkage instead of the normal dynamic linkage? */
-$[cdefine LINK_ALL_STATIC]
-
-/* Define to compile the plugin code. */
-$[cdefine HAVE_P3D_PLUGIN]
-
-/* Define to compile for Cocoa or Carbon on Mac OS X. */
-$[cdefine HAVE_COCOA]
-$[cdefine HAVE_CARBON]
-
-/* Platform-identifying defines. */
-$[cdefine IS_OSX]
-$[cdefine IS_LINUX]
-$[cdefine IS_FREEBSD]
-$[cdefine BUILD_IPHONE]
-$[cdefine UNIVERSAL_BINARIES]
-
-#if $[DTOOL_PLATFORM]
-# define DTOOL_PLATFORM "$[DTOOL_PLATFORM]"
-#endif
-
-#end dtool_config.h

+ 0 - 439
dtool/Package.pp

@@ -1,439 +0,0 @@
-//
-// Package.pp
-//
-// This file defines certain configuration variables that are to be
-// written into the various make scripts.  It is processed by ppremake
-// (along with the Sources.pp files in each of the various
-// directories) to generate build scripts appropriate to each
-// environment.
-//
-// This is the package-specific file, which should be at the top of
-// every source hierarchy.  It generally gets the ball rolling, and is
-// responsible for explicitly including all of the relevant Config.pp
-// files.
-
-// Check the version of ppremake in use.
-#if $[< $[PPREMAKE_VERSION],1.11]
-  #error You need at least ppremake version 1.11 to process this tree.
-#endif
-
-// Get the current version info for Panda.
-#include $[THISDIRPREFIX]PandaVersion.pp
-#define PANDA_MAJOR_VERSION $[word 1,$[PANDA_VERSION]]
-#define PANDA_MINOR_VERSION $[word 2,$[PANDA_VERSION]]
-#define PANDA_SEQUENCE_VERSION $[word 3,$[PANDA_VERSION]]
-#defer PANDA_VERSION_STR $[PANDA_MAJOR_VERSION].$[PANDA_MINOR_VERSION].$[PANDA_SEQUENCE_VERSION]$[if $[not $[PANDA_OFFICIAL_VERSION]],c]
-#defer PANDA_VERSION_SYMBOL panda_version_$[PANDA_MAJOR_VERSION]_$[PANDA_MINOR_VERSION]
-
-// The panda version as a single number, with three digits reserved
-// for each component.
-#define PANDA_NUMERIC_VERSION $[+ $[* $[PANDA_MAJOR_VERSION],1000000],$[* $[PANDA_MINOR_VERSION],1000],$[PANDA_SEQUENCE_VERSION]]
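-// For example, a hypothetical PANDA_VERSION of "1 8 1" yields
-// 1 * 1000000 + 8 * 1000 + 1 = 1008001.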
-
-#define P3D_PLUGIN_MAJOR_VERSION $[word 1,$[P3D_PLUGIN_VERSION]]
-#define P3D_PLUGIN_MINOR_VERSION $[word 2,$[P3D_PLUGIN_VERSION]]
-#define P3D_PLUGIN_SEQUENCE_VERSION $[word 3,$[P3D_PLUGIN_VERSION]]
-#defer P3D_PLUGIN_VERSION_STR $[P3D_PLUGIN_MAJOR_VERSION].$[P3D_PLUGIN_MINOR_VERSION].$[P3D_PLUGIN_SEQUENCE_VERSION]$[if $[not $[PANDA_OFFICIAL_VERSION]],c]
-
-// The plugin version as a dot-delimited integer quad, according to MS
-// conventions for DLL version numbers.
-#defer P3D_PLUGIN_DLL_DOT_VERSION $[word 1,$[P3D_PLUGIN_VERSION]].$[word 2,$[P3D_PLUGIN_VERSION]].$[word 3,$[P3D_PLUGIN_VERSION]].$[if $[PANDA_OFFICIAL_VERSION],1000,0]
-// The same thing as a comma-delimited quad.
-#defer P3D_PLUGIN_DLL_COMMA_VERSION $[word 1,$[P3D_PLUGIN_VERSION]],$[word 2,$[P3D_PLUGIN_VERSION]],$[word 3,$[P3D_PLUGIN_VERSION]],$[if $[PANDA_OFFICIAL_VERSION],1000,0]
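-// For example, a hypothetical P3D_PLUGIN_VERSION of "1 0 4" expands to
-// 1.0.4.1000 (dot form) and 1,0,4,1000 (comma form) for an official
-// build, or 1.0.4.0 and 1,0,4,0 otherwise.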
-
-// What is the name of this source tree?
-#if $[eq $[PACKAGE],]
-  #define PACKAGE dtool
-#endif
-
-// Where should we install DTOOL, specifically?
-#if $[DTOOL_INSTALL]
-  #define DTOOL_INSTALL $[unixfilename $[DTOOL_INSTALL]]
-#elif $[CTPROJS]
-  // If we are presently attached, use the environment variable.
-  // We define two variables: one for ourselves, which burns in the
-  // current value of the DTOOL environment variable (so that any
-  // attempt to install in this tree will install correctly, no
-  // matter whether we are attached to a different DTOOL later by
-  // mistake), and one for other trees to use, which expands to an
-  // ordinary reference to the DTOOL environment variable, so
-  // they will read from the right tree no matter which DTOOL they're
-  // attached to.
-  #set DTOOL $[unixfilename $[DTOOL]]
-  #define DTOOL_INSTALL $[DTOOL]/built
-  #if $[eq $[DTOOL],]
-    #error You seem to be attached to some trees, but not DTOOL!
-  #endif
-#else
-  // Otherwise, if we are not attached, install in the standard place
-  // (unless the user specifies otherwise).
-  #defer DTOOL_INSTALL $[unixfilename $[INSTALL_DIR]]
-#endif
-
-
-// These variables tell ppremake how to interpret the contents of the
-// PLATFORM variable, and help it to control the effects of functions
-// like $[os] and $[isfullpath].
-
-// True if we are specifically 32-bit Windows.
-#define WIN32_PLATFORM $[or $[eq $[PLATFORM],Win32],$[eq $[PLATFORM],Cygwin]]
-
-// True if we are 64-bit windows.
-#define WIN64_PLATFORM $[or $[eq $[PLATFORM],Win64],$[eq $[PLATFORM],Cygwin64]]
-
-// True if we are building on some flavor of Windows.
-#define WINDOWS_PLATFORM $[or $[WIN32_PLATFORM],$[WIN64_PLATFORM]]
-
-// True if we are building on some flavor of OS X.
-#define OSX_PLATFORM $[eq $[PLATFORM],OSX]
-
-// True if we are building on some flavor of Unix.
-#define UNIX_PLATFORM $[and $[not $[WINDOWS_PLATFORM]],$[not $[OSX_PLATFORM]]]
-
-
-
-// Pull in the package-level Config file.  This contains a lot of
-// configuration variables that the user might want to fine-tune.
-#include $[THISDIRPREFIX]Config.pp
-
-// Also get the platform-specific config file.  This defines a few
-// more variables that are more likely to be platform-dependent and
-// are less likely to be directly modified by the user.
-#include $[THISDIRPREFIX]Config.$[PLATFORM].pp
-
-// If the environment variable PPREMAKE_CONFIG is set, it points to a
-// user-customized Config.pp file, for instance in the user's home
-// directory.  This file might redefine any of the variables defined
-// above.
-#if $[ne $[PPREMAKE_CONFIG],]
-  #define PPREMAKE_CONFIG $[unixfilename $[PPREMAKE_CONFIG]]
-  #print Reading $[PPREMAKE_CONFIG] (referred to by PPREMAKE_CONFIG)
-  #include $[PPREMAKE_CONFIG]
-
-#elif $[wildcard $[unixfilename $[INSTALL_DIR]]/Config.pp]
-  // If the PPREMAKE_CONFIG variable is not set, but there exists a
-  // Config.pp in the compiled-in INSTALL_DIR, use that one by default.
-  #define PPREMAKE_CONFIG $[unixfilename $[INSTALL_DIR]]/Config.pp
-  #print Reading $[PPREMAKE_CONFIG] (referred to by INSTALL_DIR, because PPREMAKE_CONFIG is empty)
-  #include $[PPREMAKE_CONFIG]
-
-#else
-  // Otherwise, just carry on without it.
-  #print Environment variable PPREMAKE_CONFIG not set; using defaults.
-#endif
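-// For reference, a minimal user Config.pp pointed to by PPREMAKE_CONFIG
-// might just override a few of the variables defined above, for
-// instance (hypothetical values):
-//   #define OPTIMIZE 3
-//   #define HAVE_TINYDISPLAY
-//   #define FFMPEG_IPATH /usr/local/include/ffmpeg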
-
-#include $[THISDIRPREFIX]pptempl/PostConfig.pp
-
-// Now evaluate all of our deferred variable definitions from
-// Config.pp.
-#set EIGEN_IPATH $[unixfilename $[EIGEN_IPATH]]
-#set HAVE_EIGEN $[HAVE_EIGEN]
-
-#set PYTHON_IPATH $[unixfilename $[PYTHON_IPATH]]
-#set PYTHON_LPATH $[unixfilename $[PYTHON_LPATH]]
-#set PYTHON_FPATH $[unixfilename $[PYTHON_FPATH]]
-#set PYTHON_FRAMEWORK $[unixfilename $[PYTHON_FRAMEWORK]]
-#set HAVE_PYTHON $[HAVE_PYTHON]
-
-#set NATIVE_NET_IPATH $[unixfilename $[NATIVE_NET_IPATH]]
-#set NATIVE_NET_LPATH $[unixfilename $[NATIVE_NET_LPATH]]
-#set NATIVE_NET_LIBS $[NATIVE_NET_LIBS]
-#set WANT_NATIVE_NET $[WANT_NATIVE_NET]
-
-#set HAVE_NET $[HAVE_NET]
-
-#set OPENSSL_IPATH $[unixfilename $[OPENSSL_IPATH]]
-#set OPENSSL_LPATH $[unixfilename $[OPENSSL_LPATH]]
-#set OPENSSL_LIBS $[OPENSSL_LIBS]
-#set HAVE_OPENSSL $[HAVE_OPENSSL]
-
-#set JPEG_IPATH $[unixfilename $[JPEG_IPATH]]
-#set JPEG_LPATH $[unixfilename $[JPEG_LPATH]]
-#set JPEG_LIBS $[JPEG_LIBS]
-#set HAVE_JPEG $[HAVE_JPEG]
-
-#set PNG_IPATH $[unixfilename $[PNG_IPATH]]
-#set PNG_LPATH $[unixfilename $[PNG_LPATH]]
-#set PNG_LIBS $[PNG_LIBS]
-#set HAVE_PNG $[HAVE_PNG]
-
-#set TIFF_IPATH $[unixfilename $[TIFF_IPATH]]
-#set TIFF_LPATH $[unixfilename $[TIFF_LPATH]]
-#set TIFF_LIBS $[TIFF_LIBS]
-#set HAVE_TIFF $[HAVE_TIFF]
-
-#set TAR_IPATH $[unixfilename $[TAR_IPATH]]
-#set TAR_LPATH $[unixfilename $[TAR_LPATH]]
-#set TAR_LIBS $[TAR_LIBS]
-#set HAVE_TAR $[HAVE_TAR]
-
-#set FFTW_IPATH $[unixfilename $[FFTW_IPATH]]
-#set FFTW_LPATH $[unixfilename $[FFTW_LPATH]]
-#set FFTW_LIBS $[FFTW_LIBS]
-#set HAVE_FFTW $[HAVE_FFTW]
-
-#set SQUISH_IPATH $[unixfilename $[SQUISH_IPATH]]
-#set SQUISH_LPATH $[unixfilename $[SQUISH_LPATH]]
-#set SQUISH_LIBS $[SQUISH_LIBS]
-#set HAVE_SQUISH $[HAVE_SQUISH]
-
-#set BDB_IPATH $[unixfilename $[BDB_IPATH]]
-#set BDB_LPATH $[unixfilename $[BDB_LPATH]]
-#set BDB_LIBS $[BDB_LIBS]
-#set HAVE_BDB $[HAVE_BDB]
-
-#set CG_IPATH $[unixfilename $[CG_IPATH]]
-#set CG_LPATH $[unixfilename $[CG_LPATH]]
-#set CG_LIBS $[CG_LIBS]
-#set HAVE_CG $[HAVE_CG]
-
-#set CGGL_IPATH $[unixfilename $[CGGL_IPATH]]
-#set CGGL_LPATH $[unixfilename $[CGGL_LPATH]]
-#set CGGL_LIBS $[CGGL_LIBS]
-#set HAVE_CGGL $[HAVE_CGGL]
-
-#set CGDX9_IPATH $[unixfilename $[CGDX9_IPATH]]
-#set CGDX9_LPATH $[unixfilename $[CGDX9_LPATH]]
-#set CGDX9_LIBS $[CGDX9_LIBS]
-#set HAVE_CGDX9 $[HAVE_CGDX9]
-
-#set CGDX10_IPATH $[unixfilename $[CGDX10_IPATH]]
-#set CGDX10_LPATH $[unixfilename $[CGDX10_LPATH]]
-#set CGDX10_LIBS $[CGDX10_LIBS]
-#set HAVE_CGDX10 $[HAVE_CGDX10]
-
-#set VRPN_IPATH $[unixfilename $[VRPN_IPATH]]
-#set VRPN_LPATH $[unixfilename $[VRPN_LPATH]]
-#set VRPN_LIBS $[VRPN_LIBS]
-#set HAVE_VRPN $[HAVE_VRPN]
-
-#set HELIX_IPATH $[unixfilename $[HELIX_IPATH]]
-#set HELIX_LPATH $[unixfilename $[HELIX_LPATH]]
-#set HELIX_LIBS $[HELIX_LIBS]
-#set HAVE_HELIX $[HAVE_HELIX]
-
-#set ZLIB_IPATH $[unixfilename $[ZLIB_IPATH]]
-#set ZLIB_LPATH $[unixfilename $[ZLIB_LPATH]]
-#set ZLIB_LIBS $[ZLIB_LIBS]
-#set HAVE_ZLIB $[HAVE_ZLIB]
-
-#set GL_IPATH $[unixfilename $[GL_IPATH]]
-#set GL_LPATH $[unixfilename $[GL_LPATH]]
-#set GL_LIBS $[GL_LIBS]
-#set HAVE_GL $[HAVE_GL]
-
-#set GLES_IPATH $[unixfilename $[GLES_IPATH]]
-#set GLES_LPATH $[unixfilename $[GLES_LPATH]]
-#set GLES_LIBS $[GLES_LIBS]
-#set HAVE_GLES $[HAVE_GLES]
-
-#set GLES2_IPATH $[unixfilename $[GLES2_IPATH]]
-#set GLES2_LPATH $[unixfilename $[GLES2_LPATH]]
-#set GLES2_LIBS $[GLES2_LIBS]
-#set HAVE_GLES2 $[HAVE_GLES2]
-
-#set GLX_IPATH $[unixfilename $[GLX_IPATH]]
-#set GLX_LPATH $[unixfilename $[GLX_LPATH]]
-#set HAVE_GLX $[HAVE_GLX]
-
-#set EGL_IPATH $[unixfilename $[EGL_IPATH]]
-#set EGL_LPATH $[unixfilename $[EGL_LPATH]]
-#set EGL_LIBS $[unixfilename $[EGL_LIBS]]
-#set HAVE_EGL $[HAVE_EGL]
-
-#set HAVE_WGL $[HAVE_WGL]
-
-#set HAVE_COCOA $[HAVE_COCOA]
-#set HAVE_CARBON $[HAVE_CARBON]
-
-#set DX9_IPATH $[unixfilename $[DX9_IPATH]]
-#set DX9_LPATH $[unixfilename $[DX9_LPATH]]
-#set DX9_LIBS $[DX9_LIBS]
-#set HAVE_DX9 $[HAVE_DX9]
-
-#set OPENCV_IPATH $[unixfilename $[OPENCV_IPATH]]
-#set OPENCV_LPATH $[unixfilename $[OPENCV_LPATH]]
-#set OPENCV_LIBS $[OPENCV_LIBS]
-#set HAVE_OPENCV $[HAVE_OPENCV]
-
-#set FFMPEG_IPATH $[unixfilename $[FFMPEG_IPATH]]
-#set FFMPEG_LPATH $[unixfilename $[FFMPEG_LPATH]]
-#set FFMPEG_LIBS $[FFMPEG_LIBS]
-#set HAVE_FFMPEG $[HAVE_FFMPEG]
-
-#set ODE_IPATH $[unixfilename $[ODE_IPATH]]
-#set ODE_LPATH $[unixfilename $[ODE_LPATH]]
-#set ODE_LIBS $[ODE_LIBS]
-#set HAVE_ODE $[HAVE_ODE]
-
-#set AWESOMIUM_IPATH $[unixfilename $[AWESOMIUM_IPATH]]
-#set AWESOMIUM_LPATH $[unixfilename $[AWESOMIUM_LPATH]]
-#set AWESOMIUM_LIBS $[AWESOMIUM_LIBS]
-#set AWESOMIUM_FRAMEWORK $[unixfilename $[AWESOMIUM_FRAMEWORK]]
-#set HAVE_AWESOMIUM $[HAVE_AWESOMIUM]
-
-#set NPAPI_IPATH $[unixfilename $[NPAPI_IPATH]]
-#set NPAPI_LPATH $[unixfilename $[NPAPI_LPATH]]
-#set NPAPI_LIBS $[NPAPI_LIBS]
-#set HAVE_NPAPI $[HAVE_NPAPI]
-
-#set HAVE_THREADS $[HAVE_THREADS]
-#set DEBUG_THREADS $[DEBUG_THREADS]
-#set MUTEX_SPINLOCK $[MUTEX_SPINLOCK]
-
-#set DO_PSTATS $[DO_PSTATS]
-
-#set RAD_MSS_IPATH $[unixfilename $[RAD_MSS_IPATH]]
-#set RAD_MSS_LPATH $[unixfilename $[RAD_MSS_LPATH]]
-#set RAD_MSS_LIBS $[RAD_MSS_LIBS]
-#set HAVE_RAD_MSS $[HAVE_RAD_MSS]
-
-#set FMODEX_IPATH $[unixfilename $[FMODEX_IPATH]]
-#set FMODEX_LPATH $[unixfilename $[FMODEX_LPATH]]
-#set FMODEX_LIBS $[FMODEX_LIBS]
-#set HAVE_FMODEX $[HAVE_FMODEX]
-
-#set OPENAL_IPATH $[unixfilename $[OPENAL_IPATH]]
-#set OPENAL_LPATH $[unixfilename $[OPENAL_LPATH]]
-#set OPENAL_LIBS $[OPENAL_LIBS]
-#set OPENAL_FRAMEWORK $[unixfilename $[OPENAL_FRAMEWORK]]
-#set HAVE_OPENAL $[HAVE_OPENAL]
-
-#set PHYSX_IPATH $[unixfilename $[PHYSX_IPATH]]
-#set PHYSX_LPATH $[unixfilename $[PHYSX_LPATH]]
-#set PHYSX_LIBS $[PHYSX_LIBS]
-#set HAVE_PHYSX $[HAVE_PHYSX]
-
-#set SPEEDTREE_IPATH $[unixfilename $[SPEEDTREE_IPATH]]
-#set SPEEDTREE_LPATH $[unixfilename $[SPEEDTREE_LPATH]]
-#set SPEEDTREE_LIBS $[SPEEDTREE_LIBS]
-#set HAVE_SPEEDTREE $[HAVE_SPEEDTREE]
-
-#set PKG_CONFIG $[PKG_CONFIG]
-#set HAVE_GTK $[HAVE_GTK]
-
-#set FREETYPE_CONFIG $[FREETYPE_CONFIG]
-#set HAVE_FREETYPE $[HAVE_FREETYPE]
-#set FREETYPE_CFLAGS $[FREETYPE_CFLAGS]
-#set FREETYPE_IPATH $[unixfilename $[FREETYPE_IPATH]]
-#set FREETYPE_LPATH $[unixfilename $[FREETYPE_LPATH]]
-#set FREETYPE_LIBS $[FREETYPE_LIBS]
-
-#set WX_CONFIG $[WX_CONFIG]
-#set HAVE_WX $[HAVE_WX]
-#set WX_CFLAGS $[WX_CFLAGS]
-#set WX_IPATH $[unixfilename $[WX_IPATH]]
-#set WX_LPATH $[unixfilename $[WX_LPATH]]
-#set WX_LIBS $[WX_LIBS]
-
-#set FLTK_CONFIG $[FLTK_CONFIG]
-#set HAVE_FLTK $[HAVE_FLTK]
-#set FLTK_CFLAGS $[FLTK_CFLAGS]
-#set FLTK_IPATH $[unixfilename $[FLTK_IPATH]]
-#set FLTK_LPATH $[unixfilename $[FLTK_LPATH]]
-#set FLTK_LIBS $[FLTK_LIBS]
-
-
-#set MAYA_LOCATION $[unixfilename $[MAYA_LOCATION]]
-#set HAVE_MAYA $[HAVE_MAYA]
-
-#set SOFTIMAGE_LOCATION $[unixfilename $[SOFTIMAGE_LOCATION]]
-#set HAVE_SOFTIMAGE $[HAVE_SOFTIMAGE]
-
-#set FCOLLADA_IPATH $[unixfilename $[FCOLLADA_IPATH]]
-#set FCOLLADA_LPATH $[unixfilename $[FCOLLADA_LPATH]]
-#set FCOLLADA_LIBS $[FCOLLADA_LIBS]
-#set HAVE_FCOLLADA $[HAVE_FCOLLADA]
-
-#set COLLADA14DOM_IPATH $[unixfilename $[COLLADA14DOM_IPATH]]
-#set COLLADA14DOM_LPATH $[unixfilename $[COLLADA14DOM_LPATH]]
-#set COLLADA14DOM_LIBS $[COLLADA14DOM_LIBS]
-#set HAVE_COLLADA14DOM $[HAVE_COLLADA14DOM]
-
-#set COLLADA15DOM_IPATH $[unixfilename $[COLLADA15DOM_IPATH]]
-#set COLLADA15DOM_LPATH $[unixfilename $[COLLADA15DOM_LPATH]]
-#set COLLADA15DOM_LIBS $[COLLADA15DOM_LIBS]
-#set HAVE_COLLADA15DOM $[HAVE_COLLADA15DOM]
-
-#set ASSIMP_IPATH $[unixfilename $[ASSIMP_IPATH]]
-#set ASSIMP_LPATH $[unixfilename $[ASSIMP_LPATH]]
-#set ASSIMP_LIBS $[ASSIMP_LIBS]
-#set HAVE_ASSIMP $[HAVE_ASSIMP]
-
-#set ARTOOLKIT_IPATH $[unixfilename $[ARTOOLKIT_IPATH]]
-#set ARTOOLKIT_LPATH $[unixfilename $[ARTOOLKIT_LPATH]]
-#set ARTOOLKIT_LIBS $[ARTOOLKIT_LIBS]
-#set HAVE_ARTOOLKIT $[HAVE_ARTOOLKIT]
-
-#set ROCKET_IPATH $[unixfilename $[ROCKET_IPATH]]
-#set ROCKET_LPATH $[unixfilename $[ROCKET_LPATH]]
-#set ROCKET_LIBS $[ROCKET_LIBS]
-#set HAVE_ROCKET $[HAVE_ROCKET]
-#set HAVE_ROCKET_PYTHON $[HAVE_ROCKET_PYTHON]
-
-#set BULLET_IPATH $[unixfilename $[BULLET_IPATH]]
-#set BULLET_LPATH $[unixfilename $[BULLET_LPATH]]
-#set BULLET_LIBS $[BULLET_LIBS]
-#set HAVE_BULLET $[HAVE_BULLET]
-
-#set VORBIS_IPATH $[unixfilename $[VORBIS_IPATH]]
-#set VORBIS_LPATH $[unixfilename $[VORBIS_LPATH]]
-#set VORBIS_LIBS $[VORBIS_LIBS]
-#set HAVE_VORBIS $[HAVE_VORBIS]
-
-// Now infer a few more variables based on what was defined.
-#if $[and $[HAVE_GTK],$[PKG_CONFIG]]
-  #define cflags $[shell $[PKG_CONFIG] gtk+-2.0 --cflags]
-  #define libs $[shell $[PKG_CONFIG] gtk+-2.0 --libs]
-
-  #define GTK_CFLAGS $[filter-out -I%,$[cflags]]
-  #define GTK_IPATH $[unique $[patsubst -I%,%,$[filter -I%,$[cflags]]]]
-  #define GTK_LPATH $[unique $[patsubst -L%,%,$[filter -L%,$[libs]]]]
-  #define GTK_LIBS $[patsubst -l%,%,$[filter -l%,$[libs]]]
-#endif
-
-#if $[and $[HAVE_FREETYPE],$[FREETYPE_CONFIG]]
-  #define cflags $[shell $[FREETYPE_CONFIG] --cflags]
-  #define libs $[shell $[FREETYPE_CONFIG] --libs]
-
-  #define FREETYPE_CFLAGS $[filter-out -I%,$[cflags]]
-  #define FREETYPE_IPATH $[unique $[patsubst -I%,%,$[filter -I%,$[cflags]]]]
-  #define FREETYPE_LPATH $[unique $[patsubst -L%,%,$[filter -L%,$[libs]]]]
-  #define FREETYPE_LIBS $[patsubst -l%,%,$[filter -l%,$[libs]]]
-#endif
-
-#if $[and $[HAVE_WX],$[WX_CONFIG]]
-  #define cflags $[shell $[WX_CONFIG] --cflags]
-  #define libs $[shell $[WX_CONFIG] --libs core,base]
-
-  #define WX_CFLAGS $[filter-out -I%,$[cflags]]
-  #define WX_IPATH $[unique $[patsubst -I%,%,$[filter -I%,$[cflags]]]]
-  #define WX_LPATH $[unique $[patsubst -L%,%,$[filter -L%,$[libs]]]]
-  #define WX_LFLAGS $[filter-out -l%,$[libs]]
-  #define WX_LIBS $[patsubst -l%,%,$[filter -l%,$[libs]]]
-#endif
-
-#if $[and $[HAVE_FLTK],$[FLTK_CONFIG]]
-  #define cflags $[shell $[FLTK_CONFIG] --cflags]
-  #define libs $[shell $[FLTK_CONFIG] --ldflags]
-
-  #define FLTK_CFLAGS $[filter-out -I%,$[cflags]]
-  #define FLTK_IPATH $[unique $[patsubst -I%,%,$[filter -I%,$[cflags]]]]
-  #define FLTK_LPATH $[unique $[patsubst -L%,%,$[filter -L%,$[libs]]]]
-  #define FLTK_LFLAGS $[filter-out -l%,$[libs]]
-  #define FLTK_LIBS $[patsubst -l%,%,$[filter -l%,$[libs]]]
-#endif
-
-#if $[HAVE_PHYSX]
-  #define GENPYCODE_LIBS $[GENPYCODE_LIBS] libpandaphysx
-#endif
-
-#if $[HAVE_SPEEDTREE]
-  #define GENPYCODE_LIBS $[GENPYCODE_LIBS] libpandaspeedtree
-#endif
-
-#if $[HAVE_AWESOMIUM]
-  #define GENPYCODE_LIBS $[GENPYCODE_LIBS] libpandaawesomium
-#endif
-
-// Finally, include the system configure file.
-#include $[THISDIRPREFIX]pptempl/System.pp
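
The removed ppremake configuration block above follows one pattern throughout: for each optional third-party package it forwards IPATH/LPATH/LIBS and a HAVE_* flag into the global namespace, and for packages that ship a config script (pkg-config, freetype-config, wx-config, fltk-config) it splits the script's output into those same variables with $[filter]/$[patsubst]. Below is a minimal Python sketch of that flag-splitting step; the helper names and the freetype2 example are illustrative assumptions, not the actual makepanda code, and running it assumes pkg-config and the freetype2 package are installed.

import shlex
import subprocess

def run_config(command):
    # Run a pkg-config / *-config style command and return its output tokens.
    return subprocess.check_output(shlex.split(command), text=True).split()

def split_flags(cflags, libs):
    # Split flag tokens the way the removed $[filter]/$[patsubst] macros did:
    # -I -> include path, -L -> library path, -l -> library name; anything
    # else from --cflags is kept as a plain compiler flag.  (ppremake's
    # $[unique] preserved first-seen order; the set() here does not.)
    return {
        "CFLAGS": [t for t in cflags if not t.startswith("-I")],
        "IPATH":  sorted({t[2:] for t in cflags if t.startswith("-I")}),
        "LPATH":  sorted({t[2:] for t in libs if t.startswith("-L")}),
        "LIBS":   [t[2:] for t in libs if t.startswith("-l")],
    }

# Equivalent in spirit to the removed FREETYPE_CONFIG block.
freetype = split_flags(run_config("pkg-config freetype2 --cflags"),
                       run_config("pkg-config freetype2 --libs"))
print(freetype)
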

+ 4 - 0
dtool/PandaVersion.pp

@@ -2,6 +2,10 @@
 // by Package.pp, which puts it in the global namespace for all
 // ppremake scripts for Panda.
 
+// Actually, we no longer have ppremake, but this file is still
+// parsed by makepanda today.  We should probably find a better
+// place to put it.
+
 // Use spaces to separate the major, minor, and sequence numbers here.
 #define PANDA_VERSION 1 10 0
 

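The comment added above explains why this hunk is an addition rather than a deletion: PandaVersion.pp survives the ppremake purge because makepanda still reads the version numbers out of it. A minimal sketch of that kind of parsing follows; parse_panda_version and the default path are illustrative assumptions, not the actual makepanda routine.

import re

def parse_panda_version(path="dtool/PandaVersion.pp"):
    # Find the '#define PANDA_VERSION 1 10 0' line and join the
    # space-separated numbers with dots.
    with open(path) as f:
        for line in f:
            match = re.match(r"\s*#define\s+PANDA_VERSION\s+(.+)", line)
            if match:
                return ".".join(match.group(1).split())
    raise ValueError("PANDA_VERSION not found in " + path)

print(parse_panda_version())   # prints "1.10.0" for the hunk above
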
+ 0 - 7
dtool/Sources.pp

@@ -1,7 +0,0 @@
-// This is the toplevel directory.  It contains configure.in and other
-// stuff.
-
-#define DIR_TYPE toplevel
-
-#define CONFIG_HEADER dtool_config.h
-#define EXTRA_PPREMAKE_SOURCE PandaVersion.pp

+ 0 - 7
dtool/metalibs/Sources.pp

@@ -1,7 +0,0 @@
-// This is a group directory: a directory level above a number of
-// source subdirectories.
-
-#define DIR_TYPE group
-
-// The metalibs directory always depends on the src directory.
-#define DEPENDS src

Some files were not shown because too many files changed in this diff