  1. """
  2. Generates a wheel (.whl) file from the output of makepanda.
  3. """
  4. import json
  5. import sys
  6. import os
  7. from os.path import join
  8. import zipfile
  9. import hashlib
  10. import tempfile
  11. import subprocess
  12. import time
  13. import struct
  14. from optparse import OptionParser
  15. from base64 import urlsafe_b64encode
  16. from makepandacore import LocateBinary, GetExtensionSuffix, SetVerbose, GetVerbose, GetMetadataValue, CrossCompiling, GetThirdpartyDir, SDK, GetStrip
  17. from locations import get_config_var
  18. from sysconfig import get_platform


def get_abi_tag():
    ver = 'cp%d%d' % sys.version_info[:2]
    if hasattr(sys, 'abiflags'):
        return ver + sys.abiflags

    gil_disabled = get_config_var("Py_GIL_DISABLED")
    if gil_disabled and int(gil_disabled):
        return ver + 't'

    return ver
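
# Illustrative only: on CPython 3.11 get_abi_tag() is expected to return 'cp311',
# and on a free-threaded 3.13 build 'cp313t'; the value simply mirrors
# sys.version_info plus the interpreter's ABI flags.

# The helpers below classify files as executables or shared libraries, by file
# extension on Windows and by magic number (ELF, Mach-O/fat) elsewhere;
# is_executable is bound to the appropriate one for the host platform below.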


def is_exe_file(path):
    return os.path.isfile(path) and path.lower().endswith('.exe')


def is_elf_file(path):
    base = os.path.basename(path)
    return os.path.isfile(path) and '.' not in base and \
        open(path, 'rb').read(4) == b'\x7FELF'


def is_macho_or_fat_file(path):
    base = os.path.basename(path)
    return os.path.isfile(path) and '.' not in base and \
        open(path, 'rb').read(4) in (b'\xFE\xED\xFA\xCE', b'\xCE\xFA\xED\xFE',
                                     b'\xFE\xED\xFA\xCF', b'\xCF\xFA\xED\xFE',
                                     b'\xCA\xFE\xBA\xBE', b'\xBE\xBA\xFE\xCA',
                                     b'\xCA\xFE\xBA\xBF', b'\xBF\xBA\xFE\xCA')


def is_fat_file(path):
    return os.path.isfile(path) and \
        open(path, 'rb').read(4) in (b'\xCA\xFE\xBA\xBE', b'\xBE\xBA\xFE\xCA',
                                     b'\xCA\xFE\xBA\xBF', b'\xBF\xBA\xFE\xCA')


def get_python_ext_module_dir():
    if CrossCompiling():
        return os.path.join(GetThirdpartyDir(), "python", "lib", SDK["PYTHONVERSION"], "lib-dynload")
    else:
        import _ctypes
        return os.path.dirname(_ctypes.__file__)


if sys.platform in ('win32', 'cygwin'):
    is_executable = is_exe_file
elif sys.platform == 'darwin':
    is_executable = is_macho_or_fat_file
else:
    is_executable = is_elf_file

# Other global parameters
PY_VERSION = "cp{0}{1}".format(*sys.version_info)
ABI_TAG = get_abi_tag()

EXCLUDE_EXT = [".pyc", ".pyo", ".N", ".prebuilt", ".xcf", ".plist", ".vcproj", ".sln"]

# Plug-ins to install.
PLUGIN_LIBS = ["pandagl", "pandagles", "pandagles2", "pandadx9", "p3tinydisplay", "p3ptloader", "p3assimp", "p3ffmpeg", "p3openal_audio", "p3fmod_audio", "p3headlessgl"]

# Libraries included in manylinux ABI that should be ignored.  See PEP 513/571/599.
MANYLINUX_LIBS = [
    "libgcc_s.so.1", "libstdc++.so.6", "libm.so.6", "libdl.so.2", "librt.so.1",
    "libcrypt.so.1", "libc.so.6", "libnsl.so.1", "libutil.so.1",
    "libpthread.so.0", "libresolv.so.2", "libX11.so.6", "libXext.so.6",
    "libXrender.so.1", "libICE.so.6", "libSM.so.6", "libGL.so.1",
    "libgobject-2.0.so.0", "libgthread-2.0.so.0", "libglib-2.0.so.0",

    # These are not mentioned in manylinux1 spec but should nonetheless always
    # be excluded.
    "linux-vdso.so.1", "linux-gate.so.1", "ld-linux.so.2", "libdrm.so.2",
    "ld-linux-x86-64.so.2", "ld-linux-aarch64.so.1",
    "libEGL.so.1", "libOpenGL.so.0", "libGLX.so.0", "libGLdispatch.so.0",
    "libGLESv2.so.2",
]

# Binaries to never scan for dependencies on non-Windows systems.
IGNORE_UNIX_DEPS_OF = [
    "panda3d_tools/pstats",
]

# Tools to exclude from the wheel.
EXCLUDE_BINARIES = [
    'eggcacher',
    'packpanda',
    'interrogate',
    'interrogate_module',
    'test_interrogate',
    'parse_file',
    'run_tests',
]
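
# The WHEEL template below is rendered with (PY_VERSION, ABI_TAG, platform) when
# the dist-info directory is written; for example, a CPython 3.11 build for 64-bit
# Windows would be tagged roughly 'cp311-cp311-win_amd64' (illustrative values).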
WHEEL_DATA = """Wheel-Version: 1.0
Generator: makepanda
Root-Is-Purelib: false
Tag: {0}-{1}-{2}
"""

PROJECT_URLS = dict([line.split('=', 1) for line in GetMetadataValue('project_urls').strip().splitlines()])

METADATA = {
    "license": GetMetadataValue('license'),
    "name": GetMetadataValue('name'),
    "metadata_version": "2.1",
    "generator": "makepanda",
    "summary": GetMetadataValue('description'),
    "extensions": {
        "python.details": {
            "project_urls": dict(PROJECT_URLS, Home=GetMetadataValue('url')),
            "document_names": {
                "license": "LICENSE.txt"
            },
            "contacts": [
                {
                    "role": "author",
                    "name": GetMetadataValue('author'),
                    "email": GetMetadataValue('author_email'),
                }
            ]
        }
    },
    "classifiers": GetMetadataValue('classifiers'),
}

DESCRIPTION = """
The Panda3D free 3D game engine
===============================

Panda3D is a powerful 3D engine written in C++, with a complete set of Python
bindings. Unlike other engines, these bindings are automatically generated,
meaning that they are always up-to-date and complete: all functions of the
engine can be controlled from Python. All major Panda3D applications have been
written in Python; this is the intended way of using the engine.

Panda3D supports automatic shader generation, which means you can use normal
maps, gloss maps, glow maps, HDR, cartoon shading, and the like without having
to write any shaders.

Panda3D is a modern engine supporting advanced features such as shaders,
stencil, and render-to-texture. Panda3D is unusual in that it emphasizes a
short learning curve, rapid development, and extreme stability and robustness.
Panda3D is free software that runs under Windows, Linux, or macOS.

The Panda3D team is very concerned with making the engine accessible to new
users. We provide a detailed manual, a complete API reference, and a large
collection of sample programs to help you get started. We have active forums,
with many helpful users, and the developers are regularly online to answer
questions.
"""
PANDA3D_TOOLS_INIT = """import os, sys
import panda3d

dir = os.path.dirname(panda3d.__file__)
del panda3d

if sys.platform in ('win32', 'cygwin'):
    path_var = 'PATH'
    if hasattr(os, 'add_dll_directory'):
        os.add_dll_directory(dir)
elif sys.platform == 'darwin':
    path_var = 'DYLD_LIBRARY_PATH'
else:
    path_var = 'LD_LIBRARY_PATH'

if not os.environ.get(path_var):
    os.environ[path_var] = dir
else:
    os.environ[path_var] = dir + os.pathsep + os.environ[path_var]

del os, sys, path_var, dir


def _exec_tool(tool):
    import os, sys
    from subprocess import Popen
    tools_dir = os.path.dirname(__file__)
    handle = Popen(sys.argv, executable=os.path.join(tools_dir, tool))
    try:
        try:
            return handle.wait()
        except KeyboardInterrupt:
            # Give the program a chance to handle the signal gracefully.
            return handle.wait()
    except:
        handle.kill()
        handle.wait()
        raise

# Register all the executables in this directory as global functions.
{0}
"""


def parse_dependencies_windows(data):
    """ Parses the given output from dumpbin /dependents to determine the list
    of DLLs this executable file depends on. """

    lines = data.splitlines()
    li = 0
    while li < len(lines):
        line = lines[li]
        li += 1
        if line.find(' has the following dependencies') != -1:
            break

    if li < len(lines):
        line = lines[li]
        if line.strip() == '':
            # Skip a blank line.
            li += 1

    # Now we're finding filenames, until the next blank line.
    filenames = []
    while li < len(lines):
        line = lines[li]
        li += 1
        line = line.strip()
        if line == '':
            # We're done.
            return filenames
        filenames.append(line)

    # At least we got some data.
    return filenames
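
# Note that the parser above relies only on the overall shape of the dumpbin
# output: a header line containing ' has the following dependencies', an optional
# blank line, then one DLL name per line until the next blank line.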


def parse_dependencies_unix(data):
    """ Parses the given output from otool -XL or ldd to determine the list of
    libraries this executable file depends on. """

    lines = data.splitlines()
    filenames = []
    for l in lines:
        l = l.strip()
        if l != "statically linked":
            filenames.append(l.split(' ', 1)[0])
    return filenames
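
# _scan_dependencies_elf below reads the DT_NEEDED entries directly from an ELF
# file's .dynamic section, so no external tool is needed for ELF binaries.  The
# caller (scan_dependencies) has already consumed the 4-byte magic number, so the
# file handle is positioned just past it.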


def _scan_dependencies_elf(elf):
    deps = []
    ident = elf.read(12)

    # Make sure we read in the correct endianness and integer size
    byte_order = "<>"[ord(ident[1:2]) - 1]
    elf_class = ord(ident[0:1]) - 1  # 0 = 32-bits, 1 = 64-bits
    header_struct = byte_order + ("HHIIIIIHHHHHH", "HHIQQQIHHHHHH")[elf_class]
    section_struct = byte_order + ("4xI8xIII8xI", "4xI16xQQI12xQ")[elf_class]
    dynamic_struct = byte_order + ("iI", "qQ")[elf_class]

    type, machine, version, entry, phoff, shoff, flags, ehsize, phentsize, phnum, shentsize, shnum, shstrndx \
        = struct.unpack(header_struct, elf.read(struct.calcsize(header_struct)))

    dynamic_sections = []
    string_tables = {}

    # Seek to the section header table and find the .dynamic section.
    elf.seek(shoff)
    for i in range(shnum):
        type, offset, size, link, entsize = struct.unpack_from(section_struct, elf.read(shentsize))

        if type == 6 and link != 0:  # DYNAMIC type, links to string table
            dynamic_sections.append((offset, size, link, entsize))
            string_tables[link] = None

    # Read the relevant string tables.
    for idx in string_tables.keys():
        elf.seek(shoff + idx * shentsize)
        type, offset, size, link, entsize = struct.unpack_from(section_struct, elf.read(shentsize))
        if type != 3: continue
        elf.seek(offset)
        string_tables[idx] = elf.read(size)

    # Loop through the dynamic sections to get the NEEDED entries.
    needed = []
    for offset, size, link, entsize in dynamic_sections:
        elf.seek(offset)
        data = elf.read(entsize)
        tag, val = struct.unpack_from(dynamic_struct, data)

        # Read tags until we find a NULL tag.
        while tag != 0:
            if tag == 1:  # A NEEDED entry.  Read it from the string table.
                string = string_tables[link][val : string_tables[link].find(b'\0', val)]
                needed.append(string.decode('utf-8'))

            data = elf.read(entsize)
            tag, val = struct.unpack_from(dynamic_struct, data)

    elf.close()
    return needed


def scan_dependencies(pathname):
    """ Returns the list of shared libraries that the given executable file or
    shared library depends on. """

    with open(pathname, 'rb') as fh:
        if fh.read(4) == b'\x7FELF':
            return _scan_dependencies_elf(fh)

    if sys.platform == "darwin":
        command = ['otool', '-XL', pathname]
    elif sys.platform in ("win32", "cygwin"):
        command = ['dumpbin', '/dependents', pathname]
    else:
        sys.exit("Don't know how to determine dependencies from %s" % (pathname))

    process = subprocess.Popen(command, stdout=subprocess.PIPE, universal_newlines=True)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if retcode:
        raise subprocess.CalledProcessError(retcode, command[0], output=output)

    filenames = None
    if sys.platform in ("win32", "cygwin"):
        filenames = parse_dependencies_windows(output)
    else:
        filenames = parse_dependencies_unix(output)

    if filenames is None:
        sys.exit("Unable to determine dependencies from %s" % (pathname))

    if sys.platform == "darwin" and len(filenames) > 0:
        # Filter out the library ID.
        if os.path.basename(filenames[0]).split('.', 1)[0] == os.path.basename(pathname).split('.', 1)[0]:
            del filenames[0]

    return filenames
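
# Illustrative example (actual names depend on the build): scanning a built
# libpanda.so might return something like ['libpandaexpress.so', 'libp3dtool.so',
# 'libstdc++.so.6', ...]; the WheelFile class below decides which of these
# actually get bundled into the wheel.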


class WheelFile(object):
    def __init__(self, name, version, platform):
        self.name = name
        self.version = version
        self.platform = platform

        wheel_name = "{0}-{1}-{2}-{3}-{4}.whl".format(
            name, version, PY_VERSION, ABI_TAG, platform)
        print("Writing %s" % (wheel_name))

        self.zip_file = zipfile.ZipFile(wheel_name, 'w', zipfile.ZIP_DEFLATED)
        self.records = []

        # Used to locate dependency libraries.
        self.lib_path = []
        self.dep_paths = {}
        self.ignore_deps = set()

        # This can be set if a reproducible (deterministic) build is desired, in
        # which case we have to clamp all dates to the given SOURCE_DATE_EPOCH.
        epoch = os.environ.get('SOURCE_DATE_EPOCH')
        self.max_date_time = time.localtime(int(epoch) if epoch else time.time())[:6]
        if self.max_date_time < (1980, 1, 1, 0, 0, 0):
            # Earliest representable time in zip archives.
            self.max_date_time = (1980, 1, 1, 0, 0, 0)
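
    # A reproducible archive can be produced by pinning the timestamp, e.g.
    # (hypothetical shell usage):
    #   SOURCE_DATE_EPOCH=1577836800 python makewheel.py --outputdir built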

    def consider_add_dependency(self, target_path, dep, search_path=None):
        """Considers adding a dependency library.
        Returns the target_path if it was added, which may be different from
        target_path if it was already added earlier, or None if it wasn't."""

        if dep in self.dep_paths:
            # Already considered this.
            return self.dep_paths[dep]

        self.dep_paths[dep] = None

        if dep in self.ignore_deps:
            if GetVerbose():
                print("Ignoring {0} (explicitly ignored)".format(dep))
            return

        if not self.platform.startswith("android"):
            if dep.lower().startswith("python") or os.path.basename(dep).startswith("libpython"):
                if GetVerbose():
                    print("Ignoring {0} (explicitly ignored)".format(dep))
                return

        if self.platform.startswith("macosx"):
            if dep.endswith(".so"):
                # Temporary hack for 1.9, which had link deps on modules.
                return
            if dep.startswith("/System/"):
                return

        if dep.startswith('/'):
            source_path = dep
        else:
            source_path = None
            if search_path is None:
                search_path = self.lib_path

            for lib_dir in search_path:
                # Ignore static stuff.
                path = os.path.join(lib_dir, dep)
                if os.path.isfile(path):
                    source_path = os.path.normpath(path)
                    break

        if not source_path:
            # Couldn't find library in the panda3d lib dir.
            if GetVerbose():
                print("Ignoring {0} (not in search path)".format(dep))
            return

        self.dep_paths[dep] = target_path
        self.write_file(target_path, source_path)
        return target_path

    def write_file(self, target_path, source_path):
        """Adds the given file to the .whl file."""

        orig_source_path = source_path

        # If this is a .so file, we should set the rpath appropriately.
        temp = None
        basename, ext = os.path.splitext(source_path)
        if ext in ('.so', '.dylib') or '.so.' in os.path.basename(source_path) or \
           (not ext and is_executable(source_path)):
            # Scan Unix dependencies.
            if target_path not in IGNORE_UNIX_DEPS_OF:
                deps = scan_dependencies(source_path)
            else:
                deps = []

            suffix = ''
            if '.so' in os.path.basename(source_path):
                suffix = '.so'
            elif ext == '.dylib':
                suffix = '.dylib'
            temp = tempfile.NamedTemporaryFile(suffix=suffix, prefix='whl', delete=False)

            # On macOS, if no fat wheel was requested, extract the right architecture.
            if self.platform.startswith("macosx") and is_fat_file(source_path) \
               and not self.platform.endswith("_intel") \
               and "_fat" not in self.platform \
               and "_universal" not in self.platform:
                if self.platform.endswith("_x86_64"):
                    arch = 'x86_64'
                else:
                    arch = self.platform.split('_')[-1]
                subprocess.call(['lipo', source_path, '-extract', arch, '-output', temp.name])
            else:
                # Otherwise, just copy it over.
                temp.write(open(source_path, 'rb').read())
            temp.close()

            os.chmod(temp.name, os.stat(temp.name).st_mode | 0o711)

            # Now add dependencies.  On macOS, fix @loader_path references.
            if self.platform.startswith("macosx"):
                if source_path.endswith('deploy-stubw'):
                    deps_path = '@executable_path/../Frameworks'
                else:
                    deps_path = '@loader_path'
                loader_path = [os.path.dirname(source_path)]

                for dep in deps:
                    if dep.endswith('/Python'):
                        # If this references the Python framework, change it
                        # to reference libpython instead.
                        new_dep = deps_path + '/libpython{0}.{1}.dylib'.format(*sys.version_info)
                    elif '@loader_path' in dep:
                        dep_path = dep.replace('@loader_path', '.')
                        target_dep = os.path.dirname(target_path) + '/' + os.path.basename(dep)
                        target_dep = self.consider_add_dependency(target_dep, dep_path, loader_path)
                        if not target_dep:
                            # It won't be included, so no use adjusting the path.
                            continue
                        new_dep = os.path.join(deps_path, os.path.relpath(target_dep, os.path.dirname(target_path)))
                    elif '@rpath' in dep:
                        # Unlike makepanda, CMake uses @rpath instead of
                        # @loader_path.  This means we can just search for the
                        # dependencies like normal.
                        dep_path = dep.replace('@rpath', '.')
                        target_dep = os.path.dirname(target_path) + '/' + os.path.basename(dep)
                        self.consider_add_dependency(target_dep, dep_path)
                        continue
                    elif dep.startswith('/Library/Frameworks/Python.framework/') or \
                         dep.startswith('/Library/Frameworks/PythonT.framework/'):
                        # Add this dependency if it's in the Python directory.
                        target_dep = os.path.dirname(target_path) + '/' + os.path.basename(dep)
                        target_dep = self.consider_add_dependency(target_dep, dep, loader_path)
                        if not target_dep:
                            # It won't be included, so no use adjusting the path.
                            continue
                        new_dep = os.path.join(deps_path, os.path.relpath(target_dep, os.path.dirname(target_path)))
                    else:
                        if '/' in dep:
                            if GetVerbose():
                                print("Ignoring dependency %s" % (dep))
                        continue

                    subprocess.call(["install_name_tool", "-change", dep, new_dep, temp.name])

                # Make sure it has an ad-hoc code signature.
                subprocess.call(["codesign", "-f", "-s", "-", temp.name])
            else:
                # On other unixes, we just add dependencies normally.
                for dep in deps:
                    # Only include dependencies with relative path, for now.
                    if '/' in dep:
                        continue

                    if self.platform.startswith('android') and '.so.' in dep:
                        # Change .so.1.2 suffix to .so, to allow loading in .apk
                        new_dep = dep.rpartition('.so.')[0] + '.so'
                        subprocess.call(["patchelf", "--replace-needed", dep, new_dep, temp.name])
                        target_dep = os.path.dirname(target_path) + '/' + new_dep
                    else:
                        target_dep = os.path.dirname(target_path) + '/' + dep

                    self.consider_add_dependency(target_dep, dep)

                subprocess.call([GetStrip(), "-s", temp.name])

                if self.platform.startswith('android'):
                    # We must link explicitly with Python, because the usual
                    # -rdynamic trick doesn't work from a shared library loaded
                    # through ANativeActivity.
                    if suffix == '.so' and not os.path.basename(source_path).startswith('lib'):
                        pylib_name = "libpython" + get_config_var('LDVERSION') + ".so"
                        subprocess.call(["patchelf", "--add-needed", pylib_name, temp.name])
                else:
                    # On other systems, we use the rpath to force it to locate
                    # dependencies in the same directory.
                    subprocess.call(["patchelf", "--force-rpath", "--set-rpath", "$ORIGIN", temp.name])

            source_path = temp.name

        ext = ext.lower()
        if ext in ('.dll', '.pyd', '.exe'):
            # Scan and add Win32 dependencies.
            for dep in scan_dependencies(source_path):
                target_dep = os.path.dirname(target_path) + '/' + dep
                self.consider_add_dependency(target_dep, dep)

        if GetVerbose():
            print("Adding {0} from {1}".format(target_path, orig_source_path))

        zinfo = zipfile.ZipInfo.from_file(source_path, target_path)
        zinfo.compress_type = self.zip_file.compression
        if zinfo.date_time > self.max_date_time:
            zinfo.date_time = self.max_date_time

        # Copy the data to the zip file, while also calculating the SHA-256.
        size = 0
        sha = hashlib.sha256()
        with open(source_path, 'rb') as source_fp, self.zip_file.open(zinfo, 'w') as target_fp:
            data = source_fp.read(1024 * 1024)
            while data:
                size += len(data)
                target_fp.write(data)
                sha.update(data)
                data = source_fp.read(1024 * 1024)

        # Save it in PEP-0376 format for writing out later.
        digest = urlsafe_b64encode(sha.digest()).decode('ascii')
        digest = digest.rstrip('=')
        self.records.append("{0},sha256={1},{2}\n".format(target_path, digest, size))

        #if temp:
        #    os.unlink(temp.name)

    def write_file_data(self, target_path, source_data):
        """Adds the given file from a string."""

        sha = hashlib.sha256()
        sha.update(source_data.encode())
        digest = urlsafe_b64encode(sha.digest()).decode('ascii')
        digest = digest.rstrip('=')
        self.records.append("{0},sha256={1},{2}\n".format(target_path, digest, len(source_data)))

        if GetVerbose():
            print("Adding %s from data" % target_path)

        zinfo = zipfile.ZipInfo(filename=target_path,
                                date_time=self.max_date_time)
        zinfo.compress_type = self.zip_file.compression
        zinfo.external_attr = 0o600 << 16
        self.zip_file.writestr(zinfo, source_data)

    def write_directory(self, target_dir, source_dir):
        """Adds the given directory recursively to the .whl file."""

        for root, dirs, files in os.walk(source_dir):
            dirs.sort()

            for file in sorted(files):
                if os.path.splitext(file)[1] in EXCLUDE_EXT:
                    continue

                source_path = os.path.join(root, file)
                target_path = os.path.join(target_dir, os.path.relpath(source_path, source_dir))
                target_path = target_path.replace('\\', '/')
                self.write_file(target_path, source_path)

    def close(self):
        # Write the RECORD file.
        record_file = "{0}-{1}.dist-info/RECORD".format(self.name, self.version)
        self.records.append(record_file + ",,\n")

        zinfo = zipfile.ZipInfo(filename=record_file,
                                date_time=self.max_date_time)
        zinfo.compress_type = self.zip_file.compression
        zinfo.external_attr = 0o600 << 16
        self.zip_file.writestr(zinfo, "".join(self.records))
        self.zip_file.close()
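
# Each RECORD entry written above follows the wheel spec (PEP 376 style):
#     <path in archive>,sha256=<urlsafe-base64 digest, padding stripped>,<size in bytes>
# with the RECORD file itself listed last, with empty hash and size fields.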


def makewheel(version, output_dir, platform=None):
    if sys.platform not in ("win32", "darwin") and not sys.platform.startswith("cygwin"):
        if not LocateBinary("patchelf"):
            raise Exception("patchelf is required when building a Linux wheel.")

    if sys.version_info < (3, 8):
        raise Exception("Python 3.8 or higher is required to produce a wheel.")

    if platform is None:
        # Determine the platform from the build.
        platform_dat = os.path.join(output_dir, 'tmp', 'platform.dat')
        cmake_cache = os.path.join(output_dir, 'CMakeCache.txt')
        if os.path.isfile(platform_dat):
            # This is written by makepanda.
            platform = open(platform_dat, 'r').read().strip()
        elif os.path.isfile(cmake_cache):
            # This variable is written to the CMake cache by Package.cmake.
            for line in open(cmake_cache, 'r').readlines():
                if line.startswith('PYTHON_PLATFORM_TAG:STRING='):
                    platform = line[27:].strip()
                    break

            if not platform:
                raise Exception("Could not find PYTHON_PLATFORM_TAG in CMakeCache.txt, specify --platform manually.")
        else:
            print("Could not find platform.dat or CMakeCache.txt in build directory")
            platform = get_platform()

            if platform.startswith("linux-") and os.path.isdir("/opt/python"):
                # Is this manylinux?
                if os.path.isfile("/lib/libc-2.5.so") or os.path.isfile("/lib64/libc-2.5.so"):
                    platform = platform.replace("linux", "manylinux1")
                elif os.path.isfile("/lib/libc-2.12.so") or os.path.isfile("/lib64/libc-2.12.so"):
                    platform = platform.replace("linux", "manylinux2010")
                elif os.path.isfile("/lib/libc-2.17.so") or os.path.isfile("/lib64/libc-2.17.so"):
                    platform = platform.replace("linux", "manylinux2014")
                elif os.path.isfile("/lib/i386-linux-gnu/libc-2.24.so") or os.path.isfile("/lib/x86_64-linux-gnu/libc-2.24.so"):
                    platform = platform.replace("linux", "manylinux_2_24")
                elif os.path.isfile("/lib64/libc-2.28.so") and os.path.isfile('/etc/almalinux-release'):
                    platform = platform.replace("linux", "manylinux_2_28")

    platform = platform.replace('-', '_').replace('.', '_')

    is_windows = platform == 'win32' \
        or platform.startswith('win_') \
        or platform.startswith('cygwin_')
    is_macosx = platform.startswith('macosx_')
    is_android = platform.startswith('android_')

    # Global filepaths
    panda3d_dir = join(output_dir, "panda3d")
    pandac_dir = join(output_dir, "pandac")
    direct_dir = join(output_dir, "direct")
    models_dir = join(output_dir, "models")
    etc_dir = join(output_dir, "etc")
    bin_dir = join(output_dir, "bin")
    if is_windows:
        libs_dir = join(output_dir, "bin")
    else:
        libs_dir = join(output_dir, "lib")
    ext_mod_dir = get_python_ext_module_dir()
    license_src = "LICENSE"
    readme_src = "README.md"

    # Update relevant METADATA entries
    METADATA['version'] = version

    # Build out the metadata
    details = METADATA["extensions"]["python.details"]
    homepage = details["project_urls"]["Home"]
    author = details["contacts"][0]["name"]
    email = details["contacts"][0]["email"]
    metadata = ''.join([
        "Metadata-Version: {metadata_version}\n" \
        "Name: {name}\n" \
        "Version: {version}\n" \
        "Summary: {summary}\n" \
        "License: {license}\n".format(**METADATA),
        "Home-page: {0}\n".format(homepage),
    ] + ["Project-URL: {0}, {1}\n".format(*url) for url in PROJECT_URLS.items()] + [
        "Author: {0}\n".format(author),
        "Author-email: {0}\n".format(email),
        "Platform: {0}\n".format(platform),
    ] + ["Classifier: {0}\n".format(c) for c in METADATA['classifiers']])

    metadata += '\n' + DESCRIPTION.strip() + '\n'

    # Zip it up and name it the right thing
    whl = WheelFile('panda3d', version, platform)
    whl.lib_path = [libs_dir]

    if is_windows:
        whl.lib_path.append(ext_mod_dir)

    if platform.startswith("manylinux"):
        # On manylinux1, we pick up all libraries except for the ones specified
        # by the manylinux1 ABI.
        whl.lib_path.append("/usr/local/lib")

        if platform.endswith("_x86_64"):
            whl.lib_path += ["/lib64", "/usr/lib64"]
        else:
            whl.lib_path += ["/lib", "/usr/lib"]

        whl.ignore_deps.update(MANYLINUX_LIBS)

    # Add libpython for deployment.
    suffix = ''
    gil_disabled = get_config_var("Py_GIL_DISABLED")
    if gil_disabled and int(gil_disabled):
        suffix = 't'

    if is_windows:
        pylib_name = 'python{0}{1}{2}.dll'.format(sys.version_info[0], sys.version_info[1], suffix)
        pylib_path = os.path.join(get_config_var('BINDIR'), pylib_name)
    elif is_macosx:
        pylib_name = 'libpython{0}.{1}{2}.dylib'.format(sys.version_info[0], sys.version_info[1], suffix)
        pylib_path = os.path.join(get_config_var('LIBDIR'), pylib_name)
    elif is_android and CrossCompiling():
        pylib_name = 'libpython{0}.{1}{2}.so'.format(sys.version_info[0], sys.version_info[1], suffix)
        pylib_path = os.path.join(GetThirdpartyDir(), 'python', 'lib', pylib_name)
    else:
        pylib_name = get_config_var('LDLIBRARY')
        pylib_arch = get_config_var('MULTIARCH')
        libdir = get_config_var('LIBDIR')
        if pylib_arch and os.path.exists(os.path.join(libdir, pylib_arch, pylib_name)):
            pylib_path = os.path.join(libdir, pylib_arch, pylib_name)
        else:
            pylib_path = os.path.join(libdir, pylib_name)

    # If Python was linked statically, we don't need to include this.
    if not pylib_name.endswith('.a'):
        whl.write_file('deploy_libs/' + pylib_name, pylib_path)
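
    # For example (names are illustrative), this bundles python311.dll on Windows,
    # libpython3.11.dylib on macOS, or whatever LDLIBRARY names on other systems,
    # such as libpython3.11.so.1.0 on a typical Linux install.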

    # Add the trees with Python modules.
    whl.write_directory('direct', direct_dir)

    # Write the panda3d tree.  We use a custom empty __init__ since the
    # default one adds the bin directory to the PATH, which we don't have.
    p3d_init = """"Python bindings for the Panda3D libraries"

__version__ = '{0}'
""".format(version)

    if '27' in ABI_TAG:
        p3d_init += """
if __debug__:
    if 1 / 2 == 0:
        raise ImportError(\"Python 2 is not supported.\")
"""

    whl.write_file_data('panda3d/__init__.py', p3d_init)

    # Copy the extension modules from the panda3d directory.
    ext_suffix = GetExtensionSuffix()

    for file in sorted(os.listdir(panda3d_dir)):
        if file == '__init__.py':
            pass
        elif file.endswith('.py') or (file.endswith(ext_suffix) and '.' not in file[:-len(ext_suffix)]):
            source_path = os.path.join(panda3d_dir, file)

            if file.endswith('.pyd') and platform.startswith('cygwin'):
                # Rename it to .dll for cygwin Python to be able to load it.
                target_path = 'panda3d/' + os.path.splitext(file)[0] + '.dll'
            elif file.endswith(ext_suffix) and platform.startswith('android'):
                # Strip the extension suffix on Android.
                target_path = 'panda3d/' + file[:-len(ext_suffix)] + '.so'
            else:
                target_path = 'panda3d/' + file

            whl.write_file(target_path, source_path)

    # And copy the extension modules from the Python installation into the
    # deploy_libs directory, for use by deploy-ng.
    ext_suffix = '.pyd' if is_windows else '.so'

    for file in sorted(os.listdir(ext_mod_dir)):
        if file.endswith(ext_suffix):
            if file.startswith('_tkinter.'):
                # Tkinter is supplied in a separate wheel.
                continue

            source_path = os.path.join(ext_mod_dir, file)

            if file.endswith('.pyd') and platform.startswith('cygwin'):
                # Rename it to .dll for cygwin Python to be able to load it.
                target_path = 'deploy_libs/' + os.path.splitext(file)[0] + '.dll'
            else:
                target_path = 'deploy_libs/' + file

            whl.write_file(target_path, source_path)

    # Include the special sysconfigdata module.
    if os.name == 'posix':
        import sysconfig
        if hasattr(sysconfig, '_get_sysconfigdata_name'):
            modname = sysconfig._get_sysconfigdata_name() + '.py'
        else:
            modname = '_sysconfigdata.py'

        for entry in sys.path:
            source_path = os.path.join(entry, modname)
            if os.path.isfile(source_path):
                whl.write_file('deploy_libs/' + modname, source_path)
                break

    # Add plug-ins.
    for lib in PLUGIN_LIBS:
        plugin_name = 'lib' + lib
        if is_windows:
            plugin_name += '.dll'
        elif is_macosx:
            plugin_name += '.dylib'
        else:
            plugin_name += '.so'

        plugin_path = os.path.join(libs_dir, plugin_name)
        if os.path.isfile(plugin_path):
            whl.write_file('panda3d/' + plugin_name, plugin_path)

    if platform.startswith('android'):
        deploy_stub_path = os.path.join(libs_dir, 'libdeploy-stubw.so')
        if os.path.isfile(deploy_stub_path):
            whl.write_file('deploy_libs/libdeploy-stubw.so', deploy_stub_path)

        classes_dex_path = os.path.join(output_dir, 'classes.dex')
        if os.path.isfile(classes_dex_path):
            whl.write_file('deploy_libs/classes.dex', classes_dex_path)

    # Add the .data directory, containing additional files.
    data_dir = 'panda3d-{0}.data'.format(version)
    #whl.write_directory(data_dir + '/data/etc', etc_dir)
    #whl.write_directory(data_dir + '/data/models', models_dir)

    # Actually, let's not.  That seems to install the files to the strangest
    # places in the user's filesystem.  Let's instead put them in panda3d.
    whl.write_directory('panda3d/etc', etc_dir)
    whl.write_directory('panda3d/models', models_dir)

    # Add the pandac tree for backward compatibility.
    for file in sorted(os.listdir(pandac_dir)):
        if file.endswith('.py'):
            whl.write_file('pandac/' + file, os.path.join(pandac_dir, file))

    # Let's also add the interrogate databases.
    input_dir = os.path.join(pandac_dir, 'input')
    if os.path.isdir(input_dir):
        for file in sorted(os.listdir(input_dir)):
            if file.endswith('.in'):
                whl.write_file('pandac/input/' + file, os.path.join(input_dir, file))

    # Add a panda3d-tools directory containing the executables.
    entry_points = '[console_scripts]\n'
    entry_points += 'eggcacher = direct.directscripts.eggcacher:main\n'
    entry_points += 'pfreeze = direct.dist.pfreeze:main\n'
    tools_init = ''

    for file in sorted(os.listdir(bin_dir)):
        basename = os.path.splitext(file)[0]
        if basename in EXCLUDE_BINARIES:
            continue

        source_path = os.path.join(bin_dir, file)

        if is_executable(source_path):
            # Put the .exe files inside the panda3d-tools directory.
            whl.write_file('panda3d_tools/' + file, source_path)

            if basename.endswith('_bin'):
                # These tools won't be invoked by the user directly.
                continue

            # Tell pip to create a wrapper script.
            funcname = basename.replace('-', '_')
            entry_points += '{0} = panda3d_tools:{1}\n'.format(basename, funcname)
            tools_init += '{0} = lambda: _exec_tool({1!r})\n'.format(funcname, file)

    entry_points += '[distutils.commands]\n'
    entry_points += 'build_apps = direct.dist.commands:build_apps\n'
    entry_points += 'bdist_apps = direct.dist.commands:bdist_apps\n'
    entry_points += '[setuptools.finalize_distribution_options]\n'
    entry_points += 'build_apps = direct.dist._dist_hooks:finalize_distribution_options\n'

    whl.write_file_data('panda3d_tools/__init__.py', PANDA3D_TOOLS_INIT.format(tools_init))

    # Add the dist-info directory last.
    info_dir = 'panda3d-{0}.dist-info'.format(version)
    whl.write_file_data(info_dir + '/entry_points.txt', entry_points)
    whl.write_file_data(info_dir + '/metadata.json', json.dumps(METADATA, indent=4, separators=(',', ': ')))
    whl.write_file_data(info_dir + '/METADATA', metadata)
    whl.write_file_data(info_dir + '/WHEEL', WHEEL_DATA.format(PY_VERSION, ABI_TAG, platform))
    whl.write_file(info_dir + '/LICENSE.txt', license_src)
    whl.write_file(info_dir + '/README.md', readme_src)
    whl.write_file_data(info_dir + '/top_level.txt', 'direct\npanda3d\npandac\npanda3d_tools\n')

    whl.close()
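
# Typical invocation after a makepanda build (illustrative, paths depend on the
# checkout): python makewheel.py --outputdir built --verbose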

if __name__ == "__main__":
    version = GetMetadataValue('version')

    parser = OptionParser()
    parser.add_option('', '--version', dest = 'version', help = 'Panda3D version number (default: %s)' % (version), default = version)
    parser.add_option('', '--outputdir', dest = 'outputdir', help = 'Makepanda\'s output directory (default: built)', default = 'built')
    parser.add_option('', '--verbose', dest = 'verbose', help = 'Enable verbose output', action = 'store_true', default = False)
    parser.add_option('', '--platform', dest = 'platform', help = 'Override platform tag', default = None)
    (options, args) = parser.parse_args()

    SetVerbose(options.verbose)
    makewheel(options.version, options.outputdir, options.platform)