Parcourir la source

Merge branch 'development' into null_uint_test

Simon Krajewski il y a 5 mois
Parent
commit
87dc54ca11
100 fichiers modifiés avec 3637 ajouts et 3949 suppressions
  1. 0 24
      .devcontainer/devcontainer.json
  2. 0 28
      .devcontainer/docker-compose.yml
  3. 0 454
      .devcontainer/library-scripts/common-debian.sh
  4. 0 309
      .devcontainer/library-scripts/docker-debian.sh
  5. 0 6
      .earthlyignore
  6. 0 14
      .github/workflows/cancel.yml
  7. 276 138
      .github/workflows/main.yml
  8. 19 0
      .github/workflows/target.yml
  9. 2 6
      .gitignore
  10. 5 1
      .vscode/settings.json
  11. 8 5
      .vscode/tasks.json
  12. 0 411
      Earthfile
  13. 35 23
      Makefile
  14. 7 4
      Makefile.win
  15. 1 2
      README.md
  16. 96 0
      WinSetup.ps1
  17. 2 2
      dune
  18. 0 4
      dune-project
  19. 0 2
      dune-workspace.dev
  20. 72 0
      extra/CHANGES.txt
  21. 23 3
      extra/LICENSE.txt
  22. 16 12
      extra/github-actions/build-mac.yml
  23. 10 26
      extra/github-actions/build-windows.yml
  24. 2 0
      extra/github-actions/install-neko-unix.yml
  25. 1 1
      extra/github-actions/install-nsis.yml
  26. 15 5
      extra/github-actions/install-ocaml-libs-windows.yml
  27. 7 31
      extra/github-actions/install-ocaml-windows.yml
  28. 0 20
      extra/github-actions/install-ocaml-windows64.yml
  29. 1 1
      extra/github-actions/test-windows.yml
  30. 177 82
      extra/github-actions/workflows/main.yml
  31. 1 1
      extra/haxelib_src
  32. 6 5
      haxe.opam
  33. 0 18
      libs/.gitignore
  34. 0 23
      libs/Makefile
  35. 0 30
      libs/extc/Makefile
  36. 1 7
      libs/extc/process_stubs.c
  37. 501 0
      libs/extlib-leftovers/LICENSE
  38. 0 35
      libs/extlib-leftovers/Makefile
  39. 2 2
      libs/mbedtls/mbedtls.ml
  40. 98 52
      libs/mbedtls/mbedtls_stubs.c
  41. 0 23
      libs/neko/Makefile
  42. 7 0
      libs/objsize/LICENSE
  43. 0 29
      libs/objsize/Makefile
  44. 0 28
      libs/pcre2/Makefile
  45. 0 81
      libs/swflib/Makefile
  46. 0 22
      libs/ziplib/Makefile
  47. 0 7
      libs/ziplib/test/Makefile
  48. 67 10
      src-json/define.json
  49. 7 1
      src-json/meta.json
  50. 13 1
      src-json/warning.json
  51. 0 14
      src-prebuild/dune
  52. 4 351
      src/codegen/codegen.ml
  53. 223 0
      src/codegen/dump.ml
  54. 132 0
      src/codegen/fixOverrides.ml
  55. 17 14
      src/codegen/genxml.ml
  56. 42 39
      src/codegen/javaModern.ml
  57. 9 7
      src/codegen/swfLoader.ml
  58. 18 18
      src/compiler/args.ml
  59. 56 25
      src/compiler/compilationCache.ml
  60. 15 5
      src/compiler/compilationContext.ml
  61. 109 63
      src/compiler/compiler.ml
  62. 7 8
      src/compiler/displayOutput.ml
  63. 45 52
      src/compiler/displayProcessing.ml
  64. 5 0
      src/compiler/dune
  65. 63 39
      src/compiler/generate.ml
  66. 2 3
      src/compiler/haxe.ml
  67. 0 2
      src/compiler/helper.ml
  68. 16 10
      src/compiler/hxb/hxbData.ml
  69. 26 20
      src/compiler/hxb/hxbLib.ml
  70. 243 117
      src/compiler/hxb/hxbReader.ml
  71. 14 0
      src/compiler/hxb/hxbReaderApi.ml
  72. 179 121
      src/compiler/hxb/hxbWriter.ml
  73. 1 1
      src/compiler/hxb/hxbWriterConfig.ml
  74. 91 79
      src/compiler/messageReporting.ml
  75. 79 69
      src/compiler/server.ml
  76. 2 10
      src/compiler/serverCompilationContext.ml
  77. 1 3
      src/compiler/serverConfig.ml
  78. 2 2
      src/compiler/tasks.ml
  79. 40 27
      src/context/abstractCast.ml
  80. 100 257
      src/context/common.ml
  81. 34 15
      src/context/commonCache.ml
  82. 1 1
      src/context/display/diagnostics.ml
  83. 3 12
      src/context/display/display.ml
  84. 2 1
      src/context/display/displayException.ml
  85. 3 1
      src/context/display/displayFields.ml
  86. 32 38
      src/context/display/displayJson.ml
  87. 256 0
      src/context/display/displayMemory.ml
  88. 1 1
      src/context/display/displayPath.ml
  89. 9 8
      src/context/display/displayTexpr.ml
  90. 211 211
      src/context/display/displayToplevel.ml
  91. 1 1
      src/context/display/documentSymbols.ml
  92. 4 4
      src/context/display/exprPreprocessing.ml
  93. 25 29
      src/context/display/findReferences.ml
  94. 6 5
      src/context/display/importHandling.ml
  95. 4 8
      src/context/display/statistics.ml
  96. 18 18
      src/context/display/syntaxExplorer.ml
  97. 3 3
      src/context/formatString.ml
  98. 0 50
      src/context/lookup.ml
  99. 0 197
      src/context/memory.ml
  100. 5 1
      src/context/nativeLibraries.ml

+ 0 - 24
.devcontainer/devcontainer.json

@@ -1,24 +0,0 @@
-// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
-// https://github.com/microsoft/vscode-dev-containers/tree/v0.202.5/containers/docker-from-docker-compose
-{
-	"name": "haxe",
-	"dockerComposeFile": "docker-compose.yml",
-	"service": "workspace",
-	"workspaceFolder": "/workspace",
-
-	// Use this environment variable if you need to bind mount your local source code into a new container.
-	"remoteEnv": {
-		"LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}"
-	},
-	
-	// Set *default* container specific settings.json values on container create.
-	"settings": {},
-
-	"extensions": [
-		"nadako.vshaxe",
-		"ms-azuretools.vscode-docker",
-		"earthly.earthfile-syntax-highlighting",
-	],
-
-	"remoteUser": "vscode"
-}

+ 0 - 28
.devcontainer/docker-compose.yml

@@ -1,28 +0,0 @@
-version: '3'
-services:
-  workspace:
-    image: ghcr.io/haxefoundation/haxe_devcontainer:development
-    init: true
-    volumes:
-      - /var/run/docker.sock:/var/run/docker-host.sock
-      - ..:/workspace:cached
-    environment:
-      - EARTHLY_BUILDKIT_HOST=tcp://earthly:8372
-      - EARTHLY_USE_INLINE_CACHE=true
-      - EARTHLY_SAVE_INLINE_CACHE=true
-    user: vscode
-    entrypoint: /usr/local/share/docker-init.sh
-    command: sleep infinity
-  earthly:
-    image: earthly/buildkitd:v0.6.13
-    privileged: true
-    environment:
-      - BUILDKIT_TCP_TRANSPORT_ENABLED=true
-    expose:
-      - 8372
-    volumes:
-      # https://docs.earthly.dev/docs/guides/using-the-earthly-docker-images/buildkit-standalone#earthly_tmp_dir
-      - earthly-tmp:/tmp/earthly:rw
-
-volumes:
-  earthly-tmp:

+ 0 - 454
.devcontainer/library-scripts/common-debian.sh

@@ -1,454 +0,0 @@
-#!/usr/bin/env bash
-#-------------------------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
-#-------------------------------------------------------------------------------------------------------------
-#
-# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
-# Maintainer: The VS Code and Codespaces Teams
-#
-# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
-
-set -e
-
-INSTALL_ZSH=${1:-"true"}
-USERNAME=${2:-"automatic"}
-USER_UID=${3:-"automatic"}
-USER_GID=${4:-"automatic"}
-UPGRADE_PACKAGES=${5:-"true"}
-INSTALL_OH_MYS=${6:-"true"}
-ADD_NON_FREE_PACKAGES=${7:-"false"}
-SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
-MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
-
-if [ "$(id -u)" -ne 0 ]; then
-    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
-    exit 1
-fi
-
-# Ensure that login shells get the correct path if the user updated the PATH using ENV.
-rm -f /etc/profile.d/00-restore-env.sh
-echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
-chmod +x /etc/profile.d/00-restore-env.sh
-
-# If in automatic mode, determine if a user already exists, if not use vscode
-if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
-    USERNAME=""
-    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
-    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
-        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
-            USERNAME=${CURRENT_USER}
-            break
-        fi
-    done
-    if [ "${USERNAME}" = "" ]; then
-        USERNAME=vscode
-    fi
-elif [ "${USERNAME}" = "none" ]; then
-    USERNAME=root
-    USER_UID=0
-    USER_GID=0
-fi
-
-# Load markers to see which steps have already run
-if [ -f "${MARKER_FILE}" ]; then
-    echo "Marker file found:"
-    cat "${MARKER_FILE}"
-    source "${MARKER_FILE}"
-fi
-
-# Ensure apt is in non-interactive to avoid prompts
-export DEBIAN_FRONTEND=noninteractive
-
-# Function to call apt-get if needed
-apt_get_update_if_needed()
-{
-    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
-        echo "Running apt-get update..."
-        apt-get update
-    else
-        echo "Skipping apt-get update."
-    fi
-}
-
-# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
-if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
-
-    package_list="apt-utils \
-        openssh-client \
-        gnupg2 \
-        dirmngr \
-        iproute2 \
-        procps \
-        lsof \
-        htop \
-        net-tools \
-        psmisc \
-        curl \
-        wget \
-        rsync \
-        ca-certificates \
-        unzip \
-        zip \
-        nano \
-        vim-tiny \
-        less \
-        jq \
-        lsb-release \
-        apt-transport-https \
-        dialog \
-        libc6 \
-        libgcc1 \
-        libkrb5-3 \
-        libgssapi-krb5-2 \
-        libicu[0-9][0-9] \
-        liblttng-ust0 \
-        libstdc++6 \
-        zlib1g \
-        locales \
-        sudo \
-        ncdu \
-        man-db \
-        strace \
-        manpages \
-        manpages-dev \
-        init-system-helpers"
-        
-    # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
-    if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
-        # Bring in variables from /etc/os-release like VERSION_CODENAME
-        . /etc/os-release
-        sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
-        sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
-        sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
-        sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
-        sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
-        sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
-        sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list 
-        sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
-        # Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
-        sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
-        sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
-        echo "Running apt-get update..."
-        apt-get update
-        package_list="${package_list} manpages-posix manpages-posix-dev"
-    else
-        apt_get_update_if_needed
-    fi
-
-    # Install libssl1.1 if available
-    if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
-        package_list="${package_list}       libssl1.1"
-    fi
-    
-    # Install appropriate version of libssl1.0.x if available
-    libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
-    if [ "$(echo "$LIlibssl_packageBSSL" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
-        if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
-            # Debian 9
-            package_list="${package_list}       libssl1.0.2"
-        elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
-            # Ubuntu 18.04, 16.04, earlier
-            package_list="${package_list}       libssl1.0.0"
-        fi
-    fi
-
-    echo "Packages to verify are installed: ${package_list}"
-    apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )
-        
-    # Install git if not already installed (may be more recent than distro version)
-    if ! type git > /dev/null 2>&1; then
-        apt-get -y install --no-install-recommends git
-    fi
-
-    PACKAGES_ALREADY_INSTALLED="true"
-fi
-
-# Get to latest versions of all packages
-if [ "${UPGRADE_PACKAGES}" = "true" ]; then
-    apt_get_update_if_needed
-    apt-get -y upgrade --no-install-recommends
-    apt-get autoremove -y
-fi
-
-# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
-# Common need for both applications and things like the agnoster ZSH theme.
-if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
-    echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen 
-    locale-gen
-    LOCALE_ALREADY_SET="true"
-fi
-
-# Create or update a non-root user to match UID/GID.
-group_name="${USERNAME}"
-if id -u ${USERNAME} > /dev/null 2>&1; then
-    # User exists, update if needed
-    if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then 
-        group_name="$(id -gn $USERNAME)"
-        groupmod --gid $USER_GID ${group_name}
-        usermod --gid $USER_GID $USERNAME
-    fi
-    if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then 
-        usermod --uid $USER_UID $USERNAME
-    fi
-else
-    # Create user
-    if [ "${USER_GID}" = "automatic" ]; then
-        groupadd $USERNAME
-    else
-        groupadd --gid $USER_GID $USERNAME
-    fi
-    if [ "${USER_UID}" = "automatic" ]; then 
-        useradd -s /bin/bash --gid $USERNAME -m $USERNAME
-    else
-        useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
-    fi
-fi
-
-# Add add sudo support for non-root user
-if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
-    echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
-    chmod 0440 /etc/sudoers.d/$USERNAME
-    EXISTING_NON_ROOT_USER="${USERNAME}"
-fi
-
-# ** Shell customization section **
-if [ "${USERNAME}" = "root" ]; then 
-    user_rc_path="/root"
-else
-    user_rc_path="/home/${USERNAME}"
-fi
-
-# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
-if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
-    cp  /etc/skel/.bashrc "${user_rc_path}/.bashrc"
-fi
-
-# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
-if  [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
-    cp  /etc/skel/.profile "${user_rc_path}/.profile"
-fi
-
-# .bashrc/.zshrc snippet
-rc_snippet="$(cat << 'EOF'
-
-if [ -z "${USER}" ]; then export USER=$(whoami); fi
-if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
-
-# Display optional first run image specific notice if configured and terminal is interactive
-if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
-    if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
-        cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
-    elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
-        cat "/workspaces/.codespaces/shared/first-run-notice.txt"
-    fi
-    mkdir -p "$HOME/.config/vscode-dev-containers"
-    # Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
-    ((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
-fi
-
-# Set the default git editor if not already set
-if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
-    if  [ "${TERM_PROGRAM}" = "vscode" ]; then
-        if [[ -n $(command -v code-insiders) &&  -z $(command -v code) ]]; then 
-            export GIT_EDITOR="code-insiders --wait"
-        else 
-            export GIT_EDITOR="code --wait"
-        fi
-    fi
-fi
-
-EOF
-)"
-
-# code shim, it fallbacks to code-insiders if code is not available
-cat << 'EOF' > /usr/local/bin/code
-#!/bin/sh
-
-get_in_path_except_current() {
-    which -a "$1" | grep -A1 "$0" | grep -v "$0"
-}
-
-code="$(get_in_path_except_current code)"
-
-if [ -n "$code" ]; then
-    exec "$code" "$@"
-elif [ "$(command -v code-insiders)" ]; then
-    exec code-insiders "$@"
-else
-    echo "code or code-insiders is not installed" >&2
-    exit 127
-fi
-EOF
-chmod +x /usr/local/bin/code
-
-# systemctl shim - tells people to use 'service' if systemd is not running
-cat << 'EOF' > /usr/local/bin/systemctl
-#!/bin/sh
-set -e
-if [ -d "/run/systemd/system" ]; then
-    exec /bin/systemctl/systemctl "$@"
-else
-    echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services intead. e.g.: \n\nservice --status-all'
-fi
-EOF
-chmod +x /usr/local/bin/systemctl
-
-# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
-codespaces_bash="$(cat \
-<<'EOF'
-
-# Codespaces bash prompt theme
-__bash_prompt() {
-    local userpart='`export XIT=$? \
-        && [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
-        && [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
-    local gitbranch='`\
-        if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
-            export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
-            if [ "${BRANCH}" != "" ]; then \
-                echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
-                && if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
-                        echo -n " \[\033[1;33m\]✗"; \
-                fi \
-                && echo -n "\[\033[0;36m\]) "; \
-            fi; \
-        fi`'
-    local lightblue='\[\033[1;34m\]'
-    local removecolor='\[\033[0m\]'
-    PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
-    unset -f __bash_prompt
-}
-__bash_prompt
-
-EOF
-)"
-
-codespaces_zsh="$(cat \
-<<'EOF'
-# Codespaces zsh prompt theme
-__zsh_prompt() {
-    local prompt_username
-    if [ ! -z "${GITHUB_USER}" ]; then 
-        prompt_username="@${GITHUB_USER}"
-    else
-        prompt_username="%n"
-    fi
-    PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
-    PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
-    PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
-    PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
-    unset -f __zsh_prompt
-}
-ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
-ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
-ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
-ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
-__zsh_prompt
-
-EOF
-)"
-
-# Add RC snippet and custom bash prompt
-if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
-    echo "${rc_snippet}" >> /etc/bash.bashrc
-    echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
-    echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
-    if [ "${USERNAME}" != "root" ]; then
-        echo "${codespaces_bash}" >> "/root/.bashrc"
-        echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
-    fi
-    chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
-    RC_SNIPPET_ALREADY_ADDED="true"
-fi
-
-# Optionally install and configure zsh and Oh My Zsh!
-if [ "${INSTALL_ZSH}" = "true" ]; then
-    if ! type zsh > /dev/null 2>&1; then
-        apt_get_update_if_needed
-        apt-get install -y zsh
-    fi
-    if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
-        echo "${rc_snippet}" >> /etc/zsh/zshrc
-        ZSH_ALREADY_INSTALLED="true"
-    fi
-
-    # Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme.
-    # See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
-    oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
-    if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
-        template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
-        user_rc_file="${user_rc_path}/.zshrc"
-        umask g-w,o-w
-        mkdir -p ${oh_my_install_dir}
-        git clone --depth=1 \
-            -c core.eol=lf \
-            -c core.autocrlf=false \
-            -c fsck.zeroPaddedFilemode=ignore \
-            -c fetch.fsck.zeroPaddedFilemode=ignore \
-            -c receive.fsck.zeroPaddedFilemode=ignore \
-            "https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
-        echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
-        sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
-
-        mkdir -p ${oh_my_install_dir}/custom/themes
-        echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
-        # Shrink git while still enabling updates
-        cd "${oh_my_install_dir}"
-        git repack -a -d -f --depth=1 --window=1
-        # Copy to non-root user if one is specified
-        if [ "${USERNAME}" != "root" ]; then
-            cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
-            chown -R ${USERNAME}:${group_name} "${user_rc_path}"
-        fi
-    fi
-fi
-
-# Persist image metadata info, script if meta.env found in same directory
-meta_info_script="$(cat << 'EOF'
-#!/bin/sh
-. /usr/local/etc/vscode-dev-containers/meta.env
-
-# Minimal output
-if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
-    echo "${VERSION}"
-    exit 0
-elif [ "$1" = "release" ]; then
-    echo "${GIT_REPOSITORY_RELEASE}"
-    exit 0
-elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
-    echo "${CONTENTS_URL}"
-    exit 0
-fi
-
-#Full output
-echo
-echo "Development container image information"
-echo
-if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
-if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
-if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
-if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
-if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
-if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
-if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
-echo
-EOF
-)"
-if [ -f "${SCRIPT_DIR}/meta.env" ]; then
-    mkdir -p /usr/local/etc/vscode-dev-containers/
-    cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
-    echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
-    chmod +x /usr/local/bin/devcontainer-info
-fi
-
-# Write marker file
-mkdir -p "$(dirname "${MARKER_FILE}")"
-echo -e "\
-    PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
-    LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
-    EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
-    RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
-    ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
-
-echo "Done!"

+ 0 - 309
.devcontainer/library-scripts/docker-debian.sh

@@ -1,309 +0,0 @@
-#!/usr/bin/env bash
-#-------------------------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
-#-------------------------------------------------------------------------------------------------------------
-#
-# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker.md
-# Maintainer: The VS Code and Codespaces Teams
-#
-# Syntax: ./docker-debian.sh [enable non-root docker socket access flag] [source socket] [target socket] [non-root user] [use moby] [CLI version]
-
-ENABLE_NONROOT_DOCKER=${1:-"true"}
-SOURCE_SOCKET=${2:-"/var/run/docker-host.sock"}
-TARGET_SOCKET=${3:-"/var/run/docker.sock"}
-USERNAME=${4:-"automatic"}
-USE_MOBY=${5:-"true"}
-DOCKER_VERSION=${6:-"latest"}
-MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
-DOCKER_DASH_COMPOSE_VERSION="1"
-
-set -e
-
-if [ "$(id -u)" -ne 0 ]; then
-    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
-    exit 1
-fi
-
-# Determine the appropriate non-root user
-if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
-    USERNAME=""
-    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
-    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
-        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
-            USERNAME=${CURRENT_USER}
-            break
-        fi
-    done
-    if [ "${USERNAME}" = "" ]; then
-        USERNAME=root
-    fi
-elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
-    USERNAME=root
-fi
-
-# Get central common setting
-get_common_setting() {
-    if [ "${common_settings_file_loaded}" != "true" ]; then
-        curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
-        common_settings_file_loaded=true
-    fi
-    if [ -f "/tmp/vsdc-settings.env" ]; then
-        local multi_line=""
-        if [ "$2" = "true" ]; then multi_line="-z"; fi
-        local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
-        if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
-    fi
-    echo "$1=${!1}"
-}
-
-# Function to run apt-get if needed
-apt_get_update_if_needed()
-{
-    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
-        echo "Running apt-get update..."
-        apt-get update
-    else
-        echo "Skipping apt-get update."
-    fi
-}
-
-# Checks if packages are installed and installs them if not
-check_packages() {
-    if ! dpkg -s "$@" > /dev/null 2>&1; then
-        apt_get_update_if_needed
-        apt-get -y install --no-install-recommends "$@"
-    fi
-}
-
-# Figure out correct version of a three part version number is not passed
-find_version_from_git_tags() {
-    local variable_name=$1
-    local requested_version=${!variable_name}
-    if [ "${requested_version}" = "none" ]; then return; fi
-    local repository=$2
-    local prefix=${3:-"tags/v"}
-    local separator=${4:-"."}
-    local last_part_optional=${5:-"false"}    
-    if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
-        local escaped_separator=${separator//./\\.}
-        local last_part
-        if [ "${last_part_optional}" = "true" ]; then
-            last_part="(${escaped_separator}[0-9]+)?"
-        else
-            last_part="${escaped_separator}[0-9]+"
-        fi
-        local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
-        local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
-        if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
-            declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
-        else
-            set +e
-            declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
-            set -e
-        fi
-    fi
-    if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then
-        echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
-        exit 1
-    fi
-    echo "${variable_name}=${!variable_name}"
-}
-
-# Ensure apt is in non-interactive to avoid prompts
-export DEBIAN_FRONTEND=noninteractive
-
-# Install dependencies
-check_packages apt-transport-https curl ca-certificates gnupg2 dirmngr
-if ! type git > /dev/null 2>&1; then
-    apt_get_update_if_needed
-    apt-get -y install git
-fi
-
-# Source /etc/os-release to get OS info
-. /etc/os-release
-# Fetch host/container arch.
-architecture="$(dpkg --print-architecture)"
-
-# Set up the necessary apt repos (either Microsoft's or Docker's)
-if [ "${USE_MOBY}" = "true" ]; then
-
-    cli_package_name="moby-cli"
-
-    # Import key safely and import Microsoft apt repo
-    get_common_setting MICROSOFT_GPG_KEYS_URI
-    curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
-    echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
-else
-    # Name of proprietary engine package
-    cli_package_name="docker-ce-cli"
-
-    # Import key safely and import Docker apt repo
-    curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg
-    echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list
-fi
-
-# Refresh apt lists
-apt-get update
-
-# Soft version matching for CLI
-if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then
-    # Empty, meaning grab whatever "latest" is in apt repo
-    cli_version_suffix=""
-else    
-    # Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...)
-    docker_version_dot_escaped="${DOCKER_VERSION//./\\.}"
-    docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}"
-    # Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/
-    docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)"
-    set +e # Don't exit if finding version fails - will handle gracefully
-    cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")"
-    set -e
-    if [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ]; then
-        echo "(!) No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:"
-        apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+'
-        exit 1
-    fi
-    echo "cli_version_suffix ${cli_version_suffix}"
-fi
-
-# Install Docker / Moby CLI if not already installed
-if type docker > /dev/null 2>&1; then
-    echo "Docker / Moby CLI already installed."
-else
-    if [ "${USE_MOBY}" = "true" ]; then
-        apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx
-        apt-get -y install --no-install-recommends moby-compose || echo "(*) Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping."
-    else
-        apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix}
-    fi
-fi
-
-# Install Docker Compose if not already installed  and is on a supported architecture
-if type docker-compose > /dev/null 2>&1; then
-    echo "Docker Compose already installed."
-else
-    TARGET_COMPOSE_ARCH="$(uname -m)"
-    if [ "${TARGET_COMPOSE_ARCH}" = "amd64" ]; then
-        TARGET_COMPOSE_ARCH="x86_64"
-    fi
-    if [ "${TARGET_COMPOSE_ARCH}" != "x86_64" ]; then
-        # Use pip to get a version that runns on this architecture
-        if ! dpkg -s python3-minimal python3-pip libffi-dev python3-venv > /dev/null 2>&1; then
-            apt_get_update_if_needed
-            apt-get -y install python3-minimal python3-pip libffi-dev python3-venv
-        fi
-        export PIPX_HOME=/usr/local/pipx
-        mkdir -p ${PIPX_HOME}
-        export PIPX_BIN_DIR=/usr/local/bin
-        export PYTHONUSERBASE=/tmp/pip-tmp
-        export PIP_CACHE_DIR=/tmp/pip-tmp/cache
-        pipx_bin=pipx
-        if ! type pipx > /dev/null 2>&1; then
-            pip3 install --disable-pip-version-check --no-cache-dir --user pipx
-            pipx_bin=/tmp/pip-tmp/bin/pipx
-        fi
-        ${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' docker-compose
-        rm -rf /tmp/pip-tmp
-    else 
-        find_version_from_git_tags DOCKER_DASH_COMPOSE_VERSION "https://github.com/docker/compose" "tags/"
-        echo "(*) Installing docker-compose ${DOCKER_DASH_COMPOSE_VERSION}..."
-        curl -fsSL "https://github.com/docker/compose/releases/download/${DOCKER_DASH_COMPOSE_VERSION}/docker-compose-Linux-x86_64" -o /usr/local/bin/docker-compose
-        chmod +x /usr/local/bin/docker-compose
-    fi
-fi
-
-# If init file already exists, exit
-if [ -f "/usr/local/share/docker-init.sh" ]; then
-    exit 0
-fi
-echo "docker-init doesnt exist, adding..."
-
-# By default, make the source and target sockets the same
-if [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ]; then
-    touch "${SOURCE_SOCKET}"
-    ln -s "${SOURCE_SOCKET}" "${TARGET_SOCKET}"
-fi
-
-# Add a stub if not adding non-root user access, user is root
-if [ "${ENABLE_NONROOT_DOCKER}" = "false" ] || [ "${USERNAME}" = "root" ]; then
-    echo '/usr/bin/env bash -c "\$@"' > /usr/local/share/docker-init.sh
-    chmod +x /usr/local/share/docker-init.sh
-    exit 0
-fi
-
-# If enabling non-root access and specified user is found, setup socat and add script
-chown -h "${USERNAME}":root "${TARGET_SOCKET}"        
-if ! dpkg -s socat > /dev/null 2>&1; then
-    apt_get_update_if_needed
-    apt-get -y install socat
-fi
-tee /usr/local/share/docker-init.sh > /dev/null \
-<< EOF 
-#!/usr/bin/env bash
-#-------------------------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
-#-------------------------------------------------------------------------------------------------------------
-
-set -e
-
-SOCAT_PATH_BASE=/tmp/vscr-docker-from-docker
-SOCAT_LOG=\${SOCAT_PATH_BASE}.log
-SOCAT_PID=\${SOCAT_PATH_BASE}.pid
-
-# Wrapper function to only use sudo if not already root
-sudoIf()
-{
-    if [ "\$(id -u)" -ne 0 ]; then
-        sudo "\$@"
-    else
-        "\$@"
-    fi
-}
-
-# Log messages
-log()
-{
-    echo -e "[\$(date)] \$@" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
-}
-
-echo -e "\n** \$(date) **" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
-log "Ensuring ${USERNAME} has access to ${SOURCE_SOCKET} via ${TARGET_SOCKET}"
-
-# If enabled, try to add a docker group with the right GID. If the group is root, 
-# fall back on using socat to forward the docker socket to another unix socket so 
-# that we can set permissions on it without affecting the host.
-if [ "${ENABLE_NONROOT_DOCKER}" = "true" ] && [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ] && [ "${USERNAME}" != "root" ] && [ "${USERNAME}" != "0" ]; then
-    SOCKET_GID=\$(stat -c '%g' ${SOURCE_SOCKET})
-    if [ "\${SOCKET_GID}" != "0" ]; then
-        log "Adding user to group with GID \${SOCKET_GID}."
-        if [ "\$(cat /etc/group | grep :\${SOCKET_GID}:)" = "" ]; then
-            sudoIf groupadd --gid \${SOCKET_GID} docker-host
-        fi
-        # Add user to group if not already in it
-        if [ "\$(id ${USERNAME} | grep -E "groups.*(=|,)\${SOCKET_GID}\(")" = "" ]; then
-            sudoIf usermod -aG \${SOCKET_GID} ${USERNAME}
-        fi
-    else
-        # Enable proxy if not already running
-        if [ ! -f "\${SOCAT_PID}" ] || ! ps -p \$(cat \${SOCAT_PID}) > /dev/null; then
-            log "Enabling socket proxy."
-            log "Proxying ${SOURCE_SOCKET} to ${TARGET_SOCKET} for vscode"
-            sudoIf rm -rf ${TARGET_SOCKET}
-            (sudoIf socat UNIX-LISTEN:${TARGET_SOCKET},fork,mode=660,user=${USERNAME} UNIX-CONNECT:${SOURCE_SOCKET} 2>&1 | sudoIf tee -a \${SOCAT_LOG} > /dev/null & echo "\$!" | sudoIf tee \${SOCAT_PID} > /dev/null)
-        else
-            log "Socket proxy already running."
-        fi
-    fi
-    log "Success"
-fi
-
-# Execute whatever commands were passed in (if any). This allows us 
-# to set this script to ENTRYPOINT while still executing the default CMD.
-set +e
-exec "\$@"
-EOF
-chmod +x /usr/local/share/docker-init.sh
-chown ${USERNAME}:root /usr/local/share/docker-init.sh
-echo "Done!"

+ 0 - 6
.earthlyignore

@@ -1,6 +0,0 @@
-.github
-.vscode
-Earthfile
-extra/doc
-bin
-out

+ 0 - 14
.github/workflows/cancel.yml

@@ -1,14 +0,0 @@
-name: Cancel previous jobs
-on:
-  workflow_run:
-    workflows: ["CI"]
-    types:
-      - requested
-jobs:
-  cancel:
-    runs-on: ubuntu-latest
-    steps:
-    - name: Cancel previous runs
-      uses: styfle/[email protected]
-      with:
-        workflow_id: ${{ github.event.workflow.id }}

+ 276 - 138
.github/workflows/main.yml

@@ -4,6 +4,13 @@
 name: CI
 on: [push, pull_request]
 
+env:
+  OCAML_VERSION: 5.3.0
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   windows64-build:
     runs-on: windows-latest
@@ -18,13 +25,8 @@ jobs:
         with:
           submodules: recursive
 
-      - name: Use GNU Tar from msys
-        run: |
-          echo "C:\msys64\usr\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
-          rm C:\msys64\usr\bin\bash.exe
-
       - name: choco install nsis
-        uses: nick-invision/retry@v2
+        uses: nick-invision/retry@v3
         with:
           timeout_minutes: 10
           max_attempts: 10
@@ -51,43 +53,37 @@ jobs:
         run: neko -version 2>&1
 
       - name: Setup ocaml
-        uses: ocaml/setup-ocaml@v2
+        uses: ocaml/setup-ocaml@v3
         with:
-          ocaml-compiler: 4.08.1
-          opam-repositories: |
-            opam-repository-mingw: https://github.com/ocaml-opam/opam-repository-mingw.git#sunset
-            default: https://github.com/ocaml/opam-repository.git
+          ocaml-compiler: ${{ env.OCAML_VERSION }}
           opam-local-packages: |
             haxe.opam
 
       - name: Install dependencies
         shell: pwsh
+        env:
+          MBEDTLS_VERSION: 2.16.3
         run: |
-          Set-PSDebug -Trace 1
-          curl.exe -fsSL -o "libmbedtls.tar.xz" --retry 3 https://github.com/Simn/mingw64-mbedtls/releases/download/2.16.3/mingw64-$($env:MINGW_ARCH)-mbedtls-2.16.3-1.tar.xz
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'curl -L https://cpanmin.us | perl - App::cpanminus')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm IPC::System::Simple module')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm String::ShellQuote')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'echo "$OLDPWD"')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && tar -C / -xvf libmbedtls.tar.xz')
+          curl.exe -fsSL -o "libmbedtls.tar.xz" --retry 3 `
+            https://github.com/Simn/mingw64-mbedtls/releases/download/${{ env.MBEDTLS_VERSION }}/mingw64-${{ env.MINGW_ARCH }}-mbedtls-${{ env.MBEDTLS_VERSION }}-1.tar.xz
+          ${{ env.CYG_ROOT }}\bin\tar.exe -C ${{ env.CYG_ROOT }} -xvf libmbedtls.tar.xz
 
       - name: Install OCaml libraries
-        shell: pwsh
-        run: |
-          Set-PSDebug -Trace 1
-          opam install haxe --deps-only
-          opam list
-
-      - name: Expose mingw dll files
-        shell: pwsh
-        run: Write-Host "::add-path::${env:CYG_ROOT}/usr/$($env:MINGW_ARCH)-w64-mingw32/sys-root/mingw/bin"
-
-      # required to be able to retrieve the revision
-      - name: Mark directory as safe
-        shell: pwsh
-        run: |
-          Set-PSDebug -Trace 1
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'git config --global --add safe.directory "$OLDPWD"')
+        uses: nick-fields/retry@v3
+        with:
+          timeout_minutes: 10
+          max_attempts: 10
+          retry_on: timeout
+          shell: pwsh
+          command: |
+            Set-PSDebug -Trace 1
+            # stop after any command returns an error
+            $PSNativeCommandUseErrorActionPreference = $true
+            $ErrorActionPreference = 'Stop'
+            # see: https://github.com/aantron/luv/issues/162
+            $env:PATH="${env:CYG_ROOT}\bin;${env:CYG_ROOT}\usr\x86_64-w64-mingw32\bin;${env:PATH}"
+            opam install haxe --deps-only
+            opam list
 
       - name: Set ADD_REVISION=1 for non-release
         if: ${{ !startsWith(github.ref, 'refs/tags/') }}
@@ -98,37 +94,28 @@ jobs:
         shell: pwsh
         run: |
           Set-PSDebug -Trace 1
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -s -f Makefile.win -j`nproc` haxe 2>&1')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -s -f Makefile.win haxelib 2>&1')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -f Makefile.win echo_package_files package_bin package_installer_win package_choco 2>&1')
-          dir out
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && cygcheck ./haxe.exe')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && cygcheck ./haxelib.exe')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && ls ./out')
-
-      - name: Check artifact
-        shell: bash
-        run: |
-          ls out
-          # Output should contain binaries zip, installer zip and nupkg
-          [ $(ls -1 out | wc -l) -eq "3" ]
+          # stop after any command returns an error
+          $PSNativeCommandUseErrorActionPreference = $true
+          $ErrorActionPreference = 'Stop'
+          opam exec -- make -s -f Makefile.win -j"$env:NUMBER_OF_PROCESSORS" haxe
+          opam exec -- make -s -f Makefile.win haxelib
+          opam exec -- make -f Makefile.win echo_package_files package_bin package_installer_win package_choco
+          cygcheck ./haxe.exe
+          cygcheck ./haxelib.exe
+          ls ./out
 
       - name: Upload artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: win${{env.ARCH}}Binaries
           path: out
 
 
   linux-build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       OPAMYES: 1
-    strategy:
-      fail-fast: false
-      matrix:
-        ocaml: ["4.08.1", "5.0.0"]
     steps:
       - uses: actions/checkout@main
         with:
@@ -136,10 +123,10 @@ jobs:
 
       - name: Cache opam
         id: cache-opam
-        uses: actions/cache@v3.0.11
+        uses: actions/cache@v4
         with:
           path: ~/.opam/
-          key: ${{ runner.os }}-${{ matrix.ocaml }}-${{ hashFiles('./haxe.opam', './libs/') }}
+          key: ${{ runner.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
       - name: Install Neko from S3
         run: |
@@ -149,9 +136,11 @@ jobs:
           tar -xf $RUNNER_TEMP/neko_latest.tar.gz -C $RUNNER_TEMP
           NEKOPATH=`echo $RUNNER_TEMP/neko-*-*`
           sudo mkdir -p /usr/local/bin
+          sudo mkdir -p /usr/local/include
           sudo mkdir -p /usr/local/lib/neko
           sudo ln -s $NEKOPATH/{neko,nekoc,nekoml,nekotools}  /usr/local/bin/
           sudo ln -s $NEKOPATH/libneko.*                      /usr/local/lib/
+          sudo ln -s $NEKOPATH/include/*                      /usr/local/include/
           sudo ln -s $NEKOPATH/*.ndll                         /usr/local/lib/neko/
           echo "NEKOPATH=$NEKOPATH" >> $GITHUB_ENV
 
@@ -162,18 +151,17 @@ jobs:
       - name: Install dependencies
         run: |
           set -ex
-          sudo add-apt-repository ppa:avsm/ppa -y # provides OPAM 2
-          sudo add-apt-repository ppa:haxe/ocaml -y # provides newer version of mbedtls
           sudo apt-get update -qqy
-          sudo apt-get install -qqy ocaml-nox camlp5 opam libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build libstring-shellquote-perl libipc-system-simple-perl
+          sudo apt-get install -qqy darcs bubblewrap ocaml-nox libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build
+          curl -sSL https://github.com/ocaml/opam/releases/download/2.3.0/opam-2.3.0-x86_64-linux -o $RUNNER_TEMP/opam
+          sudo install $RUNNER_TEMP/opam /usr/local/bin/opam
 
       - name: Install OCaml libraries
         if: steps.cache-opam.outputs.cache-hit != 'true'
         run: |
           set -ex
-          opam init # --disable-sandboxing
+          opam init -c ${{ env.OCAML_VERSION }}
           opam update
-          opam switch create ${{ matrix.ocaml }}
           opam pin add haxe . --no-action
           opam install haxe --deps-only --assume-depexts
           opam list
@@ -201,7 +189,6 @@ jobs:
         run: echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
 
       - name: Build xmldoc
-        if: matrix.ocaml == '4.08.1'
         run: |
           set -ex
           make -s xmldoc
@@ -213,21 +200,20 @@ jobs:
           EOL
 
       - name: Upload artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: linuxBinaries${{ (matrix.ocaml == '5.0.0' && '_ocaml5') || '' }}
+          name: linuxBinaries
           path: out
 
       - name: Upload xmldoc artifact
-        uses: actions/upload-artifact@v3
-        if: matrix.ocaml == '4.08.1'
+        uses: actions/upload-artifact@v4
         with:
           name: xmldoc
           path: extra/doc
 
   linux-test:
     needs: linux-build
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       TEST: ${{matrix.target}}
@@ -236,7 +222,6 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        ocaml: ["4.08.1", "5.0.0"]
         target: [macro, js, hl, cpp, jvm, php, python, lua, flash, neko]
         include:
           - target: hl
@@ -251,9 +236,9 @@ jobs:
       - uses: actions/checkout@main
         with:
           submodules: recursive
-      - uses: actions/download-artifact@v3
+      - uses: actions/download-artifact@v4
         with:
-          name: linuxBinaries${{ (matrix.ocaml == '5.0.0' && '_ocaml5') || '' }}
+          name: linuxBinaries
           path: linuxBinaries
 
       - name: Install Neko from S3
@@ -264,9 +249,11 @@ jobs:
           tar -xf $RUNNER_TEMP/neko_latest.tar.gz -C $RUNNER_TEMP
           NEKOPATH=`echo $RUNNER_TEMP/neko-*-*`
           sudo mkdir -p /usr/local/bin
+          sudo mkdir -p /usr/local/include
           sudo mkdir -p /usr/local/lib/neko
           sudo ln -s $NEKOPATH/{neko,nekoc,nekoml,nekotools}  /usr/local/bin/
           sudo ln -s $NEKOPATH/libneko.*                      /usr/local/lib/
+          sudo ln -s $NEKOPATH/include/*                      /usr/local/include/
           sudo ln -s $NEKOPATH/*.ndll                         /usr/local/lib/neko/
           echo "NEKOPATH=$NEKOPATH" >> $GITHUB_ENV
 
@@ -309,10 +296,11 @@ jobs:
       - name: Test
         run: haxe RunCi.hxml
         working-directory: ${{github.workspace}}/tests
+        timeout-minutes: 20
 
   test-docgen:
     needs: linux-build
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       HXCPP_COMPILE_CACHE: ~/hxcache
@@ -321,13 +309,13 @@ jobs:
         with:
           submodules: recursive
 
-      - uses: actions/download-artifact@v3
+      - uses: actions/download-artifact@v4
         with:
           name: linuxBinaries
           path: linuxBinaries
 
       - name: Download xmldoc artifact
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: xmldoc
           path: xmldoc
@@ -340,9 +328,11 @@ jobs:
           tar -xf $RUNNER_TEMP/neko_latest.tar.gz -C $RUNNER_TEMP
           NEKOPATH=`echo $RUNNER_TEMP/neko-*-*`
           sudo mkdir -p /usr/local/bin
+          sudo mkdir -p /usr/local/include
           sudo mkdir -p /usr/local/lib/neko
           sudo ln -s $NEKOPATH/{neko,nekoc,nekoml,nekotools}  /usr/local/bin/
           sudo ln -s $NEKOPATH/libneko.*                      /usr/local/lib/
+          sudo ln -s $NEKOPATH/include/*                      /usr/local/include/
           sudo ln -s $NEKOPATH/*.ndll                         /usr/local/lib/neko/
           echo "NEKOPATH=$NEKOPATH" >> $GITHUB_ENV
 
@@ -388,66 +378,171 @@ jobs:
           cpp/Dox -i ../../xmldoc -ex microsoft -ex javax -theme $(haxelib libpath dox)/themes/default
         working-directory: ${{github.workspace}}/tests/docgen
 
-  linux-arm64:
-    runs-on: ubuntu-20.04
-    permissions:
-      packages: write
+  linux-arm64-build:
+    runs-on: ubuntu-22.04-arm
     env:
-      FORCE_COLOR: 1
+      PLATFORM: linux-arm64
+      OPAMYES: 1
     steps:
-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+      - uses: actions/checkout@main
         with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Install Earthly
-        run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/download/v0.6.13/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete'
+          submodules: recursive
 
-      - name: Set up QEMU
-        id: qemu
-        uses: docker/setup-qemu-action@v2
+      - name: Cache opam
+        id: cache-opam
+        uses: actions/cache@v4
         with:
-            image: tonistiigi/binfmt:latest
-            platforms: all
+          path: ~/.opam/
+          key: arm-${{ runner.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
-      - uses: actions/checkout@main
-        with:
-          submodules: recursive
+      - name: Install Neko from S3
+        run: |
+          set -ex
 
-      - name: Set CONTAINER_ vars
+          curl -sSL https://build.haxe.org/builds/neko/$PLATFORM/neko_latest.tar.gz -o $RUNNER_TEMP/neko_latest.tar.gz
+          tar -xf $RUNNER_TEMP/neko_latest.tar.gz -C $RUNNER_TEMP
+          NEKOPATH=`echo $RUNNER_TEMP/neko-*-*`
+          sudo mkdir -p /usr/local/bin
+          sudo mkdir -p /usr/local/include
+          sudo mkdir -p /usr/local/lib/neko
+          sudo ln -s $NEKOPATH/{neko,nekoc,nekoml,nekotools}  /usr/local/bin/
+          sudo ln -s $NEKOPATH/libneko.*                      /usr/local/lib/
+          sudo ln -s $NEKOPATH/include/*                      /usr/local/include/
+          sudo ln -s $NEKOPATH/*.ndll                         /usr/local/lib/neko/
+          echo "NEKOPATH=$NEKOPATH" >> $GITHUB_ENV
+
+      - name: Print Neko version
+        run: neko -version 2>&1
+
+
+      - name: Install dependencies
         run: |
-          echo "CONTAINER_REG=$(echo ${{ github.repository }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV;
-          echo "CONTAINER_TAG=$(echo ${{ github.ref_name }} | sed -e 's/[^A-Za-z0-9\.]/-/g')" >> $GITHUB_ENV;
+          set -ex
+          sudo apt-get update -qqy
+          sudo apt-get install -qqy opam libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build
 
-      - name: Build devcontainer
-        run: earthly --platform=linux/arm64 +devcontainer --IMAGE_NAME="ghcr.io/${CONTAINER_REG}_devcontainer" --IMAGE_TAG="${CONTAINER_TAG}-arm64" --IMAGE_CACHE="ghcr.io/haxefoundation/haxe_devcontainer:development-arm64"
-        env:
-          EARTHLY_PUSH: "${{ github.event_name == 'push' }}"
-          EARTHLY_USE_INLINE_CACHE: true
-          EARTHLY_SAVE_INLINE_CACHE: true
+      - name: Install OCaml libraries
+        if: steps.cache-opam.outputs.cache-hit != 'true'
+        run: |
+          set -ex
+          opam init -c ${{ env.OCAML_VERSION }}
+          opam pin add haxe . --no-action
+          opam install haxe --deps-only --assume-depexts
+          opam list
+          ocamlopt -v
 
       - name: Set ADD_REVISION=1 for non-release
         if: ${{ !startsWith(github.ref, 'refs/tags/') }}
         run: echo "ADD_REVISION=1" >> $GITHUB_ENV
 
-      - name: Build
-        run: earthly --platform=linux/arm64 +build --ADD_REVISION="$ADD_REVISION" --SET_SAFE_DIRECTORY="true"
-        env:
-          EARTHLY_PUSH: "${{ github.event_name == 'push' }}"
-          EARTHLY_REMOTE_CACHE: "ghcr.io/${{env.CONTAINER_REG}}_cache:build-${{env.CONTAINER_TAG}}-arm64"
+      - name: Build Haxe
+        run: |
+          set -ex
+          eval $(opam env)
+          opam config exec -- make -s -j`nproc` STATICLINK=1 haxe
+          opam config exec -- make -s haxelib
+          make -s package_unix
+          ls -l out
+          ldd -v ./haxe
+          ldd -v ./haxelib
+
+      # https://stackoverflow.com/questions/58033366/how-to-get-current-branch-within-github-actions
+      - name: Extract branch name
+        id: extract_branch
+        shell: bash
+        run: echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
 
       - name: Upload artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: linuxArm64Binaries
-          path: out/linux/arm64
+          path: out
+
+  linux-arm64-test:
+    needs: linux-arm64-build
+    runs-on: ubuntu-22.04-arm
+    env:
+      PLATFORM: linux-arm64
+      TEST: ${{matrix.target}}
+      HXCPP_COMPILE_CACHE: ~/hxcache
+      HAXE_STD_PATH: /usr/local/share/haxe/std
+    strategy:
+      fail-fast: false
+      matrix:
+        target: [macro, js, cpp, jvm, php, python, lua, neko]
+        include:
+          - target: lua
+            APT_PACKAGES: ncurses-dev
+    steps:
+      - uses: actions/checkout@main
+        with:
+          submodules: recursive
+      - uses: actions/download-artifact@v4
+        with:
+          name: linuxArm64Binaries
+          path: linuxBinaries
+
+      - name: Install Neko from S3
+        run: |
+          set -ex
+
+          curl -sSL https://build.haxe.org/builds/neko/$PLATFORM/neko_latest.tar.gz -o $RUNNER_TEMP/neko_latest.tar.gz
+          tar -xf $RUNNER_TEMP/neko_latest.tar.gz -C $RUNNER_TEMP
+          NEKOPATH=`echo $RUNNER_TEMP/neko-*-*`
+          sudo mkdir -p /usr/local/bin
+          sudo mkdir -p /usr/local/include
+          sudo mkdir -p /usr/local/lib/neko
+          sudo ln -s $NEKOPATH/{neko,nekoc,nekoml,nekotools}  /usr/local/bin/
+          sudo ln -s $NEKOPATH/libneko.*                      /usr/local/lib/
+          sudo ln -s $NEKOPATH/include/*                      /usr/local/include/
+          sudo ln -s $NEKOPATH/*.ndll                         /usr/local/lib/neko/
+          echo "NEKOPATH=$NEKOPATH" >> $GITHUB_ENV
+
+      - name: Print Neko version
+        run: neko -version 2>&1
+
+
+      - name: Setup Haxe
+        run: |
+          sudo apt install -qqy libmbedtls-dev
+
+          set -ex
+          tar -xf linuxBinaries/*_bin.tar.gz -C linuxBinaries --strip-components=1
+          sudo mkdir -p /usr/local/bin/
+          sudo mkdir -p /usr/local/share/haxe/
+          sudo ln -s `pwd`/linuxBinaries/haxe /usr/local/bin/haxe
+          sudo ln -s `pwd`/linuxBinaries/haxelib /usr/local/bin/haxelib
+          sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
+
+      - name: Print Haxe version
+        run: haxe -version
+
+      - name: Setup haxelib
+        run: |
+          set -ex
+          mkdir ~/haxelib
+          haxelib setup ~/haxelib
+
+      - name: Install apt packages
+        if: matrix.APT_PACKAGES
+        run: |
+          set -ex
+          sudo apt update -qqy
+          sudo apt install -qqy ${{matrix.APT_PACKAGES}}
+
+      - name: Test
+        run: haxe RunCi.hxml
+        working-directory: ${{github.workspace}}/tests
+        timeout-minutes: 20
 
   mac-build:
-    runs-on: macos-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [macos-14, macos-13]
+    runs-on: ${{ matrix.os }}
     env:
-      PLATFORM: mac
+      PLATFORM: mac${{ matrix.os == 'macos-14' && '-arm64' || '' }}
       OPAMYES: 1
       MACOSX_DEPLOYMENT_TARGET: 10.13
     steps:
@@ -457,10 +552,10 @@ jobs:
 
       - name: Cache opam
         id: cache-opam
-        uses: actions/cache@v3.0.11
+        uses: actions/cache@v4
         with:
           path: ~/.opam/
-          key: ${{ runner.os }}-${{ hashFiles('./haxe.opam', './libs/') }}
+          key: ${{ matrix.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
       - name: Install Neko from S3
         run: |
@@ -470,9 +565,11 @@ jobs:
           tar -xf $RUNNER_TEMP/neko_latest.tar.gz -C $RUNNER_TEMP
           NEKOPATH=`echo $RUNNER_TEMP/neko-*-*`
           sudo mkdir -p /usr/local/bin
+          sudo mkdir -p /usr/local/include
           sudo mkdir -p /usr/local/lib/neko
           sudo ln -s $NEKOPATH/{neko,nekoc,nekoml,nekotools}  /usr/local/bin/
           sudo ln -s $NEKOPATH/libneko.*                      /usr/local/lib/
+          sudo ln -s $NEKOPATH/include/*                      /usr/local/include/
           sudo ln -s $NEKOPATH/*.ndll                         /usr/local/lib/neko/
           echo "NEKOPATH=$NEKOPATH" >> $GITHUB_ENV
 
@@ -489,33 +586,29 @@ jobs:
           set -ex
           brew update
           brew bundle --file=tests/Brewfile --no-upgrade
-          cpanm IPC::System::Simple
-          cpanm String::ShellQuote
           curl -L https://github.com/madler/zlib/releases/download/v$ZLIB_VERSION/zlib-$ZLIB_VERSION.tar.gz | tar xz
           cd zlib-$ZLIB_VERSION
           ./configure
-          make && make install
+          sudo make && sudo make install
           cd ..
           curl -L https://github.com/ARMmbed/mbedtls/archive/v$MBEDTLS_VERSION.tar.gz | tar xz
           cd mbedtls-$MBEDTLS_VERSION
-          make && make install
+          sudo make && sudo make install
           cd ..
           curl -L https://github.com/PCRE2Project/pcre2/releases/download/pcre2-$PCRE2_VERSION/pcre2-$PCRE2_VERSION.tar.gz | tar xz
           cd pcre2-$PCRE2_VERSION
           ./configure --enable-unicode --enable-pcre2-8 --enable-pcre2-16 --enable-pcre2-32 --enable-unicode-properties --enable-pcre2grep-libz --enable-pcre2grep-libbz2 --enable-jit
-          make && make install
+          sudo make && sudo make install
           cd ..
 
       - name: Install OCaml libraries
         if: steps.cache-opam.outputs.cache-hit != 'true'
         run: |
           set -ex
-          opam init # --disable-sandboxing
+          opam init -c ${{ env.OCAML_VERSION }}
           opam update
-          opam switch create 4.08.1
           eval $(opam env)
           opam env
-          opam pin add ctypes 0.17.1 --yes
           opam pin add haxe . --no-action
           opam install haxe --deps-only --assume-depexts
           opam list
@@ -529,17 +622,25 @@ jobs:
         run: |
           set -ex
           eval $(opam env)
-          opam config exec -- make -s -j`sysctl -n hw.ncpu` STATICLINK=1 "LIB_PARAMS=/usr/local/lib/libz.a /usr/local/lib/libpcre2-8.a /usr/local/lib/libmbedtls.a /usr/local/lib/libmbedcrypto.a /usr/local/lib/libmbedx509.a -cclib '-framework Security -framework CoreFoundation'" haxe
+          opam config exec -- make -s STATICLINK=1 "LIB_PARAMS=\"/usr/local/lib/libz.a\" \"/usr/local/lib/libpcre2-8.a\" \"/usr/local/lib/libmbedtls.a\" \"/usr/local/lib/libmbedcrypto.a\" \"/usr/local/lib/libmbedx509.a\"" haxe
           opam config exec -- make -s haxelib
           make -s package_unix package_installer_mac
           ls -l out
           otool -L ./haxe
           otool -L ./haxelib
 
-      - name: Upload artifact
-        uses: actions/upload-artifact@v3
+      - name: Upload artifact (x64)
+        if: runner.arch == 'X64'
+        uses: actions/upload-artifact@v4
         with:
-          name: macBinaries
+          name: macX64Binaries
+          path: out
+
+      - name: Upload artifact (arm)
+        if: runner.arch == 'ARM64'
+        uses: actions/upload-artifact@v4
+        with:
+          name: macArmBinaries
           path: out
 
 
@@ -561,7 +662,7 @@ jobs:
       - uses: actions/checkout@main
         with:
           submodules: recursive
-      - uses: actions/download-artifact@v3
+      - uses: actions/download-artifact@v4
         with:
           name: win${{env.ARCH}}Binaries
           path: win${{env.ARCH}}Binaries
@@ -578,7 +679,7 @@ jobs:
       - name: Print Neko version
         run: neko -version 2>&1
 
-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
         with:
           node-version: 18.17.1
 
@@ -637,11 +738,45 @@ jobs:
         shell: pwsh
         run: haxe RunCi.hxml
         working-directory: ${{github.workspace}}/tests
+        timeout-minutes: 20
 
 
-  mac-test:
+  mac-build-universal:
     needs: mac-build
     runs-on: macos-latest
+    steps:
+      - name: Checkout the repository
+        uses: actions/checkout@main
+      - uses: actions/download-artifact@v4
+        with:
+          name: macX64Binaries
+          path: macX64Binaries
+      - uses: actions/download-artifact@v4
+        with:
+          name: macArmBinaries
+          path: macArmBinaries
+
+      - name: Make universal binary
+        run: |
+          set -ex
+          tar -xf macX64Binaries/*_bin.tar.gz -C macX64Binaries --strip-components=1
+          tar -xf macArmBinaries/*_bin.tar.gz -C macArmBinaries --strip-components=1
+          lipo -create -output haxe macX64Binaries/haxe macArmBinaries/haxe
+          lipo -create -output haxelib macX64Binaries/haxelib macArmBinaries/haxelib
+          make -s package_unix package_installer_mac PACKAGE_INSTALLER_MAC_ARCH=universal
+          ls -l out
+          otool -L ./haxe
+          otool -L ./haxelib
+
+      - name: Upload artifact (universal)
+        uses: actions/upload-artifact@v4
+        with:
+          name: macBinaries
+          path: out
+
+  mac-test:
+    needs: mac-build-universal
+    runs-on: macos-13
     env:
       PLATFORM: mac
       TEST: ${{matrix.target}}
@@ -650,7 +785,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        target: [macro, js, hl, cpp, jvm, php, python, flash, neko]
+        target: [macro, js, hl, cpp, jvm, php, python, lua, flash, neko]
         include:
           - target: hl
             BREW_PACKAGES: ninja
@@ -658,7 +793,7 @@ jobs:
       - uses: actions/checkout@main
         with:
           submodules: recursive
-      - uses: actions/download-artifact@v3
+      - uses: actions/download-artifact@v4
         with:
           name: macBinaries
           path: macBinaries
@@ -671,9 +806,11 @@ jobs:
           tar -xf $RUNNER_TEMP/neko_latest.tar.gz -C $RUNNER_TEMP
           NEKOPATH=`echo $RUNNER_TEMP/neko-*-*`
           sudo mkdir -p /usr/local/bin
+          sudo mkdir -p /usr/local/include
           sudo mkdir -p /usr/local/lib/neko
           sudo ln -s $NEKOPATH/{neko,nekoc,nekoml,nekotools}  /usr/local/bin/
           sudo ln -s $NEKOPATH/libneko.*                      /usr/local/lib/
+          sudo ln -s $NEKOPATH/include/*                      /usr/local/include/
           sudo ln -s $NEKOPATH/*.ndll                         /usr/local/lib/neko/
           echo "NEKOPATH=$NEKOPATH" >> $GITHUB_ENV
 
@@ -712,12 +849,13 @@ jobs:
           echo "" > sys/compile-fs.hxml
           haxe RunCi.hxml
         working-directory: ${{github.workspace}}/tests
+        timeout-minutes: 60
 
 
   deploy:
     if: success() && github.repository_owner == 'HaxeFoundation' && github.event_name != 'pull_request'
-    needs: [linux-test, linux-arm64, mac-test, windows64-test]
-    runs-on: ubuntu-20.04
+    needs: [linux-test, linux-arm64-test, mac-test, windows64-test]
+    runs-on: ubuntu-22.04
     steps:
       # this is only needed for to get `COMMIT_DATE`...
       # maybe https://github.community/t/expose-commit-timestamp-in-the-github-context-data/16460/3
@@ -726,7 +864,7 @@ jobs:
         uses: actions/checkout@main
 
       - name: Download build artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
 
       - name: Install awscli
         run: |
@@ -786,8 +924,8 @@ jobs:
 
   deploy_apidoc:
     if: success() && github.repository_owner == 'HaxeFoundation' && github.event_name != 'pull_request'
-    needs: [linux-test, linux-arm64, mac-test, windows64-test]
-    runs-on: ubuntu-20.04
+    needs: [linux-test, linux-arm64-test, mac-test, windows64-test]
+    runs-on: ubuntu-22.04
     steps:
       - name: Install dependencies
         run: |
@@ -795,7 +933,7 @@ jobs:
           sudo apt-get install -qqy libc6
 
       - name: Download Haxe
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: linuxBinaries
           path: linuxBinaries
@@ -811,7 +949,7 @@ jobs:
           sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
 
       - name: Download xmldoc artifact
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: xmldoc
           path: xmldoc

+ 19 - 0
.github/workflows/target.yml

@@ -0,0 +1,19 @@
+name: Check pull request target branch
+on:
+  pull_request_target:
+    types:
+      - opened
+      - reopened
+      - synchronize
+      - edited
+jobs:
+  check-branches:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check branches
+        run: |
+          if [ "${{ github.base_ref }}" != "development" ]; then
+            echo 'Merge requests should target `development`.'
+            exit 1
+          fi
+

+ 2 - 6
.gitignore

@@ -9,6 +9,7 @@
 *.exe
 .*.swp
 .haxelib
+/opam
 /out
 /installer
 
@@ -41,6 +42,7 @@
 /std/tools/haxelib/haxelib.n
 /std/tools/haxelib/index.n
 
+/tests/**/dump
 /tests/unit/as3
 /tests/unit/cpp
 /tests/unit/java
@@ -54,7 +56,6 @@
 /tests/unit/php
 /tests/unit/cs
 /tests/unit/cs_unsafe
-/tests/unit/dump
 /tests/unit/db.db3
 
 /tests/unit/native_java/obj
@@ -69,7 +70,6 @@
 /tests/unit/node_modules/
 
 /tests/nullsafety/bin
-/tests/nullsafety/dump
 
 /haxe.sublime*
 .idea
@@ -85,9 +85,7 @@ tests/unit/unit.py
 tests/unit/unit.py.res1.txt
 tests/unit/unit.py.res2.bin
 tests/sys/bin/
-/tests/sys/dump/
 /tests/sys/test-res/
-tests/optimization/dump/
 tests/misc/projects/*/*.n
 tests/misc/*/*/*.lua
 tests/unit/bin/
@@ -100,7 +98,6 @@ tests/misc/projects/Issue4070/cpp/
 /tests/misc/eventLoop/eventLoop.js
 /tests/misc/eventLoop/eventLoop.n
 /tests/misc/eventLoop/eventLoop.swf
-/tests/misc/eventLoop/dump
 /tests/misc/eventLoop/eventLoop.py
 /tests/misc/eventLoop/php
 *.vscode/
@@ -127,7 +124,6 @@ dev-display.hxml
 tests/sourcemaps/bin
 /*_plugin.ml
 tests/benchs/export/
-tests/benchs/dump/
 tests/display/.unittest/
 tests/unit/.unittest/
 tests/threads/export/

+ 5 - 1
.vscode/settings.json

@@ -28,5 +28,9 @@
 			],
 			"url": "./.vscode/schemas/meta.schema.json"
 		}
-	]
+	],
+	"ocaml.sandbox": {
+		"kind": "opam",
+		"switch": "default"
+	}
 }

+ 8 - 5
.vscode/tasks.json

@@ -5,8 +5,11 @@
 			"label": "make: haxe",
 			"type": "shell",
 			"command": "make ADD_REVISION=1 -s -j haxe",
+			"osx": {
+				"command": "eval $(opam env) && make ADD_REVISION=1 -s -j haxe",
+			},
 			"windows": {
-				"command": "make ADD_REVISION=1 -f Makefile.win -s -j haxe"
+				"command": "opam exec -- make ADD_REVISION=1 -f Makefile.win -s -j haxe"
 			},
 			"problemMatcher": [],
 			"group": {
@@ -19,7 +22,7 @@
 			"type": "shell",
 			"command": "make -s -j libs",
 			"windows": {
-				"command": "make -f Makefile.win -s -j libs"
+				"command": "opam exec -- make -f Makefile.win -s -j libs"
 			},
 			"problemMatcher": []
 		},
@@ -28,7 +31,7 @@
 			"type": "shell",
 			"command": "make -s haxelib",
 			"windows": {
-				"command": "make -f Makefile.win -s haxelib"
+				"command": "opam exec -- make -f Makefile.win -s haxelib"
 			},
 			"problemMatcher": ["$haxe", "$haxe-absolute"]
 		},
@@ -37,7 +40,7 @@
 			"type": "shell",
 			"command": "make s -j libs && make ADD_REVISION=1 -s -j haxe && make -s haxelib",
 			"windows": {
-				"command": "make -f Makefile.win -s -j libs && make ADD_REVISION=1 -f Makefile.win -s -j haxe && make -f Makefile.win -s haxelib"
+				"command": "opam exec -- make -f Makefile.win -s -j libs && make ADD_REVISION=1 -f Makefile.win -s -j haxe && make -f Makefile.win -s haxelib"
 			},
 			"problemMatcher": ["$haxe", "$haxe-absolute"]
 		},
@@ -48,4 +51,4 @@
 			"problemMatcher": []
 		}
 	]
-}
+}

+ 0 - 411
Earthfile

@@ -1,411 +0,0 @@
-VERSION 0.6
-FROM mcr.microsoft.com/vscode/devcontainers/base:0-bionic
-ARG DEVCONTAINER_IMAGE_NAME_DEFAULT=ghcr.io/haxefoundation/haxe_devcontainer
-
-ARG USERNAME=vscode
-ARG USER_UID=1000
-ARG USER_GID=$USER_UID
-
-ARG WORKDIR=/workspace
-RUN mkdir -m 777 "$WORKDIR"
-WORKDIR "$WORKDIR"
-
-ARG --required TARGETARCH
-
-devcontainer-library-scripts:
-    RUN curl -fsSLO https://raw.githubusercontent.com/microsoft/vscode-dev-containers/main/script-library/common-debian.sh
-    RUN curl -fsSLO https://raw.githubusercontent.com/microsoft/vscode-dev-containers/main/script-library/docker-debian.sh
-    SAVE ARTIFACT --keep-ts *.sh AS LOCAL .devcontainer/library-scripts/
-
-devcontainer:
-    # Avoid warnings by switching to noninteractive
-    ENV DEBIAN_FRONTEND=noninteractive
-
-    ARG INSTALL_ZSH="false"
-    ARG UPGRADE_PACKAGES="true"
-    ARG ENABLE_NONROOT_DOCKER="true"
-    ARG USE_MOBY="false"
-    COPY .devcontainer/library-scripts/common-debian.sh .devcontainer/library-scripts/docker-debian.sh /tmp/library-scripts/
-    RUN apt-get update \
-        && /bin/bash /tmp/library-scripts/common-debian.sh "${INSTALL_ZSH}" "${USERNAME}" "${USER_UID}" "${USER_GID}" "${UPGRADE_PACKAGES}" "true" "true" \
-        && /bin/bash /tmp/library-scripts/docker-debian.sh "${ENABLE_NONROOT_DOCKER}" "/var/run/docker-host.sock" "/var/run/docker.sock" "${USERNAME}" "${USE_MOBY}" \
-        # Clean up
-        && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts/
-
-    # Setting the ENTRYPOINT to docker-init.sh will configure non-root access
-    # to the Docker socket. The script will also execute CMD as needed.
-    ENTRYPOINT [ "/usr/local/share/docker-init.sh" ]
-    CMD [ "sleep", "infinity" ]
-
-    # Configure apt and install packages
-    RUN apt-get update \
-        && apt-get install -qqy --no-install-recommends apt-utils dialog 2>&1 \
-        && apt-get install -qqy --no-install-recommends \
-            iproute2 \
-            procps \
-            sudo \
-            bash-completion \
-            build-essential \
-            curl \
-            wget \
-            software-properties-common \
-            direnv \
-            tzdata \
-            # install docker engine for using `WITH DOCKER`
-            docker-ce \
-        # install node
-        && curl -sL https://deb.nodesource.com/setup_16.x | bash - \
-        && apt-get install -qqy --no-install-recommends nodejs=16.* \
-        # install ocaml and other haxe compiler deps
-        && add-apt-repository ppa:avsm/ppa \
-        && add-apt-repository ppa:haxe/ocaml \
-        && apt-get install -qqy --no-install-recommends \
-            ocaml-nox \
-            camlp5 \
-            opam \
-            libpcre2-dev \
-            zlib1g-dev \
-            libgtk2.0-dev \
-            libmbedtls-dev \
-            ninja-build \
-            libstring-shellquote-perl \
-            libipc-system-simple-perl \
-        #
-        # Clean up
-        && apt-get autoremove -y \
-        && apt-get clean -y \
-        && rm -rf /var/lib/apt/lists/*
-
-    # Switch back to dialog for any ad-hoc use of apt-get
-    ENV DEBIAN_FRONTEND=
-
-    DO +INSTALL_NEKO
-
-    COPY +earthly/earthly /usr/local/bin/
-    RUN earthly bootstrap --no-buildkit --with-autocomplete
-
-    USER $USERNAME
-
-    # Do not show git branch in bash prompt because it's slow
-    # https://github.com/microsoft/vscode-dev-containers/issues/1196#issuecomment-988388658
-    RUN git config --global codespaces-theme.hide-status 1
-
-    # Install OCaml libraries
-    COPY haxe.opam .
-    RUN opam init --disable-sandboxing
-    RUN opam switch create 4.08.1
-    RUN eval $(opam env)
-    RUN opam env
-    RUN opam install . --yes --deps-only --no-depexts
-    RUN opam list
-    RUN ocamlopt -v
-
-    USER root
-
-    ARG IMAGE_NAME="$DEVCONTAINER_IMAGE_NAME_DEFAULT"
-    ARG IMAGE_TAG="development"
-    ARG IMAGE_CACHE="$IMAGE_NAME:$IMAGE_TAG"
-    SAVE IMAGE --cache-from="$IMAGE_CACHE" --push "$IMAGE_NAME:$IMAGE_TAG"
-
-devcontainer-multiarch-amd64:
-    ARG IMAGE_NAME="$DEVCONTAINER_IMAGE_NAME_DEFAULT"
-    ARG IMAGE_TAG="development"
-    FROM --platform=linux/amd64 +devcontainer --IMAGE_NAME="$IMAGE_NAME" --IMAGE_TAG="$IMAGE_TAG-amd64"
-    SAVE IMAGE --push "$IMAGE_NAME:$IMAGE_TAG"
-
-devcontainer-multiarch-arm64:
-    ARG IMAGE_NAME="$DEVCONTAINER_IMAGE_NAME_DEFAULT"
-    ARG IMAGE_TAG="development"
-    FROM --platform=linux/arm64 +devcontainer --IMAGE_NAME="$IMAGE_NAME" --IMAGE_TAG="$IMAGE_TAG-arm64"
-    SAVE IMAGE --push "$IMAGE_NAME:$IMAGE_TAG"
-
-devcontainer-multiarch:
-    BUILD +devcontainer-multiarch-amd64
-    BUILD +devcontainer-multiarch-arm64
-
-# Usage:
-# COPY +earthly/earthly /usr/local/bin/
-# RUN earthly bootstrap --no-buildkit --with-autocomplete
-earthly:
-    ARG --required TARGETARCH
-    RUN curl -fsSL https://github.com/earthly/earthly/releases/download/v0.6.13/earthly-linux-${TARGETARCH} -o /usr/local/bin/earthly \
-        && chmod +x /usr/local/bin/earthly
-    SAVE ARTIFACT /usr/local/bin/earthly
-
-INSTALL_PACKAGES:
-    COMMAND
-    ARG PACKAGES
-    RUN apt-get update -qqy && \
-        apt-get install -qqy --no-install-recommends $PACKAGES && \
-        apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*
-
-INSTALL_NEKO:
-    COMMAND
-    ARG NEKOPATH=/neko
-    COPY +neko/* "$NEKOPATH/"
-    ARG PREFIX=/usr/local
-    RUN bash -c "ln -s \"$NEKOPATH\"/{neko,nekoc,nekoml,nekotools} \"$PREFIX/bin/\""
-    RUN bash -c "ln -s \"$NEKOPATH\"/libneko.* \"$PREFIX/lib/\""
-    RUN mkdir -p "$PREFIX/lib/neko/"
-    RUN bash -c "ln -s \"$NEKOPATH\"/*.ndll \"$PREFIX/lib/neko/\""
-    RUN ldconfig
-
-INSTALL_HAXE:
-    COMMAND
-    ARG PREFIX=/usr/local
-    COPY +build/haxe +build/haxelib "$PREFIX/bin/"
-    COPY std "$PREFIX/share/haxe/std"
-
-try-neko:
-    DO +INSTALL_NEKO
-    RUN neko -version
-    RUN nekotools
-
-try-haxe:
-    DO +INSTALL_NEKO
-    DO +INSTALL_HAXE
-    RUN haxe --version
-    RUN haxelib version
-
-neko:
-    RUN set -ex && \
-        case "$TARGETARCH" in \
-            amd64) PLATFORM=linux64;; \
-            arm64) PLATFORM=linux-arm64;; \
-            *) exit 1;; \
-        esac && \
-        curl -fsSL https://build.haxe.org/builds/neko/$PLATFORM/neko_latest.tar.gz -o neko_latest.tar.gz && \
-        tar -xf neko_latest.tar.gz && \
-        mv `echo neko-*-*` /tmp/neko-unpacked
-    SAVE ARTIFACT /tmp/neko-unpacked/*
-    SAVE IMAGE --cache-hint
-
-build:
-    FROM +devcontainer
-
-    USER $USERNAME
-
-    # Build Haxe
-    COPY --dir extra libs plugins src* std dune* Makefile* .
-
-    # the Makefile calls git to get commit sha
-    COPY .git .git
-    ARG SET_SAFE_DIRECTORY="false"
-    IF [ "$SET_SAFE_DIRECTORY" = "true" ]
-        RUN git config --global --add safe.directory "$WORKDIR"
-    END
-
-    ARG ADD_REVISION
-    ENV ADD_REVISION=$ADD_REVISION
-    RUN opam config exec -- make -s -j`nproc` STATICLINK=1 haxe && ldd -v ./haxe
-    RUN opam config exec -- make -s haxelib && ldd -v ./haxelib
-    RUN make -s package_unix && ls -l out
-
-    ARG TARGETPLATFORM
-    SAVE ARTIFACT --keep-ts ./out/* AS LOCAL out/$TARGETPLATFORM/
-    SAVE ARTIFACT --keep-ts ./haxe AS LOCAL out/$TARGETPLATFORM/
-    SAVE ARTIFACT --keep-ts ./haxelib AS LOCAL out/$TARGETPLATFORM/
-    SAVE IMAGE --cache-hint
-
-build-multiarch:
-    ARG ADD_REVISION
-    BUILD --platform=linux/amd64 --platform=linux/arm64 +build --ADD_REVISION=$ADD_REVISION
-
-xmldoc:
-    DO +INSTALL_NEKO
-    DO +INSTALL_HAXE
-
-    COPY --dir extra .
-
-    WORKDIR extra
-    RUN haxelib newrepo
-    RUN haxelib git hxcpp  https://github.com/HaxeFoundation/hxcpp
-    RUN haxelib git hxjava https://github.com/HaxeFoundation/hxjava
-    RUN haxe doc.hxml
-
-    ARG COMMIT
-    ARG BRANCH
-    RUN echo "{\"commit\":\"$COMMIT\",\"branch\":\"$BRANCH\"}" > doc/info.json
-
-    SAVE ARTIFACT --keep-ts ./doc AS LOCAL extra/doc
-
-test-environment:
-    # we use a sightly newer ubuntu for easier installation of the target runtimes (e.g. php)
-    FROM ubuntu:focal
-    DO +INSTALL_NEKO
-    DO +INSTALL_HAXE
-
-    ENV DEBIAN_FRONTEND=noninteractive
-    DO +INSTALL_PACKAGES --PACKAGES="ca-certificates curl wget git build-essential locales sqlite3"
-
-    # Node.js is required as there are tests that use it (search "-cmd node")
-    RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash - && \
-        apt-get install -qqy nodejs && \
-        apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*
-
-    # set locale
-    RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
-    ENV LANG=en_US.UTF-8
-    ENV LANGUAGE=en_US:en
-    ENV LC_ALL=en_US.UTF-8
-
-    SAVE IMAGE --cache-hint
-
-test-environment-java:
-    FROM +test-environment
-    DO +INSTALL_PACKAGES --PACKAGES="default-jdk"
-    SAVE IMAGE --cache-hint
-
-test-environment-js:
-    # somehow js tests require hxjava which in turns require javac
-    FROM +test-environment-java
-
-test-environment-python:
-    FROM +test-environment
-    DO +INSTALL_PACKAGES --PACKAGES="python3"
-    SAVE IMAGE --cache-hint
-
-test-environment-php:
-    FROM +test-environment
-    DO +INSTALL_PACKAGES --PACKAGES="php-cli php-mbstring php-sqlite3"
-    SAVE IMAGE --cache-hint
-
-test-environment-hl:
-    FROM +test-environment
-    DO +INSTALL_PACKAGES --PACKAGES="cmake ninja-build libturbojpeg-dev libpng-dev zlib1g-dev libvorbis-dev libsqlite3-dev"
-    SAVE IMAGE --cache-hint
-
-test-environment-lua:
-    # hererocks uses pip
-    FROM +test-environment-python
-    DO +INSTALL_PACKAGES --PACKAGES="libssl-dev libreadline-dev python3-pip unzip libpcre2-dev cmake"
-    RUN ln -s /root/.local/bin/hererocks /bin/
-    SAVE IMAGE --cache-hint
-
-test-environment-cpp:
-    FROM +test-environment
-
-    ARG TARGETPLATFORM
-
-    IF [ "$TARGETPLATFORM" = "linux/amd64" ]
-        DO +INSTALL_PACKAGES --PACKAGES="g++-multilib"
-    ELSE IF [ "$TARGETPLATFORM" = "linux/arm64" ]
-        DO +INSTALL_PACKAGES --PACKAGES="g++-multilib-arm-linux-gnueabi"
-    ELSE
-        RUN echo "Unsupported platform $TARGETPLATFORM" && exit 1
-    END
-
-    SAVE IMAGE --cache-hint
-
-test-environment-flash:
-    # apache flex requires java
-    FROM +test-environment-java
-    # requirements for running flash player
-    DO +INSTALL_PACKAGES --PACKAGES="libglib2.0-0 libfreetype6 xvfb libxcursor1 libnss3 libgtk2.0-0"
-    SAVE IMAGE --cache-hint
-
-RUN_CI:
-    COMMAND
-    COPY tests tests
-    RUN mkdir /haxelib && haxelib setup /haxelib
-    WORKDIR tests
-    ARG --required TEST
-    ENV TEST="$TEST"
-    RUN haxe RunCi.hxml
-
-test-macro:
-    FROM +test-environment
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=macro
-
-test-neko:
-    FROM +test-environment
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=neko
-
-test-js:
-    FROM +test-environment-js
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=js
-
-test-hl:
-    FROM +test-environment-hl
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=hl
-
-test-cpp:
-    FROM +test-environment-cpp
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=cpp
-
-test-java:
-    FROM +test-environment-java
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=java
-
-test-jvm:
-    FROM +test-environment-java
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=jvm
-
-test-php:
-    FROM +test-environment-php
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=php
-
-test-python:
-    FROM +test-environment-python
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=python
-
-test-lua:
-    FROM +test-environment-lua
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=lua
-
-test-flash:
-    FROM +test-environment-flash
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=flash
-
-test-all:
-    ARG TARGETPLATFORM
-
-    BUILD +test-macro
-    BUILD +test-neko
-    BUILD +test-php
-    BUILD +test-python
-    BUILD +test-java
-    BUILD +test-jvm
-    BUILD +test-cpp
-    BUILD +test-lua
-    BUILD +test-js
-    BUILD +test-flash
-
-    IF [ "$TARGETPLATFORM" = "linux/amd64" ]
-        BUILD +test-hl # FIXME: hl can't compile on arm64 (JIT issue?)
-    END
-
-github-actions:
-    DO +INSTALL_NEKO
-    DO +INSTALL_HAXE
-    RUN mkdir -p "$WORKDIR"/.github/workflows
-    COPY extra/github-actions extra/github-actions
-    WORKDIR extra/github-actions
-    RUN haxe build.hxml
-    SAVE ARTIFACT --keep-ts "$WORKDIR"/.github/workflows AS LOCAL .github/workflows
-
-ghcr-login:
-    LOCALLY
-    RUN echo "$GITHUB_CR_PAT" | docker login ghcr.io -u "$GITHUB_USERNAME" --password-stdin

+ 35 - 23
Makefile

@@ -59,30 +59,18 @@ PACKAGE_FILE_NAME=haxe_$(COMMIT_DATE)_$(COMMIT_SHA)
 HAXE_VERSION=$(shell $(CURDIR)/$(HAXE_OUTPUT) -version 2>&1 | awk '{print $$1;}')
 HAXE_VERSION_SHORT=$(shell echo "$(HAXE_VERSION)" | grep -oE "^[0-9]+\.[0-9]+\.[0-9]+")
 
-NEKO_VERSION=2.3.0
+NEKO_VERSION=2.4.0-rc.1
 NEKO_MAJOR_VERSION=$(shell echo "$(NEKO_VERSION)" | grep -oE "^[0-9]+")
 NEKO_VERSION_TAG=v$(shell echo "$(NEKO_VERSION)" | sed "s/\./-/g")
 
-ifneq ($(STATICLINK),0)
-	LIB_PARAMS= -cclib '-Wl,-Bstatic -lpcre2-8 -lz -lmbedtls -lmbedx509 -lmbedcrypto -Wl,-Bdynamic '
-else
-	LIB_PARAMS?= -cclib -lpcre2-8 -cclib -lz -cclib -lmbedtls -cclib -lmbedx509 -cclib -lmbedcrypto
-endif
-ifeq ($(SYSTEM_NAME),Mac)
-	LIB_PARAMS+= -cclib '-framework Security -framework CoreFoundation'
-endif
-
 all: haxe tools
 
 haxe:
-	$(DUNE_COMMAND) build --workspace dune-workspace.dev src-prebuild/prebuild.exe
-	_build/default/src-prebuild/prebuild.exe libparams $(LIB_PARAMS) > lib.sexp
-	_build/default/src-prebuild/prebuild.exe version "$(ADD_REVISION)" "$(BRANCH)" "$(COMMIT_SHA)" > src/compiler/version.ml
-	$(DUNE_COMMAND) build --workspace dune-workspace.dev src/haxe.exe
+	dune build --profile release src/haxe.exe
 	cp -f _build/default/src/haxe.exe ./"$(HAXE_OUTPUT)"
 
 plugin: haxe
-	$(DUNE_COMMAND) build --workspace dune-workspace.dev plugins/$(PLUGIN)/$(PLUGIN).cmxs
+	$(DUNE_COMMAND) build --profile release plugins/$(PLUGIN)/$(PLUGIN).cmxs
 	mkdir -p plugins/$(PLUGIN)/cmxs/$(SYSTEM_NAME)
 	cp -f _build/default/plugins/$(PLUGIN)/$(PLUGIN).cmxs plugins/$(PLUGIN)/cmxs/$(SYSTEM_NAME)/plugin.cmxs
 
@@ -104,10 +92,24 @@ copy_haxetoolkit: /cygdrive/c/HaxeToolkit/haxe/haxe.exe
 	cp $< $@
 endif
 
+ifeq ($(SYSTEM_NAME),Mac)
+# This assumes that haxelib and neko will both be installed into INSTALL_DIR,
+# which is the case when installing using the mac installer package
+HAXELIB_LFLAGS= -Wl,-rpath,$(INSTALL_DIR)/lib
+endif
+
+haxelib_unix:
+	cd $(CURDIR)/extra/haxelib_src && \
+	HAXE_STD_PATH=$(CURDIR)/std $(CURDIR)/$(HAXE_OUTPUT) client.hxml && \
+	nekotools boot -c run.n
+	$(CC) $(CURDIR)/extra/haxelib_src/run.c -o $(HAXELIB_OUTPUT) -lneko $(HAXELIB_LFLAGS)
+
 # haxelib should depends on haxe, but we don't want to do that...
-haxelib:
-	(cd $(CURDIR)/extra/haxelib_src && $(CURDIR)/$(HAXE_OUTPUT) client.hxml && nekotools boot run.n)
-	mv extra/haxelib_src/run$(EXTENSION) $(HAXELIB_OUTPUT)
+ifeq ($(SYSTEM_NAME),Windows)
+haxelib: haxelib_$(PLATFORM)
+else
+haxelib: haxelib_unix
+endif
 
 tools: haxelib
 
@@ -128,7 +130,7 @@ uninstall:
 	rm -rf $(DESTDIR)$(INSTALL_STD_DIR)
 
 opam_install:
-	opam install camlp5 ocamlfind dune --yes
+	opam install ocamlfind dune --yes
 
 haxe_deps:
 	opam pin add haxe . --no-action
@@ -167,19 +169,29 @@ xmldoc:
 $(INSTALLER_TMP_DIR):
 	mkdir -p $(INSTALLER_TMP_DIR)
 
-$(INSTALLER_TMP_DIR)/neko-osx64.tar.gz: $(INSTALLER_TMP_DIR)
-	wget -nv https://github.com/HaxeFoundation/neko/releases/download/$(NEKO_VERSION_TAG)/neko-$(NEKO_VERSION)-osx64.tar.gz -O installer/neko-osx64.tar.gz
+# Can be 'universal', 'arm64', or 'x86_64'
+ifndef PACKAGE_INSTALLER_MAC_ARCH
+PACKAGE_INSTALLER_MAC_ARCH:=$(shell uname -m)
+endif
+
+$(INSTALLER_TMP_DIR)/neko-osx.tar.gz: $(INSTALLER_TMP_DIR)
+	NEKO_ARCH_SUFFIX=$$(if [ "$(PACKAGE_INSTALLER_MAC_ARCH)" = "x86_64" ]; then \
+		echo 64; \
+	else \
+		echo "-$(PACKAGE_INSTALLER_MAC_ARCH)"; \
+	fi); \
+	wget -nv https://github.com/HaxeFoundation/neko/releases/download/$(NEKO_VERSION_TAG)/neko-$(NEKO_VERSION)-osx$$NEKO_ARCH_SUFFIX.tar.gz -O installer/neko-osx.tar.gz
 
 # Installer
 
-package_installer_mac: $(INSTALLER_TMP_DIR)/neko-osx64.tar.gz package_unix
+package_installer_mac: $(INSTALLER_TMP_DIR)/neko-osx.tar.gz package_unix
 	$(eval OUTFILE := $(shell pwd)/$(PACKAGE_OUT_DIR)/$(PACKAGE_FILE_NAME)_installer.tar.gz)
 	$(eval PACKFILE := $(shell pwd)/$(PACKAGE_OUT_DIR)/$(PACKAGE_FILE_NAME)_bin.tar.gz)
 	$(eval VERSION := $(shell $(CURDIR)/$(HAXE_OUTPUT) -version 2>&1))
 	bash -c "rm -rf $(INSTALLER_TMP_DIR)/{resources,pkg,tgz,haxe.tar.gz}"
 	mkdir $(INSTALLER_TMP_DIR)/resources
 	# neko - unpack to change the dir name
-	cd $(INSTALLER_TMP_DIR)/resources && tar -zxvf ../neko-osx64.tar.gz
+	cd $(INSTALLER_TMP_DIR)/resources && tar -zxvf ../neko-osx.tar.gz
 	mv $(INSTALLER_TMP_DIR)/resources/neko* $(INSTALLER_TMP_DIR)/resources/neko
 	cd $(INSTALLER_TMP_DIR)/resources && tar -zcvf neko.tar.gz neko
 	# haxe - unpack to change the dir name

+ 7 - 4
Makefile.win

@@ -42,10 +42,6 @@ ifdef FILTER
 CC_CMD=($(COMPILER) $(ALL_CFLAGS) -c $< 2>tmp.cmi && $(FILTER)) || ($(FILTER) && exit 1)
 endif
 
-ifeq ($(STATICLINK),0)
-	LIB_PARAMS = -cclib -lpcre2-8 -cclib -lz -cclib -lcrypt32 -cclib -lmbedtls -cclib -lmbedcrypto -cclib -lmbedx509
-endif
-
 PACKAGE_FILES=$(HAXE_OUTPUT) $(HAXELIB_OUTPUT) std \
 	"$$(cygcheck $(CURDIR)/$(HAXE_OUTPUT) | grep zlib1.dll | sed -e 's/^\s*//')" \
 	"$$(cygcheck $(CURDIR)/$(HAXE_OUTPUT) | grep libpcre2-8-0.dll | sed -e 's/^\s*//')" \
@@ -53,6 +49,13 @@ PACKAGE_FILES=$(HAXE_OUTPUT) $(HAXELIB_OUTPUT) std \
 	"$$(cygcheck $(CURDIR)/$(HAXE_OUTPUT) | grep libmbedtls.dll | sed -e 's/^\s*//')" \
 	"$$(cygcheck $(CURDIR)/$(HAXE_OUTPUT) | grep libmbedx509.dll | sed -e 's/^\s*//')"
 
+# haxelib should depends on haxe, but we don't want to do that...
+haxelib_win:
+	cd $(CURDIR)/extra/haxelib_src && \
+	HAXE_STD_PATH=$$(cygpath -m $(CURDIR)/std) $(CURDIR)/$(HAXE_OUTPUT) client.hxml && \
+	nekotools boot run.n
+	mv extra/haxelib_src/run$(EXTENSION) $(HAXELIB_OUTPUT)
+
 echo_package_files:
 	echo $(PACKAGE_FILES)
 

+ 1 - 2
README.md

@@ -80,9 +80,8 @@ You can get help and talk with fellow Haxers from around the world via:
  * [Haxe on Stack Overflow](https://stackoverflow.com/questions/tagged/haxe)
  * [Haxe Gitter chatroom](https://gitter.im/HaxeFoundation/haxe/)
  * [Haxe Discord server](https://discordapp.com/invite/0uEuWH3spjck73Lo)
- * [#haxe on Twitter](https://twitter.com/hashtag/haxe?src=hash)
 
-:+1: Get notified of the latest Haxe news, follow us on [Twitter](https://twitter.com/haxelang), [Facebook](https://www.facebook.com/haxe.org) and don't forget to read the [Haxe roundups](https://haxe.io/).
+:+1: Get notified of the latest Haxe news, don't forget to read the [Haxe roundups](https://haxe.io/).
 
 ## Version compatibility
 

+ 96 - 0
WinSetup.ps1

@@ -0,0 +1,96 @@
+# Usage:
+# - install Git
+# - install Neko
+# - checkout haxe git
+# - run from command "powershell -noexit -ExecutionPolicy Bypass -File .\WinSetup.ps1"
+
+function Cmd-Path($file) {
+	try { Split-Path -Parent (Get-Command "$file.exe" -ErrorAction Stop).Source } catch { "" }
+}
+
+# resolve Opam binary and repo
+# you can choose when opam is installed by setting OPAM_INSTALL_DIR (and OPAMROOT - optional)
+
+$Opam = Cmd-Path "opam"
+$OpamRepo = $env:OPAMROOT
+$Git = Cmd-Path "git"
+
+if( !$Opam ) { $Opam = $env:OPAM_INSTALL_DIR }
+if( !$Opam ) { $Opam = (Get-Item .).FullName + "\opam" }
+if( !$OpamRepo ) { $OpamRepo = "$Opam\repo" }
+
+$CygRoot = "$OpamRepo\.cygwin\root"
+$WinSysPath = "$env:SystemRoot\System32"
+$Neko = Cmd-Path "neko"
+$RegPath = "HKCU:\Environment"
+$MbedVer = "2.16.3"
+$MbedTLS = "https://github.com/Simn/mingw64-mbedtls/releases/download/$MbedVer/mingw64-x86_64-mbedtls-$MbedVer-1.tar.xz"
+
+function Install-Init {
+
+	if( !$Git ) {
+		echo "**ERROR** git.exe could not be found in PATH"
+		Exit
+	}
+
+	if( !$Neko ) {
+		echo "**ERROR** Neko.exe could not be found in PATH"
+		Exit
+	}
+
+	# reset PATH to prevent conflicting cygwin or existing install
+	Set-Item -Path env:PATH -Value "$CygRoot\usr\x86_64-w64-mingw32\bin;$CygRoot\bin;$Opam;$Neko;$Git;$WinSysPath"
+
+	# set OPAM root dir
+	Set-Item -Path env:OPAMROOT -Value "$OpamRepo"
+}
+
+function Install-Opam {
+	# download opam binary
+	Invoke-Expression "& { $(Invoke-RestMethod https://opam.ocaml.org/install.ps1)} -OpamBinDir $Opam"
+
+	# init opam, assume that we have windows GIT installed
+	Invoke-Expression "opam init --cygwin-internal-install --no-git-location --shell=powershell --shell-setup"
+}
+
+function Install-Haxe-Deps {
+	Invoke-Expression "opam install . --deps-only --confirm-level=yes"
+
+	# install mbedtls mingw package
+	$tmpFile = "./mbed.tgz"
+	Invoke-Expression "curl $MbedTLS -o $tmpFile"
+	Invoke-Expression "tar -C / -xvf $tmpFile"
+	Remove-Item "$tmpFile"
+
+	# install lsp server
+	Invoke-Expression "opam install ocaml-lsp-server --confirm-level=yes"
+}
+
+function Add-Path($NewPath) {
+	$CurrentPath = (Get-ItemProperty -Path $RegPath -Name Path).Path
+	if ($CurrentPath -notlike "*$NewPath*") {
+		$CurrentPath = "$NewPath;$CurrentPath"
+		Set-ItemProperty -Path $RegPath -Name Path -Value $CurrentPath
+	}
+}
+
+function Setup-Paths {
+	Add-Path "$OpamRepo\default\bin"
+	Add-Path "$CygRoot\bin"
+	Add-Path "$CygRoot\usr\x86_64-w64-mingw32\bin"
+	Add-Path "$CygRoot\usr\x86_64-w64-mingw32\sys-root\mingw\bin"
+	Set-ItemProperty -Path $RegPath -Name OPAMROOT -Value $OpamRepo
+
+	# refresh for all processes (no need to restart)
+	$signature = @"
+[DllImport("user32.dll", CharSet = CharSet.Auto)]
+public static extern int SendMessageTimeout(IntPtr hWnd, int Msg, IntPtr wParam, string lParam, int fuFlags, int uTimeout, out IntPtr lpdwResult);
+"@
+	$SendMessageTimeout = Add-Type -MemberDefinition $signature -Name "Win32SendMessageTimeout" -Namespace Win32Functions -PassThru
+	$SendMessageTimeout::SendMessageTimeout([IntPtr]0xFFFF, 0x1A, [IntPtr]::Zero, "Environment", 2, 5000, [ref][IntPtr]::Zero)
+}
+
+Install-Init
+Install-Opam
+Install-Haxe-Deps
+Setup-Paths

+ 2 - 2
dune

@@ -1,2 +1,2 @@
-(dirs :standard \ tests std extra)
-(data_only_dirs lib)
+(dirs src libs)
+(data_only_dirs src-json)

+ 0 - 4
dune-project

@@ -4,7 +4,3 @@
 (package
 	(name haxe)
 )
-
-(package
-	(name haxe_prebuild)
-)

+ 0 - 2
dune-workspace.dev

@@ -1,2 +0,0 @@
-(lang dune 1.11)
-(profile release)

+ 72 - 0
extra/CHANGES.txt

@@ -1,3 +1,75 @@
+2024-08-07 4.3.6
+
+	Bugfixes:
+
+	display : do not define "display" for json rpc diagnostics (#11746)
+	cpp : null check interfaces (#11743)
+	hl : ignore WANT_READ/WANT_WRITE errors when the socket is known to be blocking (#11655)
+	hl : fix weird compiler error (#11690)
+	jvm : fix --java out -D jvm deprecation warning (#11739)
+	macro : Context.reportError should not abort build macros (#11741)
+
+2024-07-18 4.3.5
+
+	General improvements:
+
+	all : macOS universal binaries (#11572)
+	display : migrated diagnostics to Json RPC (#11707)
+	macro : expose TVar VStatic flag in macros. (#11683)
+
+	Bugfixes:
+
+	all : fix `@:structInit` with getter + setter (#11662)
+	all : add missing recursion when checking abstract casts (#11676)
+	all : fail nicer if unify_min can't find a common type (#11684)
+	all : fix pretty errors failure (#11700)
+	all : disallow local statics when inlining (#11725)
+	display : unused pattern variables should be marked as unused (#7282)
+	display : diagnostics miss "used without being initialized" errors (#7931)
+	display : recursive inline is not supported on enum abstract constructor (#11177)
+	display : Void as value error disappears on second compilation (#11184)
+	display : false positives of "This cast has no effect, but some of its sub-expressions" (#11203)
+	cpp : inherit `@:unreflective` on generic classes
+	hl : fix bit shift + assignment in while loop header (#10783)
+	hl : fix do-while loop in genhl+hlopt (#11461)
+	hl/c : use uint64 instead of uint64_t for shift cast (#11721)
+	macro : don't choke on namePos for reification pattern matching (#11671)
+
+	Deprecation / future version handling:
+
+	macro : `Compiler.include()` warning when used outside init macros
+
+2024-03-04 4.3.4
+
+	General improvements:
+
+	all : allow @:using with Class and Enum (#11553)
+	display : expose list of metadata/defines (#11399)
+
+	Bugfixes:
+
+	all : typedef vs. GADT (#11446)
+	all : don't double-throw exceptions (#11175)
+	all : fix some abstract inlining failures (#11526)
+	all : fix JsonPrinter empty parent class (#11560)
+	all : dce: clean up operator handling (#11427)
+	all : analyzer: deal with unreachable block in binops (#11402)
+	all : analyzer: don't recursively check enum values when const propagating (#11429)
+	all : analyzer: fix check for inlined purity meta
+	display : fix errors from parser missing in diagnostics (#8687)
+	display : fix display services with static extension (#11285)
+	display : fix display services with safe navigation (#11205)
+	hl : hlopt rework try-catch control flow (#11581)
+	hl/c : fix reserved keywords (#11408)
+
+	Deprecation / future version handling:
+
+	all : don't infer string on concat, when using -D haxe-next (#11318)
+	all : handle optional arguments with bind, when using -D haxe-next (#11533)
+	macro : build order vs inheritance, when using -D haxe-next (#11582)
+	macro : deprecate some API from haxe.macro.Compiler (see #11540)
+	java/jvm : warn about --java ... -D jvm vs --jvm ...
+
 2023-09-17 4.3.3
 
 	General improvements:

+ 23 - 3
extra/LICENSE.txt

@@ -1,9 +1,29 @@
 Haxe Licenses
 -------------
 
-For details about Haxe Licenses, please read http://haxe.org/foundation/open-source.html
+The Haxe toolkit is Free and Open-Source software that uses several licenses.
 
-The Haxe Standard Library MIT License :
+The Haxe compiler is licensed under the GNU GPL v2+ license (SPDX: GPL-2.0-or-later).
+
+The compiler is built around the Haxe source code base.
+
+If a file does not have a license header or does not fall under one of the exceptions listed below, 
+it should be assumed to be licensed under the GNU GPL v2+ license with the standard copyright notice:  
+Copyright (C) 2005-2024 Haxe Foundation.
+
+- The Haxe Standard Library is licensed under the MIT License, which is reproduced below.  
+  It is located in the `std/` directory, and the MIT license applies to this part of Haxe.
+
+- Haxe contains third-party source code, some of which is located in the `libs/` directory.  
+  Each third-party module includes its own license.  
+  For integration with Haxe, these modules may contain Haxe-related files such as a "dune" file
+  or other files for integration with Haxe or OCaml.
+  These files are licensed under the Haxe Compiler license (GNU GPL v2+).
+
+For a summary of Haxe licenses, please read [http://haxe.org/foundation/open-source.html](http://haxe.org/foundation/open-source.html).
+
+
+The Haxe Standard Library MIT License:
 --------------------------
 
 Copyright (C)2005-2016 Haxe Foundation
@@ -26,7 +46,7 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 DEALINGS IN THE SOFTWARE.
 
-The Haxe compiler GPL License :
+The Haxe compiler GPL License:
 -------------------------------
 
 		    GNU GENERAL PUBLIC LICENSE

+ 16 - 12
extra/github-actions/build-mac.yml

@@ -8,33 +8,29 @@
     set -ex
     brew update
     brew bundle --file=tests/Brewfile --no-upgrade
-    cpanm IPC::System::Simple
-    cpanm String::ShellQuote
     curl -L https://github.com/madler/zlib/releases/download/v$ZLIB_VERSION/zlib-$ZLIB_VERSION.tar.gz | tar xz
     cd zlib-$ZLIB_VERSION
     ./configure
-    make && make install
+    sudo make && sudo make install
     cd ..
     curl -L https://github.com/ARMmbed/mbedtls/archive/v$MBEDTLS_VERSION.tar.gz | tar xz
     cd mbedtls-$MBEDTLS_VERSION
-    make && make install
+    sudo make && sudo make install
     cd ..
     curl -L https://github.com/PCRE2Project/pcre2/releases/download/pcre2-$PCRE2_VERSION/pcre2-$PCRE2_VERSION.tar.gz | tar xz
     cd pcre2-$PCRE2_VERSION
     ./configure --enable-unicode --enable-pcre2-8 --enable-pcre2-16 --enable-pcre2-32 --enable-unicode-properties --enable-pcre2grep-libz --enable-pcre2grep-libbz2 --enable-jit
-    make && make install
+    sudo make && sudo make install
     cd ..
 
 - name: Install OCaml libraries
   if: steps.cache-opam.outputs.cache-hit != 'true'
   run: |
     set -ex
-    opam init # --disable-sandboxing
+    opam init -c ${{ env.OCAML_VERSION }}
     opam update
-    opam switch create 4.08.1
     eval $(opam env)
     opam env
-    opam pin add ctypes 0.17.1 --yes
     opam pin add haxe . --no-action
     opam install haxe --deps-only --assume-depexts
     opam list
@@ -48,15 +44,23 @@
   run: |
     set -ex
     eval $(opam env)
-    opam config exec -- make -s -j`sysctl -n hw.ncpu` STATICLINK=1 "LIB_PARAMS=/usr/local/lib/libz.a /usr/local/lib/libpcre2-8.a /usr/local/lib/libmbedtls.a /usr/local/lib/libmbedcrypto.a /usr/local/lib/libmbedx509.a -cclib '-framework Security -framework CoreFoundation'" haxe
+    opam config exec -- make -s STATICLINK=1 "LIB_PARAMS=\"/usr/local/lib/libz.a\" \"/usr/local/lib/libpcre2-8.a\" \"/usr/local/lib/libmbedtls.a\" \"/usr/local/lib/libmbedcrypto.a\" \"/usr/local/lib/libmbedx509.a\"" haxe
     opam config exec -- make -s haxelib
     make -s package_unix package_installer_mac
     ls -l out
     otool -L ./haxe
     otool -L ./haxelib
 
-- name: Upload artifact
-  uses: actions/upload-artifact@v3
+- name: Upload artifact (x64)
+  if: runner.arch == 'X64'
+  uses: actions/upload-artifact@v4
   with:
-    name: macBinaries
+    name: macX64Binaries
+    path: out
+
+- name: Upload artifact (arm)
+  if: runner.arch == 'ARM64'
+  uses: actions/upload-artifact@v4
+  with:
+    name: macArmBinaries
     path: out

+ 10 - 26
extra/github-actions/build-windows.yml

@@ -1,14 +1,3 @@
-- name: Expose mingw dll files
-  shell: pwsh
-  run: Write-Host "::add-path::${env:CYG_ROOT}/usr/$($env:MINGW_ARCH)-w64-mingw32/sys-root/mingw/bin"
-
-# required to be able to retrieve the revision
-- name: Mark directory as safe
-  shell: pwsh
-  run: |
-    Set-PSDebug -Trace 1
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'git config --global --add safe.directory "$OLDPWD"')
-
 - name: Set ADD_REVISION=1 for non-release
   if: ${{ !startsWith(github.ref, 'refs/tags/') }}
   shell: pwsh
@@ -18,23 +7,18 @@
   shell: pwsh
   run: |
     Set-PSDebug -Trace 1
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -s -f Makefile.win -j`nproc` haxe 2>&1')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -s -f Makefile.win haxelib 2>&1')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -f Makefile.win echo_package_files package_bin package_installer_win package_choco 2>&1')
-    dir out
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && cygcheck ./haxe.exe')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && cygcheck ./haxelib.exe')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && ls ./out')
-
-- name: Check artifact
-  shell: bash
-  run: |
-    ls out
-    # Output should contain binaries zip, installer zip and nupkg
-    [ $(ls -1 out | wc -l) -eq "3" ]
+    # stop after any command returns an error
+    $PSNativeCommandUseErrorActionPreference = $true
+    $ErrorActionPreference = 'Stop'
+    opam exec -- make -s -f Makefile.win -j"$env:NUMBER_OF_PROCESSORS" haxe
+    opam exec -- make -s -f Makefile.win haxelib
+    opam exec -- make -f Makefile.win echo_package_files package_bin package_installer_win package_choco
+    cygcheck ./haxe.exe
+    cygcheck ./haxelib.exe
+    ls ./out
 
 - name: Upload artifact
-  uses: actions/upload-artifact@v3
+  uses: actions/upload-artifact@v4
   with:
     name: win${{env.ARCH}}Binaries
     path: out

+ 2 - 0
extra/github-actions/install-neko-unix.yml

@@ -6,9 +6,11 @@
     tar -xf $RUNNER_TEMP/neko_latest.tar.gz -C $RUNNER_TEMP
     NEKOPATH=`echo $RUNNER_TEMP/neko-*-*`
     sudo mkdir -p /usr/local/bin
+    sudo mkdir -p /usr/local/include
     sudo mkdir -p /usr/local/lib/neko
     sudo ln -s $NEKOPATH/{neko,nekoc,nekoml,nekotools}  /usr/local/bin/
     sudo ln -s $NEKOPATH/libneko.*                      /usr/local/lib/
+    sudo ln -s $NEKOPATH/include/*                      /usr/local/include/
     sudo ln -s $NEKOPATH/*.ndll                         /usr/local/lib/neko/
     echo "NEKOPATH=$NEKOPATH" >> $GITHUB_ENV
 

+ 1 - 1
extra/github-actions/install-nsis.yml

@@ -1,5 +1,5 @@
 - name: choco install nsis
-  uses: nick-invision/retry@v2
+  uses: nick-invision/retry@v3
   with:
     timeout_minutes: 10
     max_attempts: 10

+ 15 - 5
extra/github-actions/install-ocaml-libs-windows.yml

@@ -1,6 +1,16 @@
 - name: Install OCaml libraries
-  shell: pwsh
-  run: |
-    Set-PSDebug -Trace 1
-    opam install haxe --deps-only
-    opam list
+  uses: nick-fields/retry@v3
+  with:
+    timeout_minutes: 10
+    max_attempts: 10
+    retry_on: timeout
+    shell: pwsh
+    command: |
+      Set-PSDebug -Trace 1
+      # stop after any command returns an error
+      $PSNativeCommandUseErrorActionPreference = $true
+      $ErrorActionPreference = 'Stop'
+      # see: https://github.com/aantron/luv/issues/162
+      $env:PATH="${env:CYG_ROOT}\bin;${env:CYG_ROOT}\usr\x86_64-w64-mingw32\bin;${env:PATH}"
+      opam install haxe --deps-only
+      opam list

+ 7 - 31
extra/github-actions/install-ocaml-windows.yml

@@ -1,39 +1,15 @@
 - name: Setup ocaml
-  id: ocaml
-  continue-on-error: true
-  uses: kLabz/setup-ocaml@win32
+  uses: ocaml/setup-ocaml@v3
   with:
-    ocaml-compiler: 4.08.1
-    opam-depext: false
-    opam-repositories: |
-      opam-repository-mingw: https://github.com/ocaml-opam/opam-repository-mingw.git#sunset
-      default: https://github.com/ocaml/opam-repository.git
+    ocaml-compiler: ${{ env.OCAML_VERSION }}
     opam-local-packages: |
       haxe.opam
-    cache-prefix: w32-v1
-
-# TODO make it work on first try
-# (when cygwin cache doesn't exist, ocaml install fails with a curl error)
-- name: Setup ocaml (second chance)
-  if: steps.ocaml.outcome == 'failure'
-  uses: kLabz/setup-ocaml@win32
-  with:
-    ocaml-compiler: 4.08.1
-    opam-depext: false
-    opam-repositories: |
-      opam-repository-mingw: https://github.com/ocaml-opam/opam-repository-mingw.git#sunset
-      default: https://github.com/ocaml/opam-repository.git
-    opam-local-packages: |
-      haxe.opam
-    cache-prefix: w32-v1
 
 - name: Install dependencies
   shell: pwsh
+  env:
+    MBEDTLS_VERSION: 2.16.3
   run: |
-    Set-PSDebug -Trace 1
-    curl.exe -fsSL -o "libmbedtls.tar.xz" --retry 3 https://github.com/Simn/mingw64-mbedtls/releases/download/2.16.3/mingw64-$($env:MINGW_ARCH)-mbedtls-2.16.3-1.tar.xz
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'curl -L https://cpanmin.us | perl - App::cpanminus')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm IPC::System::Simple module')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm String::ShellQuote')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'echo "$OLDPWD"')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && tar -C / -xvf libmbedtls.tar.xz')
+    curl.exe -fsSL -o "libmbedtls.tar.xz" --retry 3 `
+      https://github.com/Simn/mingw64-mbedtls/releases/download/${{ env.MBEDTLS_VERSION }}/mingw64-${{ env.MINGW_ARCH }}-mbedtls-${{ env.MBEDTLS_VERSION }}-1.tar.xz
+    ${{ env.CYG_ROOT }}\bin\tar.exe -C ${{ env.CYG_ROOT }} -xvf libmbedtls.tar.xz

+ 0 - 20
extra/github-actions/install-ocaml-windows64.yml

@@ -1,20 +0,0 @@
-- name: Setup ocaml
-  uses: ocaml/setup-ocaml@v2
-  with:
-    ocaml-compiler: 4.08.1
-    opam-repositories: |
-      opam-repository-mingw: https://github.com/ocaml-opam/opam-repository-mingw.git#sunset
-      default: https://github.com/ocaml/opam-repository.git
-    opam-local-packages: |
-      haxe.opam
-
-- name: Install dependencies
-  shell: pwsh
-  run: |
-    Set-PSDebug -Trace 1
-    curl.exe -fsSL -o "libmbedtls.tar.xz" --retry 3 https://github.com/Simn/mingw64-mbedtls/releases/download/2.16.3/mingw64-$($env:MINGW_ARCH)-mbedtls-2.16.3-1.tar.xz
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'curl -L https://cpanmin.us | perl - App::cpanminus')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm IPC::System::Simple module')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm String::ShellQuote')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'echo "$OLDPWD"')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && tar -C / -xvf libmbedtls.tar.xz')

+ 1 - 1
extra/github-actions/test-windows.yml

@@ -1,4 +1,4 @@
-- uses: actions/setup-node@v3
+- uses: actions/setup-node@v4
   with:
     node-version: 18.17.1
 

+ 177 - 82
extra/github-actions/workflows/main.yml

@@ -3,6 +3,13 @@
 name: CI
 on: [push, pull_request]
 
+env:
+  OCAML_VERSION: 5.3.0
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   windows64-build:
     runs-on: windows-latest
@@ -17,26 +24,17 @@ jobs:
         with:
           submodules: recursive
 
-      - name: Use GNU Tar from msys
-        run: |
-          echo "C:\msys64\usr\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
-          rm C:\msys64\usr\bin\bash.exe
-
       @import install-nsis.yml
       @import install-neko-windows.yml
-      @import install-ocaml-windows64.yml
+      @import install-ocaml-windows.yml
       @import install-ocaml-libs-windows.yml
       @import build-windows.yml
 
   linux-build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       OPAMYES: 1
-    strategy:
-      fail-fast: false
-      matrix:
-        ocaml: ["4.08.1", "5.0.0"]
     steps:
       - uses: actions/checkout@main
         with:
@@ -44,28 +42,27 @@ jobs:
 
       - name: Cache opam
         id: cache-opam
-        uses: actions/cache@v3.0.11
+        uses: actions/cache@v4
         with:
           path: ~/.opam/
-          key: ${{ runner.os }}-${{ matrix.ocaml }}-${{ hashFiles('./haxe.opam', './libs/') }}
+          key: ${{ runner.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
       @import install-neko-unix.yml
 
       - name: Install dependencies
         run: |
           set -ex
-          sudo add-apt-repository ppa:avsm/ppa -y # provides OPAM 2
-          sudo add-apt-repository ppa:haxe/ocaml -y # provides newer version of mbedtls
           sudo apt-get update -qqy
-          sudo apt-get install -qqy ocaml-nox camlp5 opam libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build libstring-shellquote-perl libipc-system-simple-perl
+          sudo apt-get install -qqy darcs bubblewrap ocaml-nox libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build
+          curl -sSL https://github.com/ocaml/opam/releases/download/2.3.0/opam-2.3.0-x86_64-linux -o $RUNNER_TEMP/opam
+          sudo install $RUNNER_TEMP/opam /usr/local/bin/opam
 
       - name: Install OCaml libraries
         if: steps.cache-opam.outputs.cache-hit != 'true'
         run: |
           set -ex
-          opam init # --disable-sandboxing
+          opam init -c ${{ env.OCAML_VERSION }}
           opam update
-          opam switch create ${{ matrix.ocaml }}
           opam pin add haxe . --no-action
           opam install haxe --deps-only --assume-depexts
           opam list
@@ -93,7 +90,6 @@ jobs:
         run: echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
 
       - name: Build xmldoc
-        if: matrix.ocaml == '4.08.1'
         run: |
           set -ex
           make -s xmldoc
@@ -105,21 +101,20 @@ jobs:
           EOL
 
       - name: Upload artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: linuxBinaries${{ (matrix.ocaml == '5.0.0' && '_ocaml5') || '' }}
+          name: linuxBinaries
           path: out
 
       - name: Upload xmldoc artifact
-        uses: actions/upload-artifact@v3
-        if: matrix.ocaml == '4.08.1'
+        uses: actions/upload-artifact@v4
         with:
           name: xmldoc
           path: extra/doc
 
   linux-test:
     needs: linux-build
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       TEST: ${{matrix.target}}
@@ -128,7 +123,6 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        ocaml: ["4.08.1", "5.0.0"]
         target: [macro, js, hl, cpp, jvm, php, python, lua, flash, neko]
         include:
           - target: hl
@@ -143,9 +137,9 @@ jobs:
       - uses: actions/checkout@main
         with:
           submodules: recursive
-      - uses: actions/download-artifact@v3
+      - uses: actions/download-artifact@v4
         with:
-          name: linuxBinaries${{ (matrix.ocaml == '5.0.0' && '_ocaml5') || '' }}
+          name: linuxBinaries
           path: linuxBinaries
 
       @import install-neko-unix.yml
@@ -188,7 +182,7 @@ jobs:
 
   test-docgen:
     needs: linux-build
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       HXCPP_COMPILE_CACHE: ~/hxcache
@@ -197,13 +191,13 @@ jobs:
         with:
           submodules: recursive
 
-      - uses: actions/download-artifact@v3
+      - uses: actions/download-artifact@v4
         with:
           name: linuxBinaries
           path: linuxBinaries
 
       - name: Download xmldoc artifact
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: xmldoc
           path: xmldoc
@@ -248,66 +242,134 @@ jobs:
           cpp/Dox -i ../../xmldoc -ex microsoft -ex javax -theme $(haxelib libpath dox)/themes/default
         working-directory: ${{github.workspace}}/tests/docgen
 
-  linux-arm64:
-    runs-on: ubuntu-20.04
-    permissions:
-      packages: write
+  linux-arm64-build:
+    runs-on: ubuntu-22.04-arm
     env:
-      FORCE_COLOR: 1
+      PLATFORM: linux-arm64
+      OPAMYES: 1
     steps:
-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+      - uses: actions/checkout@main
         with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Install Earthly
-        run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/download/v0.6.13/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete'
+          submodules: recursive
 
-      - name: Set up QEMU
-        id: qemu
-        uses: docker/setup-qemu-action@v2
+      - name: Cache opam
+        id: cache-opam
+        uses: actions/cache@v4
         with:
-            image: tonistiigi/binfmt:latest
-            platforms: all
+          path: ~/.opam/
+          key: arm-${{ runner.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
-      - uses: actions/checkout@main
-        with:
-          submodules: recursive
+      @import install-neko-unix.yml
 
-      - name: Set CONTAINER_ vars
+      - name: Install dependencies
         run: |
-          echo "CONTAINER_REG=$(echo ${{ github.repository }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV;
-          echo "CONTAINER_TAG=$(echo ${{ github.ref_name }} | sed -e 's/[^A-Za-z0-9\.]/-/g')" >> $GITHUB_ENV;
+          set -ex
+          sudo apt-get update -qqy
+          sudo apt-get install -qqy opam libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build
 
-      - name: Build devcontainer
-        run: earthly --platform=linux/arm64 +devcontainer --IMAGE_NAME="ghcr.io/${CONTAINER_REG}_devcontainer" --IMAGE_TAG="${CONTAINER_TAG}-arm64" --IMAGE_CACHE="ghcr.io/haxefoundation/haxe_devcontainer:development-arm64"
-        env:
-          EARTHLY_PUSH: "${{ github.event_name == 'push' }}"
-          EARTHLY_USE_INLINE_CACHE: true
-          EARTHLY_SAVE_INLINE_CACHE: true
+      - name: Install OCaml libraries
+        if: steps.cache-opam.outputs.cache-hit != 'true'
+        run: |
+          set -ex
+          opam init -c ${{ env.OCAML_VERSION }}
+          opam pin add haxe . --no-action
+          opam install haxe --deps-only --assume-depexts
+          opam list
+          ocamlopt -v
 
       - name: Set ADD_REVISION=1 for non-release
         if: ${{ !startsWith(github.ref, 'refs/tags/') }}
         run: echo "ADD_REVISION=1" >> $GITHUB_ENV
 
-      - name: Build
-        run: earthly --platform=linux/arm64 +build --ADD_REVISION="$ADD_REVISION" --SET_SAFE_DIRECTORY="true"
-        env:
-          EARTHLY_PUSH: "${{ github.event_name == 'push' }}"
-          EARTHLY_REMOTE_CACHE: "ghcr.io/${{env.CONTAINER_REG}}_cache:build-${{env.CONTAINER_TAG}}-arm64"
+      - name: Build Haxe
+        run: |
+          set -ex
+          eval $(opam env)
+          opam config exec -- make -s -j`nproc` STATICLINK=1 haxe
+          opam config exec -- make -s haxelib
+          make -s package_unix
+          ls -l out
+          ldd -v ./haxe
+          ldd -v ./haxelib
+
+      # https://stackoverflow.com/questions/58033366/how-to-get-current-branch-within-github-actions
+      - name: Extract branch name
+        id: extract_branch
+        shell: bash
+        run: echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
 
       - name: Upload artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: linuxArm64Binaries
-          path: out/linux/arm64
+          path: out
+
+  linux-arm64-test:
+    needs: linux-arm64-build
+    runs-on: ubuntu-22.04-arm
+    env:
+      PLATFORM: linux-arm64
+      TEST: ${{matrix.target}}
+      HXCPP_COMPILE_CACHE: ~/hxcache
+      HAXE_STD_PATH: /usr/local/share/haxe/std
+    strategy:
+      fail-fast: false
+      matrix:
+        target: [macro, js, cpp, jvm, php, python, lua, neko]
+        include:
+          - target: lua
+            APT_PACKAGES: ncurses-dev
+    steps:
+      - uses: actions/checkout@main
+        with:
+          submodules: recursive
+      - uses: actions/download-artifact@v4
+        with:
+          name: linuxArm64Binaries
+          path: linuxBinaries
+
+      @import install-neko-unix.yml
+
+      - name: Setup Haxe
+        run: |
+          sudo apt install -qqy libmbedtls-dev
+
+          set -ex
+          tar -xf linuxBinaries/*_bin.tar.gz -C linuxBinaries --strip-components=1
+          sudo mkdir -p /usr/local/bin/
+          sudo mkdir -p /usr/local/share/haxe/
+          sudo ln -s `pwd`/linuxBinaries/haxe /usr/local/bin/haxe
+          sudo ln -s `pwd`/linuxBinaries/haxelib /usr/local/bin/haxelib
+          sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
+
+      - name: Print Haxe version
+        run: haxe -version
+
+      - name: Setup haxelib
+        run: |
+          set -ex
+          mkdir ~/haxelib
+          haxelib setup ~/haxelib
+
+      - name: Install apt packages
+        if: matrix.APT_PACKAGES
+        run: |
+          set -ex
+          sudo apt update -qqy
+          sudo apt install -qqy ${{matrix.APT_PACKAGES}}
+
+      - name: Test
+        run: haxe RunCi.hxml
+        working-directory: ${{github.workspace}}/tests
 
   mac-build:
-    runs-on: macos-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [macos-14, macos-13]
+    runs-on: ${{ matrix.os }}
     env:
-      PLATFORM: mac
+      PLATFORM: mac${{ matrix.os == 'macos-14' && '-arm64' || '' }}
       OPAMYES: 1
       MACOSX_DEPLOYMENT_TARGET: 10.13
     steps:
@@ -317,10 +379,10 @@ jobs:
 
       - name: Cache opam
         id: cache-opam
-        uses: actions/cache@v3.0.11
+        uses: actions/cache@v4
         with:
           path: ~/.opam/
-          key: ${{ runner.os }}-${{ hashFiles('./haxe.opam', './libs/') }}
+          key: ${{ matrix.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
       @import install-neko-unix.yml
       @import build-mac.yml
@@ -343,7 +405,7 @@ jobs:
       - uses: actions/checkout@main
         with:
           submodules: recursive
-      - uses: actions/download-artifact@v3
+      - uses: actions/download-artifact@v4
         with:
           name: win${{env.ARCH}}Binaries
           path: win${{env.ARCH}}Binaries
@@ -351,9 +413,42 @@ jobs:
       @import install-neko-windows.yml
       @import test-windows.yml
 
-  mac-test:
+  mac-build-universal:
     needs: mac-build
     runs-on: macos-latest
+    steps:
+      - name: Checkout the repository
+        uses: actions/checkout@main
+      - uses: actions/download-artifact@v4
+        with:
+          name: macX64Binaries
+          path: macX64Binaries
+      - uses: actions/download-artifact@v4
+        with:
+          name: macArmBinaries
+          path: macArmBinaries
+
+      - name: Make universal binary
+        run: |
+          set -ex
+          tar -xf macX64Binaries/*_bin.tar.gz -C macX64Binaries --strip-components=1
+          tar -xf macArmBinaries/*_bin.tar.gz -C macArmBinaries --strip-components=1
+          lipo -create -output haxe macX64Binaries/haxe macArmBinaries/haxe
+          lipo -create -output haxelib macX64Binaries/haxelib macArmBinaries/haxelib
+          make -s package_unix package_installer_mac PACKAGE_INSTALLER_MAC_ARCH=universal
+          ls -l out
+          otool -L ./haxe
+          otool -L ./haxelib
+
+      - name: Upload artifact (universal)
+        uses: actions/upload-artifact@v4
+        with:
+          name: macBinaries
+          path: out
+
+  mac-test:
+    needs: mac-build-universal
+    runs-on: macos-13
     env:
       PLATFORM: mac
       TEST: ${{matrix.target}}
@@ -362,7 +457,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        target: [macro, js, hl, cpp, jvm, php, python, flash, neko]
+        target: [macro, js, hl, cpp, jvm, php, python, lua, flash, neko]
         include:
           - target: hl
             BREW_PACKAGES: ninja
@@ -370,7 +465,7 @@ jobs:
       - uses: actions/checkout@main
         with:
           submodules: recursive
-      - uses: actions/download-artifact@v3
+      - uses: actions/download-artifact@v4
         with:
           name: macBinaries
           path: macBinaries
@@ -380,8 +475,8 @@ jobs:
 
   deploy:
     if: success() && github.repository_owner == 'HaxeFoundation' && github.event_name != 'pull_request'
-    needs: [linux-test, linux-arm64, mac-test, windows64-test]
-    runs-on: ubuntu-20.04
+    needs: [linux-test, linux-arm64-test, mac-test, windows64-test]
+    runs-on: ubuntu-22.04
     steps:
       # this is only needed for to get `COMMIT_DATE`...
       # maybe https://github.community/t/expose-commit-timestamp-in-the-github-context-data/16460/3
@@ -390,7 +485,7 @@ jobs:
         uses: actions/checkout@main
 
       - name: Download build artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
 
       - name: Install awscli
         run: |
@@ -450,8 +545,8 @@ jobs:
 
   deploy_apidoc:
     if: success() && github.repository_owner == 'HaxeFoundation' && github.event_name != 'pull_request'
-    needs: [linux-test, linux-arm64, mac-test, windows64-test]
-    runs-on: ubuntu-20.04
+    needs: [linux-test, linux-arm64-test, mac-test, windows64-test]
+    runs-on: ubuntu-22.04
     steps:
       - name: Install dependencies
         run: |
@@ -459,7 +554,7 @@ jobs:
           sudo apt-get install -qqy libc6
 
       - name: Download Haxe
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: linuxBinaries
           path: linuxBinaries
@@ -475,7 +570,7 @@ jobs:
           sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
 
       - name: Download xmldoc artifact
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: xmldoc
           path: xmldoc

+ 1 - 1
extra/haxelib_src

@@ -1 +1 @@
-Subproject commit 98637027327d8cf385d302acaaf104bd6107d2bf
+Subproject commit 5a836287828fdaeb6aa91695a5eb399cee0f6640

+ 6 - 5
haxe.opam

@@ -11,7 +11,7 @@ maintainer: ["Haxe Foundation <[email protected]>" "Andy Li <[email protected]>
 authors: "Haxe Foundation <[email protected]>"
 homepage: "https://haxe.org/"
 bug-reports: "https://github.com/HaxeFoundation/haxe/issues"
-license: ["GPL2+" "MIT"]
+license: ["GPL-2.0-or-later" "MIT"]
 dev-repo: "git+https://github.com/HaxeFoundation/haxe.git"
 build: [
   [make]
@@ -19,19 +19,20 @@ build: [
 install: [make "install" "INSTALL_DIR=%{prefix}%"]
 remove: [make "uninstall" "INSTALL_DIR=%{prefix}%"]
 depends: [
-  ("ocaml" {>= "5.0"} & ("camlp5" {build}))
-    | ("ocaml" {>= "4.08" & < "5.0"} & ("camlp5" {build & = "8.00.03"}))
+  "ocaml"
   "ocamlfind" {build}
-  "dune" {>= "1.11"}
+  "dune" {>= "3.17"}
   "sedlex" {>= "2.0"}
   "xml-light"
   "extlib" {>= "1.7.8"}
   "sha"
   "camlp-streams"
+  "ppx_parser" {>= "0.2.0"}
   "conf-libpcre2-8"
   "conf-zlib"
   "conf-neko"
-  "luv" {>= "0.5.12"}
+  "luv" {>= "0.5.13"}
   "ipaddr"
   "terminal_size"
+  "domainslib"
 ]

+ 0 - 18
libs/.gitignore

@@ -1,18 +0,0 @@
-*.obj
-*.o
-*.cmx
-*.cmi
-*.cmxa
-*.a
-*.exe
-.*.swp
-*.lib
-
-/xml-light/doc
-/xml-light/xml_lexer.ml
-/xml-light/xml_parser.ml
-/xml-light/xml_parser.mli
-
-/ilib/dump
-*.cmo
-*.cma

+ 0 - 23
libs/Makefile

@@ -1,23 +0,0 @@
-OCAMLOPT = ocamlopt
-OCAMLC = ocamlc
-TARGET_FLAG = all
-LIBS=extlib-leftovers extc neko javalib ilib swflib objsize pcre2 ziplib
-
-all: $(LIBS)
-$(LIBS):
-	$(MAKE) -C $@ OCAMLOPT=$(OCAMLOPT) OCAMLC=$(OCAMLC) $(TARGET_FLAG)
-
-clean:
-	$(MAKE) -C extlib-leftovers clean
-	$(MAKE) -C extc clean
-	$(MAKE) -C neko clean
-	$(MAKE) -C javalib clean
-	$(MAKE) -C ilib clean
-	$(MAKE) -C swflib clean
-	$(MAKE) -C objsize clean
-	$(MAKE) -C pcre2 clean
-	$(MAKE) -C ziplib clean
-
-.PHONY: all clean $(LIBS)
-
-Makefile: ;

+ 0 - 30
libs/extc/Makefile

@@ -1,30 +0,0 @@
-ALL_CFLAGS = $(CFLAGS)
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-SRC = extc.ml process.ml extc_stubs.c process_stubs.c
-
-all: bytecode native
-
-bytecode: extc.cma
-
-native: extc.cmxa
-
-extc.cma: extc_stubs.o process_stubs.o extc.ml process.ml
-	ocamlfind $(OCAMLC) -safe-string -a -o extc.cma -package extlib extc.ml process.ml
-
-extc.cmxa: extc.ml process.ml extc_stubs.o process_stubs.o
-	ocamlfind $(OCAMLOPT) -safe-string -a -o extc.cmxa -package extlib extc.ml process.ml
-
-extc_stubs.o: extc_stubs.c
-	ocamlfind $(OCAMLC) -safe-string $(ALL_CFLAGS) extc_stubs.c
-
-process_stubs.o: process_stubs.c
-	ocamlfind $(OCAMLC) -safe-string $(ALL_CFLAGS) process_stubs.c
-
-clean:
-	rm -f extc.cma extc.cmi extc.cmx extc.cmxa extc.o extc.obj extc.lib extc_stubs.obj extc_stubs.o process.cmx process.obj process.cmi process.o process_stubs.obj process_stubs.o
-	rm -f extc.a libextc.a libextc.lib extc.cmo process.cmo
-
-.PHONY: all bytecode native clean
-Makefile: ;
-$(SRC): ;

+ 1 - 7
libs/extc/process_stubs.c

@@ -37,13 +37,7 @@
 #	include <unistd.h>
 #	include <errno.h>
 #	include <string.h>
-#	ifndef __APPLE__
-#		if defined(__FreeBSD__) || defined(__DragonFly__)
-#			include <sys/wait.h>
-#		else
-#			include <wait.h>
-#		endif
-#	endif
+#	include <sys/wait.h>
 #endif
 
 #ifdef _WIN32

+ 501 - 0
libs/extlib-leftovers/LICENSE

@@ -0,0 +1,501 @@
+                  GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+                            Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+  This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it.  You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations below.
+
+  When we speak of free software, we are referring to freedom of use,
+not price.  Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+  To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights.  These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+  For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you.  You must make sure that they, too, receive or can get the source
+code.  If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it.  And you must show them these terms so they know their rights.
+
+  We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+  To protect each distributor, we want to make it very clear that
+there is no warranty for the free library.  Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+  Finally, software patents pose a constant threat to the existence of
+any free program.  We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder.  Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+  Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License.  This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License.  We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+  When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library.  The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom.  The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+  We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License.  It also provides other free software developers Less
+of an advantage over competing non-free programs.  These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries.  However, the Lesser license provides advantages in certain
+special circumstances.
+
+  For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it becomes
+a de-facto standard.  To achieve this, non-free programs must be
+allowed to use the library.  A more frequent case is that a free
+library does the same job as widely used non-free libraries.  In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+  In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software.  For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+  Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.  Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library".  The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+                  GNU LESSER GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+  A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+  The "Library", below, refers to any such software library or work
+which has been distributed under these terms.  A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language.  (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+  "Source code" for a work means the preferred form of the work for
+making modifications to it.  For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control compilation
+and installation of the library.
+
+  Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it).  Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+  1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+  You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+  2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+
+    b) You must cause the files modified to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    c) You must cause the whole of the work to be licensed at no
+    charge to all third parties under the terms of this License.
+
+    d) If a facility in the modified Library refers to a function or a
+    table of data to be supplied by an application program that uses
+    the facility, other than as an argument passed when the facility
+    is invoked, then you must make a good faith effort to ensure that,
+    in the event an application does not supply such function or
+    table, the facility still operates, and performs whatever part of
+    its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has
+    a purpose that is entirely well-defined independent of the
+    application.  Therefore, Subsection 2d requires that any
+    application-supplied function or table used by this function must
+    be optional: if the application does not supply it, the square
+    root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library.  To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License.  (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.)  Do not make any other change in
+these notices.
+
+  Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+  This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+  4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+  If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library".  Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+  However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library".  The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+  When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library.  The
+threshold for this to be true is not precisely defined by law.
+
+  If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work.  (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+  Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+  6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+  You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License.  You must supply a copy of this License.  If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License.  Also, you must do one
+of these things:
+
+    a) Accompany the work with the complete corresponding
+    machine-readable source code for the Library including whatever
+    changes were used in the work (which must be distributed under
+    Sections 1 and 2 above); and, if the work is an executable linked
+    with the Library, with the complete machine-readable "work that
+    uses the Library", as object code and/or source code, so that the
+    user can modify the Library and then relink to produce a modified
+    executable containing the modified Library.  (It is understood
+    that the user who changes the contents of definitions files in the
+    Library will not necessarily be able to recompile the application
+    to use the modified definitions.)
+
+    b) Use a suitable shared library mechanism for linking with the
+    Library.  A suitable mechanism is one that (1) uses at run time a
+    copy of the library already present on the user's computer system,
+    rather than copying library functions into the executable, and (2)
+    will operate properly with a modified version of the library, if
+    the user installs one, as long as the modified version is
+    interface-compatible with the version that the work was made with.
+
+    c) Accompany the work with a written offer, valid for at
+    least three years, to give the same user the materials
+    specified in Subsection 6a, above, for a charge no more
+    than the cost of performing this distribution.
+
+    d) If distribution of the work is made by offering access to copy
+    from a designated place, offer equivalent access to copy the above
+    specified materials from the same place.
+
+    e) Verify that the user has already received a copy of these
+    materials or that you have already sent this user a copy.
+
+  For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it.  However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+  It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system.  Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+  7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work
+    based on the Library, uncombined with any other library
+    facilities.  This must be distributed under the terms of the
+    Sections above.
+
+    b) Give prominent notice with the combined library of the fact
+    that part of it is a work based on the Library, and explaining
+    where to find the accompanying uncombined form of the same work.
+
+  8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License.  Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License.  However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+  9. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Library or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+  10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+  11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any
+particular circumstance, the balance of the section is intended to apply,
+and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License may add
+an explicit geographical distribution limitation excluding those countries,
+so that distribution is permitted only in or among countries not thus
+excluded.  In such case, this License incorporates the limitation as if
+written in the body of this License.
+
+  13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation.  If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+  14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission.  For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this.  Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+                            NO WARRANTY
+
+  15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU.  SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+                     END OF TERMS AND CONDITIONS
+
+           How to Apply These Terms to Your New Libraries
+
+  If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change.  You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
+
+  To apply these terms, attach the following notices to the library.  It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the library's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the
+  library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+  <signature of Moe Ghoul>, 1 April 1990
+  Moe Ghoul, President of Vice
+
+That's all there is to it!

+ 0 - 35
libs/extlib-leftovers/Makefile

@@ -1,35 +0,0 @@
-# Makefile contributed by Alain Frisch
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-
-MODULES = \
- multiArray rbuffer uCharExt uTF8
-
-# the list is topologically sorted
-
-MLI = $(MODULES:=.mli)
-SRC = $(MLI) $(MODULES:=.ml)
-
-all: bytecode native
-
-opt: native
-
-bytecode: extlib-leftovers.cma
-
-native: extlib-leftovers.cmxa
-
-extlib-leftovers.cma: $(SRC)
-	$(OCAMLC) -safe-string -a -o extlib-leftovers.cma $(SRC)
-
-extlib-leftovers.cmxa: $(SRC)
-	$(OCAMLOPT) -safe-string -g -a -o extlib-leftovers.cmxa $(SRC)
-
-clean:
-	rm -f $(wildcard *.cmo) $(wildcard *.cmx) $(wildcard *.o) $(wildcard *.cmi) $(wildcard *.cma) $(wildcard *.cmxa) $(wildcard *.a) $(wildcard *.lib) $(wildcard *.obj)
-	rm -Rf doc
-
-.PHONY: all opt bytecode native doc copy install uninstall clean
-
-Makefile: ;
-
-$(SRC): ;

+ 2 - 2
libs/mbedtls/mbedtls.ml

@@ -43,8 +43,8 @@ external mbedtls_ssl_setup : mbedtls_ssl_context -> mbedtls_ssl_config -> mbedtl
 external mbedtls_ssl_write : mbedtls_ssl_context -> bytes -> int -> int -> mbedtls_result = "ml_mbedtls_ssl_write"
 
 external mbedtls_pk_init : unit -> mbedtls_pk_context = "ml_mbedtls_pk_init"
-external mbedtls_pk_parse_key : mbedtls_pk_context -> bytes -> string option -> mbedtls_result = "ml_mbedtls_pk_parse_key"
-external mbedtls_pk_parse_keyfile : mbedtls_pk_context -> string -> string option -> mbedtls_result = "ml_mbedtls_pk_parse_keyfile"
+external mbedtls_pk_parse_key : mbedtls_pk_context -> bytes -> string option -> mbedtls_ctr_drbg_context -> mbedtls_result = "ml_mbedtls_pk_parse_key"
+external mbedtls_pk_parse_keyfile : mbedtls_pk_context -> string -> string option -> mbedtls_ctr_drbg_context -> mbedtls_result = "ml_mbedtls_pk_parse_keyfile"
 external mbedtls_pk_parse_public_keyfile : mbedtls_pk_context -> string -> mbedtls_result = "ml_mbedtls_pk_parse_public_keyfile"
 external mbedtls_pk_parse_public_key : mbedtls_pk_context -> bytes -> mbedtls_result = "ml_mbedtls_pk_parse_public_key"
 

+ 98 - 52
libs/mbedtls/mbedtls_stubs.c

@@ -1,4 +1,3 @@
-#include <ctype.h>
 #include <string.h>
 #include <stdio.h>
 
@@ -18,13 +17,10 @@
 #include <caml/callback.h>
 #include <caml/custom.h>
 
-#include "mbedtls/debug.h"
 #include "mbedtls/error.h"
-#include "mbedtls/config.h"
 #include "mbedtls/ssl.h"
 #include "mbedtls/entropy.h"
 #include "mbedtls/ctr_drbg.h"
-#include "mbedtls/certs.h"
 #include "mbedtls/oid.h"
 
 #define PVoid_val(v) (*((void**) Data_custom_val(v)))
@@ -84,7 +80,7 @@ CAMLprim value ml_mbedtls_ctr_drbg_init(void) {
 
 CAMLprim value ml_mbedtls_ctr_drbg_random(value p_rng, value output, value output_len) {
 	CAMLparam3(p_rng, output, output_len);
-	CAMLreturn(Val_int(mbedtls_ctr_drbg_random(CtrDrbg_val(p_rng), String_val(output), Int_val(output_len))));
+	CAMLreturn(Val_int(mbedtls_ctr_drbg_random(CtrDrbg_val(p_rng), Bytes_val(output), Int_val(output_len))));
 }
 
 CAMLprim value ml_mbedtls_ctr_drbg_seed(value ctx, value p_entropy, value custom) {
@@ -124,7 +120,7 @@ CAMLprim value ml_mbedtls_entropy_init(void) {
 
 CAMLprim value ml_mbedtls_entropy_func(value data, value output, value len) {
 	CAMLparam3(data, output, len);
-	CAMLreturn(Val_int(mbedtls_entropy_func(PVoid_val(data), String_val(output), Int_val(len))));
+	CAMLreturn(Val_int(mbedtls_entropy_func(PVoid_val(data), Bytes_val(output), Int_val(len))));
 }
 
 // Certificate
@@ -171,7 +167,7 @@ CAMLprim value ml_mbedtls_x509_next(value chain) {
 
 CAMLprim value ml_mbedtls_x509_crt_parse(value chain, value bytes) {
 	CAMLparam2(chain, bytes);
-	const char* buf = String_val(bytes);
+	const unsigned char* buf = Bytes_val(bytes);
 	int len = caml_string_length(bytes);
 	CAMLreturn(Val_int(mbedtls_x509_crt_parse(X509Crt_val(chain), buf, len + 1)));
 }
@@ -191,8 +187,7 @@ CAMLprim value ml_mbedtls_x509_crt_parse_path(value chain, value path) {
 value caml_string_of_asn1_buf(mbedtls_asn1_buf* dat) {
 	CAMLparam0();
 	CAMLlocal1(s);
-	s = caml_alloc_string(dat->len);
-	memcpy(String_val(s), dat->p, dat->len);
+	s = caml_alloc_initialized_string(dat->len, (const char *)dat->p);
 	CAMLreturn(s);
 }
 
@@ -200,7 +195,11 @@ CAMLprim value hx_cert_get_alt_names(value chain) {
 	CAMLparam1(chain);
 	CAMLlocal1(obj);
 	mbedtls_x509_crt* cert = X509Crt_val(chain);
-	if (cert->ext_types & MBEDTLS_X509_EXT_SUBJECT_ALT_NAME == 0 || &cert->subject_alt_names == NULL) {
+#if MBEDTLS_VERSION_MAJOR >= 3
+	if (!mbedtls_x509_crt_has_ext_type(cert, MBEDTLS_X509_EXT_SUBJECT_ALT_NAME)) {
+#else
+	if ((cert->ext_types & MBEDTLS_X509_EXT_SUBJECT_ALT_NAME) == 0) {
+#endif
 		obj = Atom(0);
 	} else {
 		mbedtls_asn1_sequence* cur = &cert->subject_alt_names;
@@ -303,12 +302,59 @@ static struct custom_operations ssl_config_ops = {
 	.deserialize = custom_deserialize_default,
 };
 
+#ifdef _WIN32
+static int verify_callback(void* param, mbedtls_x509_crt *crt, int depth, uint32_t *flags) {
+	if (*flags == 0 || *flags & MBEDTLS_X509_BADCERT_CN_MISMATCH) {
+		return 0;
+	}
+
+	HCERTSTORE store = CertOpenStore(CERT_STORE_PROV_MEMORY, 0, 0, CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG, NULL);
+	if(store == NULL) {
+		return MBEDTLS_ERR_X509_FATAL_ERROR;
+	}
+	PCCERT_CONTEXT primary_context = {0};
+	if(!CertAddEncodedCertificateToStore(store, X509_ASN_ENCODING, crt->raw.p, crt->raw.len, CERT_STORE_ADD_REPLACE_EXISTING, &primary_context)) {
+		CertCloseStore(store, 0);
+		return MBEDTLS_ERR_X509_FATAL_ERROR;
+	}
+	PCCERT_CHAIN_CONTEXT chain_context = {0};
+	CERT_CHAIN_PARA parameters = {0};
+	if(!CertGetCertificateChain(NULL, primary_context, NULL, store, &parameters, 0, NULL, &chain_context)) {
+		CertFreeCertificateContext(primary_context);
+		CertCloseStore(store, 0);
+		return MBEDTLS_ERR_X509_FATAL_ERROR;
+	}
+	CERT_CHAIN_POLICY_PARA policy_parameters = {0};
+	CERT_CHAIN_POLICY_STATUS policy_status = {0};
+	if(!CertVerifyCertificateChainPolicy(CERT_CHAIN_POLICY_SSL, chain_context, &policy_parameters, &policy_status)) {
+		CertFreeCertificateChain(chain_context);
+		CertFreeCertificateContext(primary_context);
+		CertCloseStore(store, 0);
+		return MBEDTLS_ERR_X509_FATAL_ERROR;
+	}
+	if(policy_status.dwError == 0) {
+		*flags = 0;
+	} else {
+		// if we ever want to read the verification result,
+		// we need to properly map dwError to flags
+		*flags |= MBEDTLS_X509_BADCERT_OTHER;
+	}
+	CertFreeCertificateChain(chain_context);
+	CertFreeCertificateContext(primary_context);
+	CertCloseStore(store, 0);
+	return 0;
+}
+#endif
+
 CAMLprim value ml_mbedtls_ssl_config_init(void) {
 	CAMLparam0();
 	CAMLlocal1(obj);
 	obj = caml_alloc_custom(&ssl_config_ops, sizeof(mbedtls_ssl_config*), 0, 1);
 	mbedtls_ssl_config* ssl_config = malloc(sizeof(mbedtls_ssl_config));
 	mbedtls_ssl_config_init(ssl_config);
+	#ifdef _WIN32
+	mbedtls_ssl_conf_verify(ssl_config, verify_callback, NULL);
+	#endif
 	Config_val(obj) = ssl_config;
 	CAMLreturn(obj);
 }
@@ -366,29 +412,39 @@ CAMLprim value ml_mbedtls_pk_init(void) {
 	CAMLreturn(obj);
 }
 
-CAMLprim value ml_mbedtls_pk_parse_key(value ctx, value key, value password) {
-	CAMLparam3(ctx, key, password);
-	const char* pwd = NULL;
+CAMLprim value ml_mbedtls_pk_parse_key(value ctx, value key, value password, value rng) {
+	CAMLparam4(ctx, key, password, rng);
+	const unsigned char* pwd = NULL;
 	size_t pwdlen = 0;
 	if (password != Val_none) {
-		pwd = String_val(Field(password, 0));
+		pwd = Bytes_val(Field(password, 0));
 		pwdlen = caml_string_length(Field(password, 0));
 	}
-	CAMLreturn(mbedtls_pk_parse_key(PkContext_val(ctx), String_val(key), caml_string_length(key) + 1, pwd, pwdlen));
+	#if MBEDTLS_VERSION_MAJOR >= 3
+	mbedtls_ctr_drbg_context *ctr_drbg = CtrDrbg_val(rng);
+	CAMLreturn(mbedtls_pk_parse_key(PkContext_val(ctx), Bytes_val(key), caml_string_length(key) + 1, pwd, pwdlen, mbedtls_ctr_drbg_random, NULL));
+	#else
+	CAMLreturn(mbedtls_pk_parse_key(PkContext_val(ctx), Bytes_val(key), caml_string_length(key) + 1, pwd, pwdlen));
+	#endif
 }
 
-CAMLprim value ml_mbedtls_pk_parse_keyfile(value ctx, value path, value password) {
-	CAMLparam3(ctx, path, password);
+CAMLprim value ml_mbedtls_pk_parse_keyfile(value ctx, value path, value password, value rng) {
+	CAMLparam4(ctx, path, password, rng);
 	const char* pwd = NULL;
 	if (password != Val_none) {
 		pwd = String_val(Field(password, 0));
 	}
+	#if MBEDTLS_VERSION_MAJOR >= 3
+	mbedtls_ctr_drbg_context *ctr_drbg = CtrDrbg_val(rng);
+	CAMLreturn(mbedtls_pk_parse_keyfile(PkContext_val(ctx), String_val(path), pwd, mbedtls_ctr_drbg_random, ctr_drbg));
+	#else
 	CAMLreturn(mbedtls_pk_parse_keyfile(PkContext_val(ctx), String_val(path), pwd));
+	#endif
 }
 
 CAMLprim value ml_mbedtls_pk_parse_public_key(value ctx, value key) {
 	CAMLparam2(ctx, key);
-	CAMLreturn(mbedtls_pk_parse_public_key(PkContext_val(ctx), String_val(key), caml_string_length(key) + 1));
+	CAMLreturn(mbedtls_pk_parse_public_key(PkContext_val(ctx), Bytes_val(key), caml_string_length(key) + 1));
 }
 
 CAMLprim value ml_mbedtls_pk_parse_public_keyfile(value ctx, value path) {
@@ -446,15 +502,14 @@ CAMLprim value ml_mbedtls_ssl_handshake(value ssl) {
 
 CAMLprim value ml_mbedtls_ssl_read(value ssl, value buf, value pos, value len) {
 	CAMLparam4(ssl, buf, pos, len);
-	CAMLreturn(Val_int(mbedtls_ssl_read(SslContext_val(ssl), String_val(buf) + Int_val(pos), Int_val(len))));
+	CAMLreturn(Val_int(mbedtls_ssl_read(SslContext_val(ssl), Bytes_val(buf) + Int_val(pos), Int_val(len))));
 }
 
 static int bio_write_cb(void* ctx, const unsigned char* buf, size_t len) {
 	CAMLparam0();
 	CAMLlocal3(r, s, vctx);
-	vctx = (value)ctx;
-	s = caml_alloc_string(len);
-	memcpy(String_val(s), buf, len);
+	vctx = *(value*)ctx;
+	s = caml_alloc_initialized_string(len, (const char*)buf);
 	r = caml_callback2(Field(vctx, 1), Field(vctx, 0), s);
 	CAMLreturn(Int_val(r));
 }
@@ -462,7 +517,7 @@ static int bio_write_cb(void* ctx, const unsigned char* buf, size_t len) {
 static int bio_read_cb(void* ctx, unsigned char* buf, size_t len) {
 	CAMLparam0();
 	CAMLlocal3(r, s, vctx);
-	vctx = (value)ctx;
+	vctx = *(value*)ctx;
 	s = caml_alloc_string(len);
 	r = caml_callback2(Field(vctx, 2), Field(vctx, 0), s);
 	memcpy(buf, String_val(s), len);
@@ -476,7 +531,11 @@ CAMLprim value ml_mbedtls_ssl_set_bio(value ssl, value p_bio, value f_send, valu
 	Store_field(ctx, 0, p_bio);
 	Store_field(ctx, 1, f_send);
 	Store_field(ctx, 2, f_recv);
-	mbedtls_ssl_set_bio(SslContext_val(ssl), (void*)ctx, bio_write_cb, bio_read_cb, NULL);
+	// TODO: this allocation is leaked
+	value *location = malloc(sizeof(value));
+	*location = ctx;
+	caml_register_generational_global_root(location);
+	mbedtls_ssl_set_bio(SslContext_val(ssl), (void*)location, bio_write_cb, bio_read_cb, NULL);
 	CAMLreturn(Val_unit);
 }
 
@@ -492,7 +551,7 @@ CAMLprim value ml_mbedtls_ssl_setup(value ssl, value conf) {
 
 CAMLprim value ml_mbedtls_ssl_write(value ssl, value buf, value pos, value len) {
 	CAMLparam4(ssl, buf, pos, len);
-	CAMLreturn(Val_int(mbedtls_ssl_write(SslContext_val(ssl), String_val(buf) + Int_val(pos), Int_val(len))));
+	CAMLreturn(Val_int(mbedtls_ssl_write(SslContext_val(ssl), Bytes_val(buf) + Int_val(pos), Int_val(len))));
 }
 
 // glue
@@ -520,36 +579,23 @@ CAMLprim value hx_cert_load_defaults(value certificate) {
 	#endif
 
 	#ifdef __APPLE__
-	CFMutableDictionaryRef search;
-	CFArrayRef result;
-	SecKeychainRef keychain;
-	SecCertificateRef item;
-	CFDataRef dat;
-	// Load keychain
-	if (SecKeychainOpen("/System/Library/Keychains/SystemRootCertificates.keychain", &keychain) == errSecSuccess) {
-		// Search for certificates
-		search = CFDictionaryCreateMutable(NULL, 0, NULL, NULL);
-		CFDictionarySetValue(search, kSecClass, kSecClassCertificate);
-		CFDictionarySetValue(search, kSecMatchLimit, kSecMatchLimitAll);
-		CFDictionarySetValue(search, kSecReturnRef, kCFBooleanTrue);
-		CFDictionarySetValue(search, kSecMatchSearchList, CFArrayCreate(NULL, (const void **)&keychain, 1, NULL));
-		if (SecItemCopyMatching(search, (CFTypeRef *)&result) == errSecSuccess) {
-			CFIndex n = CFArrayGetCount(result);
-			for (CFIndex i = 0; i < n; i++) {
-				item = (SecCertificateRef)CFArrayGetValueAtIndex(result, i);
-
-				// Get certificate in DER format
-				dat = SecCertificateCopyData(item);
-				if (dat) {
-					r = mbedtls_x509_crt_parse_der(chain, (unsigned char *)CFDataGetBytePtr(dat), CFDataGetLength(dat));
-					CFRelease(dat);
-					if (r != 0) {
-						CAMLreturn(Val_int(r));
-					}
+	CFArrayRef certs;
+	if (SecTrustCopyAnchorCertificates(&certs) == errSecSuccess) {
+		CFIndex count = CFArrayGetCount(certs);
+		for(CFIndex i = 0; i < count; i++) {
+			SecCertificateRef item = (SecCertificateRef)CFArrayGetValueAtIndex(certs, i);
+
+			// Get certificate in DER format
+			CFDataRef data = SecCertificateCopyData(item);
+			if(data) {
+				r = mbedtls_x509_crt_parse_der(chain, (unsigned char *)CFDataGetBytePtr(data), CFDataGetLength(data));
+				CFRelease(data);
+				if (r != 0) {
+					CAMLreturn(Val_int(r));
 				}
 			}
 		}
-		CFRelease(keychain);
+		CFRelease(certs);
 	}
 	#endif
 

+ 0 - 23
libs/neko/Makefile

@@ -1,23 +0,0 @@
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-SRC=nast.ml nxml.ml binast.ml nbytecode.ml ncompile.ml
-
-all: bytecode native
-
-native: neko.cmxa
-
-bytecode: neko.cma
-
-neko.cmxa: $(SRC)
-	ocamlfind $(OCAMLOPT) -package extlib -safe-string -a -o neko.cmxa $(SRC)
-
-neko.cma: $(SRC)
-	ocamlfind $(OCAMLC) -package extlib -safe-string -a -o neko.cma $(SRC)
-
-clean:
-	rm -rf neko.cmxa neko.cma neko.lib neko.a $(wildcard *.cmx) $(wildcard *.obj) $(wildcard *.o) $(wildcard *.cmi) $(wildcard *.cmo)
-
-.PHONY: all bytecode native clean
-
-Makefile: ;
-$(SRC): ;

+ 7 - 0
libs/objsize/LICENSE

@@ -0,0 +1,7 @@
+According to the README and to [README from backup of objsize new version](https://github.com/ygrek/objsize), 
+objsize is licensed either under BSD 3 Clause License or any version of GNU GENERAL PUBLIC LICENSE 
+published by Free Software Foundation.
+
+For use in Haxe, it was incorporated under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2 of the License, or
+(at your option) any later version.

+ 0 - 29
libs/objsize/Makefile

@@ -1,29 +0,0 @@
-ALL_CFLAGS = $(CFLAGS) -I .
-LIBS =
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-SRC=objsize.mli objsize.ml
-
-all: bytecode native
-
-bytecode: objsize.cma
-
-native: objsize.cmxa
-
-objsize.cma: c_objsize.o $(SRC)
-	$(OCAMLC) -safe-string -a -o objsize.cma $(LIBS) $(SRC)
-
-objsize.cmxa: c_objsize.o $(SRC)
-	$(OCAMLOPT) -safe-string -a -o objsize.cmxa $(LIBS) $(SRC)
-
-c_objsize.o: c_objsize.c
-	$(OCAMLC) -safe-string $(ALL_CFLAGS) c_objsize.c
-
-clean:
-	rm -rf $(wildcard *.cma) $(wildcard *.cmxa) $(wildcard *.cmx) $(wildcard *.cmi) $(wildcard *.cmo) $(wildcard *.obj) $(wildcard *.o) $(wildcard *.a)
-
-.PHONY: all bytecode native clean
-
-Makefile: ;
-$(SRC): ;
-c_objsize.c: ;

+ 0 - 28
libs/pcre2/Makefile

@@ -1,28 +0,0 @@
-ALL_CFLAGS = $(CFLAGS) -I pcre2
-LIBS =
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-SRC = pcre2.ml pcre2_stubs.c
-
-all: bytecode native
-
-bytecode: pcre2.cma
-
-native: pcre2.cmxa
-
-pcre2.cma: pcre2_stubs.o pcre2.ml
-	$(OCAMLC) -safe-string -a -o pcre2.cma $(LIBS) pcre2.ml
-
-pcre2.cmxa: pcre2.ml pcre2_stubs.o
-	$(OCAMLOPT) -safe-string -a -o pcre2.cmxa $(LIBS) pcre2.ml
-
-pcre2_stubs.o: pcre2_stubs.c
-	$(OCAMLC) -safe-string $(ALL_CFLAGS) pcre2_stubs.c
-
-clean:
-	rm -f pcre2.cma pcre2.cmi pcre2.cmx pcre2.cmxa pcre2.o pcre2.obj pcre2_stubs.obj pcre2_stubs.o
-	rm -f pcre2.a libpcre2-8.a libpcre2-8.lib pcre2.cmo
-
-.PHONY: all bytecode native clean
-Makefile: ;
-$(SRC): ;

+ 0 - 81
libs/swflib/Makefile

@@ -1,81 +0,0 @@
-# Makefile generated by OCamake
-# http://tech.motion-twin.com
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-.SUFFIXES : .ml .mli .cmo .cmi .cmx .mll .mly
-
-ALL_CFLAGS= $(CFLAGS) -safe-string -package extlib -I ../extlib-leftovers -I ../extc -g
-LIBS=
-
-SRC=actionScript.ml as3hl.mli as3.mli png.ml swflib.sln swf.ml swfPic.ml as3code.ml as3hlparse.ml as3parse.ml png.mli swfParser.ml
-
-MODULES=as3code.cmx png.cmx swf.cmx actionScript.cmx as3parse.cmx swfPic.cmx as3hlparse.cmx swfParser.cmx
-
-all: native bytecode
-
-native: swflib.cmxa
-
-bytecode: swflib.cma
-
-swflib.cmxa: $(MODULES)
-	ocamlfind $(OCAMLOPT) -safe-string -o swflib.cmxa -a $(LIBS) $(MODULES)
-
-swflib.cma: $(MODULES:.cmx=.cmo)
-	ocamlfind $(OCAMLC) -safe-string -o swflib.cma -a $(LFLAGS) $(LIBS) $(MODULES:.cmx=.cmo)
-
-actionScript.cmx: swf.cmx
-
-actionScript.cmo: swf.cmi
-
-as3code.cmo: as3.cmi
-
-as3code.cmx: as3.cmi
-
-as3hl.cmi: as3.cmi
-
-as3hlparse.cmo: as3parse.cmo as3hl.cmi as3code.cmo as3.cmi
-
-as3hlparse.cmx: as3parse.cmx as3hl.cmi as3code.cmx as3.cmi
-
-as3parse.cmo: as3code.cmo as3.cmi
-
-as3parse.cmx: as3code.cmx as3.cmi
-
-png.cmo: png.cmi
-
-png.cmx: png.cmi
-
-swf.cmo: as3.cmi
-
-swf.cmx: as3.cmi
-
-swfParser.cmo: swf.cmo as3parse.cmo actionScript.cmo
-
-swfParser.cmx: swf.cmx as3parse.cmx actionScript.cmx
-
-swfPic.cmx: swf.cmx png.cmi
-
-clean:
-	rm -f swflib.cmxa swflib.cma swflib.lib swflib.a as3.cmi as3hl.cmi
-	rm -f $(MODULES) $(MODULES:.cmx=.obj) $(MODULES:.cmx=.cmi) $(MODULES:.cmx=.o) $(MODULES:.cmx=.cmo)
-
-# SUFFIXES
-.ml.cmo:
-	ocamlfind $(OCAMLC) $(ALL_CFLAGS) -c $<
-
-.ml.cmx:
-	ocamlfind $(OCAMLOPT) $(ALL_CFLAGS) -c $<
-
-.mli.cmi:
-	ocamlfind $(OCAMLC) $(ALL_CFLAGS) $<
-
-.mll.ml:
-	ocamlfind ocamllex $<
-
-.mly.ml:
-	ocamlfind ocamlyacc $<
-
-.PHONY: all bytecode native clean
-
-Makefile: ;
-$(SRC): ;

+ 0 - 22
libs/ziplib/Makefile

@@ -1,22 +0,0 @@
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-SRC=zlib.mli zlib.ml zip.mli zip.ml
-
-all: native bytecode
-
-native: ziplib.cmxa
-ziplib.cmxa: $(SRC)
-	ocamlfind $(OCAMLOPT) -safe-string -g -I ../extlib -I ../extc -a -o ziplib.cmxa $(SRC)
-
-bytecode: ziplib.cma
-ziplib.cma: $(SRC)
-	ocamlfind $(OCAMLC) -safe-string -g -I ../extlib -I ../extc -a -o ziplib.cma $(SRC)
-
-clean:
-	rm -rf ziplib.cmxa ziplib.cma ziplib.lib ziplib.a $(wildcard *.cmx) $(wildcard *.obj) $(wildcard *.o) $(wildcard *.cmi) $(wildcard *.cmo)
-
-.PHONY: all native bytecode clean
-
-Makefile: ;
-
-$(SRC): ;

+ 0 - 7
libs/ziplib/test/Makefile

@@ -1,7 +0,0 @@
-OCAMLOPT=ocamlopt
-
-all: ../zip.cmxa minizip.ml
-	$(OCAMLOPT) -g -g -I .. -I ../../extc -o minizip -cclib ../../extc/extc_stubs.o -cclib -lz unix.cmxa ../zip.cmxa minizip.ml
-
-clean:
-	rm -rf minizip $(wildcard *.cmx) $(wildcard *.obj) $(wildcard *.o) $(wildcard *.cmi)

+ 67 - 10
src-json/define.json

@@ -18,7 +18,8 @@
 	},
 	{
 		"name": "AnalyzerTimes",
-		"define": "analyzer-times",
+		"define": "times.analyzer",
+		"deprecatedDefine": "analyzer-times",
 		"doc": "Record detailed timers for the analyzer",
 		"params": ["level: 0 | 1 | 2"]
 	},
@@ -53,6 +54,7 @@
 		"define": "dce",
 		"doc": "Set the dead code elimination mode. (default: std)",
 		"params": ["mode: std | full | no"],
+		"default": "std",
 		"links": ["https://haxe.org/manual/cr-dce.html"]
 	},
 	{
@@ -66,6 +68,16 @@
 		"define": "debug",
 		"doc": "Activated when compiling with -debug."
 	},
+	{
+		"name": "DisableHxbCache",
+		"define": "disable-hxb-cache",
+		"doc": "Use in-memory cache instead of hxb powered cache."
+	},
+	{
+		"name": "DisableHxbOptimizations",
+		"define": "disable-hxb-optimizations",
+		"doc": "Disable shortcuts used by hxb cache to speed up display requests."
+	},
 	{
 		"name": "DisableUnicodeStrings",
 		"define": "disable-unicode-strings",
@@ -104,6 +116,7 @@
 		"name": "DumpPath",
 		"define": "dump-path",
 		"doc": "Path to generate dumps to (default: \"dump\").",
+		"default": "dump",
 		"params": ["path"]
 	},
 	{
@@ -114,7 +127,8 @@
 	{
 		"name": "DumpIgnoreVarIds",
 		"define": "dump-ignore-var-ids",
-		"doc": "Remove variable IDs from non-pretty dumps (helps with diff)."
+		"doc": "Remove variable IDs from non-pretty dumps (helps with diff).",
+		"default": "1"
 	},
 	{
 		"name": "DynamicInterfaceClosures",
@@ -127,6 +141,7 @@
 		"define": "eval-call-stack-depth",
 		"doc": "Set maximum call stack depth for eval. (default: 1000)",
 		"platforms": ["eval"],
+		"default": "1000",
 		"params": ["depth"]
 	},
 	{
@@ -140,6 +155,7 @@
 		"define": "eval-print-depth",
 		"doc": "Set maximum print depth (before replacing with '<...>') for eval. (default: 5)",
 		"platforms": ["eval"],
+		"default": "5",
 		"params": ["depth"]
 	},
 	{
@@ -156,13 +172,20 @@
 	},
 	{
 		"name": "EvalTimes",
-		"define": "eval-times",
+		"define": "times.eval",
+		"deprecatedDefine": "eval-times",
 		"doc": "Record per-method execution times in macro/interp mode. Implies eval-stack.",
 		"platforms": ["eval"]
 	},
+	{
+		"name": "FailFast",
+		"define": "fail-fast",
+		"doc": "Abort compilation when first error occurs."
+	},
 	{
 		"name": "FilterTimes",
-		"define": "filter-times",
+		"define": "times.filter",
+		"deprecatedDefine": "filter-times",
 		"doc": "Record per-filter execution times upon --times."
 	},
 	{
@@ -223,6 +246,27 @@
 		"doc": "The current Haxe version value in SemVer format.",
 		"reserved": true
 	},
+	{
+		"name": "Haxe3",
+		"define": "haxe3",
+		"doc": "The current Haxe major version is >= 3.",
+		"default": "1",
+		"reserved": true
+	},
+	{
+		"name": "Haxe4",
+		"define": "haxe4",
+		"doc": "The current Haxe major version is >= 4.",
+		"default": "1",
+		"reserved": true
+	},
+	{
+		"name": "Haxe5",
+		"define": "haxe5",
+		"doc": "The current Haxe major version is >= 5.",
+		"default": "1",
+		"reserved": true
+	},
 	{
 		"name": "HaxeNext",
 		"define": "haxe-next",
@@ -252,10 +296,16 @@
 	{
 		"name": "HlVer",
 		"define": "hl-ver",
-		"doc": "The HashLink version to target. (default: 1.10.0)",
+		"doc": "The HashLink version to target. (default: 1.15.0)",
 		"platforms": ["hl"],
 		"params": ["version"]
 	},
+	{
+		"name": "HxbTimes",
+		"define": "times.hxb",
+		"deprecatedDefine": "hxb-times",
+		"doc": "Display hxb timing when used with `--times`."
+	},
 	{
 		"name": "HxcppApiLevel",
 		"define": "hxcpp-api-level",
@@ -460,6 +510,7 @@
 		"name": "LoopUnrollMaxCost",
 		"define": "loop-unroll-max-cost",
 		"doc": "Maximum cost (number of expressions * iterations) before loop unrolling is canceled. (default: 250)",
+		"default": "250",
 		"params": ["cost"]
 	},
 	{
@@ -490,7 +541,8 @@
 	},
 	{
 		"name": "MacroTimes",
-		"define": "macro-times",
+		"define": "times.macro",
+		"deprecatedDefine": "macro-times",
 		"doc": "Display per-macro timing when used with `--times`."
 	},
 	{
@@ -534,7 +586,8 @@
 	{
 		"name": "NoDeprecationWarnings",
 		"define": "no-deprecation-warnings",
-		"doc": "Do not warn if fields annotated with `@:deprecated` are used."
+		"doc": "Do not warn if fields annotated with `@:deprecated` are used.",
+		"deprecated": "Use -w to configure warnings. See https://haxe.org/manual/cr-warnings.html for more information."
 	},
 	{
 		"name": "NoFlashOverride",
@@ -586,7 +639,8 @@
 	{
 		"name": "OldErrorFormat",
 		"define": "old-error-format",
-		"doc": "Use Haxe 3.x zero-based column error messages instead of new one-based format."
+		"doc": "Use Haxe 3.x zero-based column error messages instead of new one-based format.",
+		"deprecated": "OldErrorFormat has been removed in Haxe 5"
 	},
 	{
 		"name": "PhpPrefix",
@@ -771,7 +825,8 @@
 	{
 		"name": "WarnVarShadowing",
 		"define": "warn-var-shadowing",
-		"doc": "Warn about shadowing variable declarations."
+		"doc": "Warn about shadowing variable declarations.",
+		"deprecated": "Use -w to configure warnings. See https://haxe.org/manual/cr-warnings.html for more information."
 	},
 	{
 		"name": "NoTre",
@@ -781,7 +836,8 @@
 	{
 		"name": "MessageReporting",
 		"define": "message.reporting",
-		"doc": "Select message reporting mode for compiler output. (default: classic)",
+		"doc": "Select message reporting mode for compiler output. (default: pretty)",
+		"default": "pretty",
 		"params": ["mode: classic | pretty | indent"]
 	},
 	{
@@ -803,6 +859,7 @@
 		"name": "MessageLogFormat",
 		"define": "message.log-format",
 		"doc": "Select message reporting mode for message log file. (default: indent)",
+		"default": "indent",
 		"params": ["format: classic | pretty | indent"]
 	}
 ]

+ 7 - 1
src-json/meta.json

@@ -336,6 +336,12 @@
 		"targets": ["TAbstractField"],
 		"links": ["https://haxe.org/manual/types-abstract-implicit-casts.html"]
 	},
+	{
+		"name": "FunctionalInterface",
+		"metadata": ":functionalInterface",
+		"doc": "Mark an interface as a functional interface",
+		"platforms": ["jvm"]
+	},
 	{
 		"name": "FunctionCode",
 		"metadata": ":functionCode",
@@ -481,7 +487,7 @@
 		"name": "InheritDoc",
 		"metadata": ":inheritDoc",
 		"doc": "Append documentation from a parent field or class (if used without an argument) or from a specified class or field (if used like @:inheritDoc(pack.Some.field)).",
-		"targets": ["TClass", "TClass", "TEnum", "TAbstract", "TAnyField"]
+		"targets": ["TClass", "TEnum", "TAbstract", "TAnyField"]
 	},
 	{
 		"name": "InitPackage",

+ 13 - 1
src-json/warning.json

@@ -76,7 +76,8 @@
 	{
 		"name": "WVarShadow",
 		"doc": "A local variable hides another by using the same name",
-		"parent": "WTyper"
+		"parent": "WTyper",
+		"enabled": false
 	},
 	{
 		"name": "WExternWithExpr",
@@ -118,6 +119,12 @@
 		"doc": "Constructor call could not be inlined because a field is uninitialized",
 		"parent": "WTyper"
 	},
+	{
+		"name": "WUnsafeEnumEquality",
+		"doc": "Equality operations on enums with parameters might not work as expected",
+		"parent": "WTyper",
+		"enabled": false
+	},
 	{
 		"name": "WHxb",
 		"doc": "Hxb (either --hxb output or haxe compiler cache) related warnings"
@@ -126,5 +133,10 @@
 		"name": "WUnboundTypeParameter",
 		"doc": "Hxb (either --hxb output or haxe compiler cache) failed to link a type parameter to an actual type",
 		"parent": "WHxb"
+	},
+	{
+		"name": "WUnclosedMonomorph",
+		"doc": "Hxb writer failed to close a monomorph (that monomorph should have been closed in the first place)",
+		"parent": "WHxb"
 	}
 ]

+ 0 - 14
src-prebuild/dune

@@ -1,14 +0,0 @@
-(include_subdirs no)
-
-(env
-	(_
-		(flags (-w -9 -w -32))
-	)
-)
-
-(executable
-	(name prebuild)
-	(public_name haxe_prebuild)
-	(package haxe_prebuild)
-	(libraries extlib json)
-)

+ 4 - 351
src/codegen/codegen.ml

@@ -19,7 +19,6 @@
 
 open Ast
 open Type
-open Common
 open Globals
 open Extlib_leftovers
 
@@ -48,152 +47,6 @@ let get_properties fields =
 			| _ -> acc
 	) [] fields
 
-let add_property_field com c =
-	let p = c.cl_pos in
-	let props = get_properties (c.cl_ordered_statics @ c.cl_ordered_fields) in
-	match props with
-	| [] -> ()
-	| _ ->
-		let fields,values = List.fold_left (fun (fields,values) (n,v) ->
-			let cf = mk_field n com.basic.tstring p null_pos in
-			PMap.add n cf fields,((n,null_pos,NoQuotes),Texpr.Builder.make_string com.basic v p) :: values
-		) (PMap.empty,[]) props in
-		let t = mk_anon ~fields (ref Closed) in
-		let e = mk (TObjectDecl values) t p in
-		let cf = mk_field ~static:true "__properties__" t p null_pos in
-		cf.cf_expr <- Some e;
-		c.cl_statics <- PMap.add cf.cf_name cf c.cl_statics;
-		c.cl_ordered_statics <- cf :: c.cl_ordered_statics
-
-(* -------------------------------------------------------------------------- *)
-(* FIX OVERRIDES *)
-
-(*
-	on some platforms which doesn't support type parameters, we must have the
-	exact same type for overridden/implemented function as the original one
-*)
-
-let rec find_field com c f =
-	try
-		(match c.cl_super with
-		| None ->
-			raise Not_found
-		| Some ( {cl_path = (["cpp"],"FastIterator")}, _ ) ->
-			raise Not_found (* This is a strongly typed 'extern' and the usual rules don't apply *)
-		| Some (c,_) ->
-			find_field com c f)
-	with Not_found -> try
-		if com.platform = Cpp || com.platform = Hl then (* uses delegation for interfaces *)
-			raise Not_found;
-		let rec loop = function
-			| [] ->
-				raise Not_found
-			| (c,_) :: l ->
-				try
-					find_field com c f
-				with
-					Not_found -> loop l
-		in
-		loop c.cl_implements
-	with Not_found ->
-		let f = PMap.find f.cf_name c.cl_fields in
-		(match f.cf_kind with Var { v_read = AccRequire _ } -> raise Not_found | _ -> ());
-		f
-
-let fix_override com c f fd =
-	let f2 = (try Some (find_field com c f) with Not_found -> None) in
-	match f2,fd with
-		| Some (f2), Some(fd) ->
-			let targs, tret = (match follow f2.cf_type with TFun (args,ret) -> args, ret | _ -> die "" __LOC__) in
-			let changed_args = ref [] in
-			let prefix = "_tmp_" in
-			let nargs = List.map2 (fun ((v,ct) as cur) (_,_,t2) ->
-				try
-					type_eq EqStrict (monomorphs c.cl_params (monomorphs f.cf_params v.v_type)) t2;
-					(* Flash generates type parameters with a single constraint as that constraint type, so we
-					   have to detect this case and change the variable (issue #2712). *)
-					begin match follow v.v_type with
-						| TInst({cl_kind = KTypeParameter ttp} as cp,_) when com.platform = Flash ->
-							begin match get_constraints ttp with
-							| [tc] ->
-								if List.exists (fun tp -> tp.ttp_name = (snd cp.cl_path)) c.cl_params then raise (Unify_error [])
-							| _ ->
-								()
-							end
-						| _ ->
-							()
-					end;
-					cur
-				with Unify_error _ ->
-					let v2 = alloc_var VGenerated (prefix ^ v.v_name) t2 v.v_pos in
-					changed_args := (v,v2) :: !changed_args;
-					v2,ct
-			) fd.tf_args targs in
-			let fd2 = {
-				tf_args = nargs;
-				tf_type = tret;
-				tf_expr = (match List.rev !changed_args with
-					| [] -> fd.tf_expr
-					| args ->
-						let e = fd.tf_expr in
-						let el = (match e.eexpr with TBlock el -> el | _ -> [e]) in
-						let p = (match el with [] -> e.epos | e :: _ -> e.epos) in
-						let el_v = List.map (fun (v,v2) ->
-							mk (TVar (v,Some (mk (TCast (mk (TLocal v2) v2.v_type p,None)) v.v_type p))) com.basic.tvoid p
-						) args in
-						{ e with eexpr = TBlock (el_v @ el) }
-				);
-			} in
-			let targs = List.map (fun(v,c) -> (v.v_name, Option.is_some c, v.v_type)) nargs in
-			let fde = (match f.cf_expr with None -> die "" __LOC__ | Some e -> e) in
-			f.cf_expr <- Some { fde with eexpr = TFunction fd2 };
-			f.cf_type <- TFun(targs,tret);
-		| Some(f2), None when (has_class_flag c CInterface) ->
-			let targs, tret = (match follow f2.cf_type with TFun (args,ret) -> args, ret | _ -> die "" __LOC__) in
-			f.cf_type <- TFun(targs,tret)
-		| _ ->
-			()
-
-let fix_overrides com t =
-	match t with
-	| TClassDecl c ->
-		(* overrides can be removed from interfaces *)
-		if (has_class_flag c CInterface) then
-			c.cl_ordered_fields <- List.filter (fun f ->
-				try
-					if find_field com c f == f then raise Not_found;
-					c.cl_fields <- PMap.remove f.cf_name c.cl_fields;
-					false;
-				with Not_found ->
-					true
-			) c.cl_ordered_fields;
-		List.iter (fun f ->
-			match f.cf_expr, f.cf_kind with
-			| Some { eexpr = TFunction fd }, Method (MethNormal | MethInline) ->
-				fix_override com c f (Some fd)
-			| None, Method (MethNormal | MethInline) when (has_class_flag c CInterface) ->
-				fix_override com c f None
-			| _ ->
-				()
-		) c.cl_ordered_fields
-	| _ ->
-		()
-
-(*
-	PHP does not allow abstract classes extending other abstract classes to override any fields, so these duplicates
-	must be removed from the child interface
-*)
-let fix_abstract_inheritance com t =
-	match t with
-	| TClassDecl c when (has_class_flag c CInterface) ->
-		c.cl_ordered_fields <- List.filter (fun f ->
-			let b = try (find_field com c f) == f
-			with Not_found -> false in
-			if not b then c.cl_fields <- PMap.remove f.cf_name c.cl_fields;
-			b;
-		) c.cl_ordered_fields
-	| _ -> ()
-
 (* -------------------------------------------------------------------------- *)
 (* MISC FEATURES *)
 
@@ -216,210 +69,16 @@ let bytes_serialize data =
 	let tbl = Array.init (String.length b64) (fun i -> String.get b64 i) in
 	Bytes.unsafe_to_string (Base64.str_encode ~tbl data)
 
-module Dump = struct
-	(*
-		Make a dump of the full typed AST of all types
-	*)
-	let create_dumpfile acc l =
-		let ch = Path.create_file false ".dump" acc l in
-		let buf = Buffer.create 0 in
-		buf, (fun () ->
-			output_string ch (Buffer.contents buf);
-			close_out ch)
-
-	let create_dumpfile_from_path com path =
-		let buf,close = create_dumpfile [] ((dump_path com) :: (platform_name_macro com) :: fst path @ [snd path]) in
-		buf,close
-
-	let dump_types com pretty =
-		let s_type = s_type (Type.print_context()) in
-		let s_expr,s_type_param = if not pretty then
-			(Type.s_expr_ast (not (Common.defined com Define.DumpIgnoreVarIds)) "\t"),(Printer.s_type_param "")
-		else
-			(Type.s_expr_pretty false "\t" true),(s_type_param s_type)
-		in
-		let params tl = match tl with
-			| [] -> ""
-			| l -> Printf.sprintf "<%s>" (String.concat ", " (List.map s_type_param l))
-		in
-		List.iter (fun mt ->
-			let path = Type.t_path mt in
-			let buf,close = create_dumpfile_from_path com path in
-			let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
-			let s_metas ml tabs =
-				let args el =
-					match el with
-					| [] -> ""
-					| el -> Printf.sprintf "(%s)" (String.concat ", " (List.map (fun e -> Ast.Printer.s_expr e) el)) in
-				match ml with
-				| [] -> ""
-				| ml -> String.concat " " (List.map (fun me -> match me with (m,el,_) -> "@" ^ Meta.to_string m ^ args el) ml) ^ "\n" ^ tabs in
-			(match mt with
-			| Type.TClassDecl c ->
-				let s_cf_expr f =
-					match f.cf_expr with
-					| None -> ""
-					| Some e -> Printf.sprintf "%s" (s_expr s_type e) in
-				let is_inline_var v : bool = v = Var { v_read = AccInline; v_write = AccNever } in
-				let rec print_field stat f =
-					print "\n\t%s%s%s%s%s %s%s"
-						(s_metas f.cf_meta "\t")
-						(if (has_class_field_flag f CfPublic && not ((has_class_flag c CExtern) || (has_class_flag c CInterface))) then "public " else "")
-						(if stat then "static " else "")
-						(match f.cf_kind with
-							| Var v when (is_inline_var f.cf_kind) -> "inline "
-							| Var v -> ""
-							| Method m ->
-								match m with
-								| MethNormal -> ""
-								| MethDynamic -> "dynamic "
-								| MethInline -> "inline "
-								| MethMacro -> "macro ")
-						(match f.cf_kind with Var v -> "var" | Method m -> "function")
-						(f.cf_name ^ match f.cf_kind with
-							| Var { v_read = AccNormal; v_write = AccNormal } -> ""
-							| Var v when (is_inline_var f.cf_kind) -> ""
-							| Var v -> "(" ^ s_access true v.v_read ^ "," ^ s_access false v.v_write ^ ")"
-							| _ -> "")
-						(params f.cf_params);
-					(match f.cf_kind with
-						| Var v -> print ":%s%s;" (s_type f.cf_type)
-							(match f.cf_expr with
-							| None -> ""
-							| Some e -> " = " ^ (s_cf_expr f));
-						| Method m -> if ((has_class_flag c CExtern) || (has_class_flag c CInterface)) then (
-							match f.cf_type with
-							| TFun(al,t) -> print "(%s):%s;" (String.concat ", " (
-								List.map (fun (n,o,t) -> n ^ ":" ^ (s_type t)) al))
-								(s_type t)
-							| _ -> ()
-						) else print "%s" (s_cf_expr f));
-					print "\n";
-					List.iter (fun f -> print_field stat f) f.cf_overloads
-				in
-				print "%s%s%s%s %s%s" (s_metas c.cl_meta "") (if c.cl_private then "private " else "") (if (has_class_flag c CExtern) then "extern " else "") (if (has_class_flag c CInterface) then "interface" else "class") (s_type_path path) (params c.cl_params);
-				(match c.cl_super with None -> () | Some (c,pl) -> print " extends %s" (s_type (TInst (c,pl))));
-				List.iter (fun (c,pl) -> print " implements %s" (s_type (TInst (c,pl)))) c.cl_implements;
-				(match c.cl_array_access with None -> () | Some t -> print " implements ArrayAccess<%s>" (s_type t));
-				print " {\n";
-				(match c.cl_constructor with
-				| None -> ()
-				| Some f -> print_field false f);
-				List.iter (print_field false) c.cl_ordered_fields;
-				List.iter (print_field true) c.cl_ordered_statics;
-				(match TClass.get_cl_init c with
-				| None -> ()
-				| Some e ->
-					print "\n\tstatic function __init__() ";
-					print "%s" (s_expr s_type e);
-					print "\n");
-				print "}";
-			| Type.TEnumDecl e ->
-				print "%s%s%senum %s%s {\n" (s_metas e.e_meta "") (if e.e_private then "private " else "") (if e.e_extern then "extern " else "") (s_type_path path) (params e.e_params);
-				List.iter (fun n ->
-					let f = PMap.find n e.e_constrs in
-					print "\t%s%s;\n" f.ef_name (
-						match f.ef_type with
-						| TFun (al,t) -> Printf.sprintf "(%s)" (String.concat ", "
-							(List.map (fun (n,o,t) -> (if o then "?" else "") ^ n ^ ":" ^ (s_type t)) al))
-						| _ -> "")
-				) e.e_names;
-				print "}"
-			| Type.TTypeDecl t ->
-				print "%s%stypedef %s%s = %s" (s_metas t.t_meta "") (if t.t_private then "private " else "") (s_type_path path) (params t.t_params) (s_type t.t_type);
-			| Type.TAbstractDecl a ->
-				print "%s%sabstract %s%s%s%s {}" (s_metas a.a_meta "") (if a.a_private then "private " else "") (s_type_path path) (params a.a_params)
-				(String.concat " " (List.map (fun t -> " from " ^ s_type t) a.a_from))
-				(String.concat " " (List.map (fun t -> " to " ^ s_type t) a.a_to));
-			);
-			close();
-		) com.types
-
-	let dump_record com =
-		List.iter (fun mt ->
-			let buf,close = create_dumpfile_from_path com (t_path mt) in
-			let s = match mt with
-				| TClassDecl c -> Printer.s_tclass "" c
-				| TEnumDecl en -> Printer.s_tenum "" en
-				| TTypeDecl t -> Printer.s_tdef "" t
-				| TAbstractDecl a -> Printer.s_tabstract "" a
-			in
-			Buffer.add_string buf s;
-			close();
-		) com.types
-
-	let dump_position com =
-		List.iter (fun mt ->
-			match mt with
-				| TClassDecl c ->
-					let buf,close = create_dumpfile_from_path com (t_path mt) in
-					Printf.bprintf buf "%s\n" (s_type_path c.cl_path);
-					let field cf =
-						Printf.bprintf buf "\t%s\n" cf.cf_name;
-						begin match cf.cf_expr with
-						| None -> ()
-						| Some e ->
-							Printf.bprintf buf "%s\n" (Texpr.dump_with_pos "\t" e);
-						end
-					in
-					Option.may field c.cl_constructor;
-					List.iter field c.cl_ordered_statics;
-					List.iter field c.cl_ordered_fields;
-					close();
-				| _ ->
-					()
-		) com.types
-
-	let dump_types com =
-		match Common.defined_value_safe com Define.Dump with
-			| "pretty" -> dump_types com true
-			| "record" -> dump_record com
-			| "position" -> dump_position com
-			| _ -> dump_types com false
-
-	let dump_dependencies ?(target_override=None) com =
-		let target_name = match target_override with
-			| None -> platform_name_macro com
-			| Some s -> s
-		in
-		let dump_dependencies_path = [dump_path com;target_name;"dependencies"] in
-		let buf,close = create_dumpfile [] dump_dependencies_path in
-		let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
-		let dep = Hashtbl.create 0 in
-		List.iter (fun m ->
-			print "%s:\n" (Path.UniqueKey.lazy_path m.m_extra.m_file);
-			PMap.iter (fun _ mdep ->
-				let m2 = com.module_lut#find mdep.md_path in
-				let file = Path.UniqueKey.lazy_path m2.m_extra.m_file in
-				print "\t%s\n" file;
-				let l = try Hashtbl.find dep file with Not_found -> [] in
-				Hashtbl.replace dep file (m :: l)
-			) m.m_extra.m_deps;
-		) com.Common.modules;
-		close();
-		let dump_dependants_path = [dump_path com;target_name;"dependants"] in
-		let buf,close = create_dumpfile [] dump_dependants_path in
-		let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
-		Hashtbl.iter (fun n ml ->
-			print "%s:\n" n;
-			List.iter (fun m ->
-				print "\t%s\n" (Path.UniqueKey.lazy_path m.m_extra.m_file);
-			) ml;
-		) dep;
-		close()
-end
-
 (*
 	Build a default safe-cast expression :
 	{ var $t = <e>; if( Std.is($t,<t>) ) $t else throw "Class cast error"; }
 *)
-let default_cast ?(vtmp="$t") com e texpr t p =
-	let api = com.basic in
+let default_cast ?(vtmp="$t") api std e texpr t p =
 	let vtmp = alloc_var VGenerated vtmp e.etype e.epos in
 	let var = mk (TVar (vtmp,Some e)) api.tvoid p in
 	let vexpr = mk (TLocal vtmp) e.etype p in
 	let texpr = Texpr.Builder.make_typeexpr texpr p in
-	let is = Texpr.Builder.resolve_and_make_static_call com.std "isOfType" [vexpr;texpr] p in
+	let is = Texpr.Builder.resolve_and_make_static_call std "isOfType" [vexpr;texpr] p in
 	let enull = Texpr.Builder.make_null vexpr.etype p in
 	let eop = Texpr.Builder.binop OpEq vexpr enull api.tbool p in
 	let echeck = Texpr.Builder.binop OpBoolOr is eop api.tbool p in
@@ -448,12 +107,12 @@ module UnificationCallback = struct
 			List.map (fun e -> f e t_dynamic) el
 end;;
 
-let interpolate_code com code tl f_string f_expr p =
+let interpolate_code error code tl f_string f_expr p =
 	let exprs = Array.of_list tl in
 	let i = ref 0 in
 	let err msg =
 		let pos = { p with pmin = p.pmin + !i } in
-		com.error msg pos
+		error msg pos
 	in
 	let regex = Str.regexp "[{}]" in
 	let rec loop m = match m with
@@ -482,12 +141,6 @@ let interpolate_code com code tl f_string f_expr p =
 	in
 	loop (Str.full_split regex code)
 
-let map_source_header com f =
-	match Common.defined_value_safe com Define.SourceHeader with
-	| "" -> ()
-	| s -> f s
-
-
 (* Static extensions for classes *)
 module ExtClass = struct
 	let add_static_init c cf e p =

+ 223 - 0
src/codegen/dump.ml

@@ -0,0 +1,223 @@
+open Globals
+open Common
+open Type
+
+let dump_path defines =
+	Define.defined_value_safe ~default:"dump" defines Define.DumpPath
+
+(*
+	Make a dump of the full typed AST of all types
+*)
+let create_dumpfile acc l =
+	let ch = Path.create_file false ".dump" acc l in
+	let buf = Buffer.create 0 in
+	buf, (fun () ->
+		output_string ch (Buffer.contents buf);
+		close_out ch)
+
+let create_dumpfile_from_path com path =
+	let buf,close = create_dumpfile [] ((dump_path com.defines) :: (platform_name_macro com) :: fst path @ [snd path]) in
+	buf,close
+
+let dump_types com pretty =
+	let print_ids = not (Common.defined com Define.DumpIgnoreVarIds) in
+	let restore =
+		if not pretty then
+			let old = !TPrinting.MonomorphPrinting.show_mono_ids in
+			TPrinting.MonomorphPrinting.show_mono_ids := print_ids;
+			fun () -> TPrinting.MonomorphPrinting.show_mono_ids := old
+		else fun () -> ()
+	in
+	let s_type = s_type (Type.print_context()) in
+	let s_expr,s_type_param = if not pretty then
+		(Type.s_expr_ast print_ids "\t"),(Printer.s_type_param "")
+	else
+		(Type.s_expr_pretty false "\t" true),(s_type_param s_type)
+	in
+	let params tl = match tl with
+		| [] -> ""
+		| l -> Printf.sprintf "<%s>" (String.concat ", " (List.map s_type_param l))
+	in
+	let f mt =
+		let path = Type.t_path mt in
+		let buf,close = create_dumpfile_from_path com path in
+		let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
+		let s_metas ml tabs =
+			let args el =
+				match el with
+				| [] -> ""
+				| el -> Printf.sprintf "(%s)" (String.concat ", " (List.map (fun e -> Ast.Printer.s_expr e) el)) in
+			match ml with
+			| [] -> ""
+			| ml -> String.concat " " (List.map (fun me -> match me with (m,el,_) -> "@" ^ Meta.to_string m ^ args el) ml) ^ "\n" ^ tabs in
+		(match mt with
+		| Type.TClassDecl c ->
+			let s_cf_expr f =
+				match f.cf_expr with
+				| None -> ""
+				| Some e -> Printf.sprintf "%s" (s_expr s_type e) in
+			let is_inline_var v : bool = v = Var { v_read = AccInline; v_write = AccNever } in
+			let rec print_field stat f =
+				print "\n\t%s%s%s%s%s %s%s"
+					(s_metas f.cf_meta "\t")
+					(if (has_class_field_flag f CfPublic && not ((has_class_flag c CExtern) || (has_class_flag c CInterface))) then "public " else "")
+					(if stat then "static " else "")
+					(match f.cf_kind with
+						| Var v when (is_inline_var f.cf_kind) -> "inline "
+						| Var v -> ""
+						| Method m ->
+							match m with
+							| MethNormal -> ""
+							| MethDynamic -> "dynamic "
+							| MethInline -> "inline "
+							| MethMacro -> "macro ")
+					(match f.cf_kind with Var v -> "var" | Method m -> "function")
+					(f.cf_name ^ match f.cf_kind with
+						| Var { v_read = AccNormal; v_write = AccNormal } -> ""
+						| Var v when (is_inline_var f.cf_kind) -> ""
+						| Var v -> "(" ^ s_access true v.v_read ^ "," ^ s_access false v.v_write ^ ")"
+						| _ -> "")
+					(params f.cf_params);
+				(match f.cf_kind with
+					| Var v -> print ":%s%s;" (s_type f.cf_type)
+						(match f.cf_expr with
+						| None -> ""
+						| Some e -> " = " ^ (s_cf_expr f));
+					| Method m -> if ((has_class_flag c CExtern) || (has_class_flag c CInterface)) then (
+						match f.cf_type with
+						| TFun(al,t) -> print "(%s):%s;" (String.concat ", " (
+							List.map (fun (n,o,t) -> n ^ ":" ^ (s_type t)) al))
+							(s_type t)
+						| _ -> ()
+					) else print "%s" (s_cf_expr f));
+				print "\n";
+				List.iter (fun f -> print_field stat f) f.cf_overloads
+			in
+			print "%s%s%s%s %s%s" (s_metas c.cl_meta "") (if c.cl_private then "private " else "") (if (has_class_flag c CExtern) then "extern " else "") (if (has_class_flag c CInterface) then "interface" else "class") (s_type_path path) (params c.cl_params);
+			(match c.cl_super with None -> () | Some (c,pl) -> print " extends %s" (s_type (TInst (c,pl))));
+			List.iter (fun (c,pl) -> print " implements %s" (s_type (TInst (c,pl)))) c.cl_implements;
+			(match c.cl_array_access with None -> () | Some t -> print " implements ArrayAccess<%s>" (s_type t));
+			print " {\n";
+			(match c.cl_constructor with
+			| None -> ()
+			| Some f -> print_field false f);
+			List.iter (print_field false) c.cl_ordered_fields;
+			List.iter (print_field true) c.cl_ordered_statics;
+			(match TClass.get_cl_init c with
+			| None -> ()
+			| Some e ->
+				print "\n\tstatic function __init__() ";
+				print "%s" (s_expr s_type e);
+				print "\n");
+			print "}";
+		| Type.TEnumDecl e ->
+			print "%s%s%senum %s%s {\n" (s_metas e.e_meta "") (if e.e_private then "private " else "") (if has_enum_flag e EnExtern then "extern " else "") (s_type_path path) (params e.e_params);
+			List.iter (fun n ->
+				let f = PMap.find n e.e_constrs in
+				print "\t%s%s;\n" f.ef_name (
+					match f.ef_type with
+					| TFun (al,t) -> Printf.sprintf "(%s)" (String.concat ", "
+						(List.map (fun (n,o,t) -> (if o then "?" else "") ^ n ^ ":" ^ (s_type t)) al))
+					| _ -> "")
+			) e.e_names;
+			print "}"
+		| Type.TTypeDecl t ->
+			print "%s%stypedef %s%s = %s" (s_metas t.t_meta "") (if t.t_private then "private " else "") (s_type_path path) (params t.t_params) (s_type t.t_type);
+		| Type.TAbstractDecl a ->
+			print "%s%sabstract %s%s%s%s {}" (s_metas a.a_meta "") (if a.a_private then "private " else "") (s_type_path path) (params a.a_params)
+			(String.concat " " (List.map (fun t -> " from " ^ s_type t) a.a_from))
+			(String.concat " " (List.map (fun t -> " to " ^ s_type t) a.a_to));
+		);
+		close()
+	in
+	Parallel.run_in_new_pool com.timer_ctx (fun pool ->
+		Parallel.ParallelArray.iter pool f (Array.of_list com.types)
+	);
+	restore()
+
+let dump_record com =
+	let f mt =
+		let buf,close = create_dumpfile_from_path com (t_path mt) in
+		let s = match mt with
+			| TClassDecl c -> Printer.s_tclass "" c
+			| TEnumDecl en -> Printer.s_tenum "" en
+			| TTypeDecl t -> Printer.s_tdef "" t
+			| TAbstractDecl a -> Printer.s_tabstract "" a
+		in
+		Buffer.add_string buf s;
+		close()
+	in
+	Parallel.run_in_new_pool com.timer_ctx (fun pool ->
+		Parallel.ParallelArray.iter pool f (Array.of_list com.types)
+	)
+
+let dump_position com =
+	let f mt =
+		match mt with
+			| TClassDecl c ->
+				let buf,close = create_dumpfile_from_path com (t_path mt) in
+				Printf.bprintf buf "%s\n" (s_type_path c.cl_path);
+				let field cf =
+					Printf.bprintf buf "\t%s\n" cf.cf_name;
+					begin match cf.cf_expr with
+					| None -> ()
+					| Some e ->
+						Printf.bprintf buf "%s\n" (Texpr.dump_with_pos "\t" e);
+					end
+				in
+				Option.may field c.cl_constructor;
+				List.iter field c.cl_ordered_statics;
+				List.iter field c.cl_ordered_fields;
+				close();
+			| _ ->
+				()
+	in
+	Parallel.run_in_new_pool com.timer_ctx (fun pool ->
+		Parallel.ParallelArray.iter pool f (Array.of_list com.types)
+	)
+
+let dump_types com =
+	match Common.defined_value_safe com Define.Dump with
+		| "pretty" -> dump_types com true
+		| "record" -> dump_record com
+		| "position" -> dump_position com
+		| _ -> dump_types com false
+
+let dump_dependencies ?(target_override=None) com =
+	let target_name = match target_override with
+		| None -> platform_name_macro com
+		| Some s -> s
+	in
+	let dump_dependencies_path = [dump_path com.defines;target_name;"dependencies"] in
+	let buf,close = create_dumpfile [] dump_dependencies_path in
+	let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
+	let dep = Hashtbl.create 0 in
+	List.iter (fun m ->
+		print "%s:\n" (Path.UniqueKey.lazy_path m.m_extra.m_file);
+		PMap.iter (fun _ mdep ->
+			let com,ctx = match mdep.md_kind with
+				| MMacro when not com.is_macro_context ->
+					Option.get (com.get_macros()), "[macro] "
+				| _ -> com, ""
+			in
+			let mdep_extra =
+				try (com.module_lut#find mdep.md_path).m_extra
+				with Not_found -> (com.cs#get_context mdep.md_sign)#find_module_extra mdep.md_path
+			in
+			let file = Path.UniqueKey.lazy_path mdep_extra.m_file in
+			print "\t%s%s\n" ctx file;
+			let l = try Hashtbl.find dep file with Not_found -> [] in
+			Hashtbl.replace dep file (m :: l)
+		) m.m_extra.m_deps;
+	) com.Common.modules;
+	close();
+	let dump_dependants_path = [dump_path com.defines;target_name;"dependants"] in
+	let buf,close = create_dumpfile [] dump_dependants_path in
+	let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
+	Hashtbl.iter (fun n ml ->
+		print "%s:\n" n;
+		List.iter (fun m ->
+			print "\t%s\n" (Path.UniqueKey.lazy_path m.m_extra.m_file);
+		) ml;
+	) dep;
+	close()

+ 132 - 0
src/codegen/fixOverrides.ml

@@ -0,0 +1,132 @@
+open Globals
+open Common
+open Type
+
+(* -------------------------------------------------------------------------- *)
+(* FIX OVERRIDES *)
+
+(*
+	on some platforms that don't support type parameters, we must have the
+	exact same type for an overridden/implemented function as the original one
+*)
+
+let rec find_field com c f =
+	try
+		(match c.cl_super with
+		| None ->
+			raise Not_found
+		| Some ( {cl_path = (["cpp"],"FastIterator")}, _ ) ->
+			raise Not_found (* This is a strongly typed 'extern' and the usual rules don't apply *)
+		| Some (c,_) ->
+			find_field com c f)
+	with Not_found -> try
+		if com.platform = Cpp || com.platform = Hl then (* uses delegation for interfaces *)
+			raise Not_found;
+		let rec loop = function
+			| [] ->
+				raise Not_found
+			| (c,_) :: l ->
+				try
+					find_field com c f
+				with
+					Not_found -> loop l
+		in
+		loop c.cl_implements
+	with Not_found ->
+		let f = PMap.find f.cf_name c.cl_fields in
+		(match f.cf_kind with Var { v_read = AccRequire _ } -> raise Not_found | _ -> ());
+		f
+
+let fix_override com c f fd =
+	let f2 = (try Some (find_field com c f) with Not_found -> None) in
+	match f2,fd with
+		| Some (f2), Some(fd) ->
+			let targs, tret = (match follow f2.cf_type with TFun (args,ret) -> args, ret | _ -> die "" __LOC__) in
+			let changed_args = ref [] in
+			let prefix = "_tmp_" in
+			let nargs = List.map2 (fun ((v,ct) as cur) (_,_,t2) ->
+				try
+					type_eq EqStrict (monomorphs c.cl_params (monomorphs f.cf_params v.v_type)) t2;
+					(* Flash generates type parameters with a single constraint as that constraint type, so we
+					   have to detect this case and change the variable (issue #2712). *)
+					begin match follow v.v_type with
+						| TInst({cl_kind = KTypeParameter ttp} as cp,_) when com.platform = Flash ->
+							begin match get_constraints ttp with
+							| [tc] ->
+								if List.exists (fun tp -> tp.ttp_name = (snd cp.cl_path)) c.cl_params then raise (Unify_error [])
+							| _ ->
+								()
+							end
+						| _ ->
+							()
+					end;
+					cur
+				with Unify_error _ ->
+					let v2 = alloc_var VGenerated (prefix ^ v.v_name) t2 v.v_pos in
+					changed_args := (v,v2) :: !changed_args;
+					v2,ct
+			) fd.tf_args targs in
+			let fd2 = {
+				tf_args = nargs;
+				tf_type = tret;
+				tf_expr = (match List.rev !changed_args with
+					| [] -> fd.tf_expr
+					| args ->
+						let e = fd.tf_expr in
+						let el = (match e.eexpr with TBlock el -> el | _ -> [e]) in
+						let p = (match el with [] -> e.epos | e :: _ -> e.epos) in
+						let el_v = List.map (fun (v,v2) ->
+							mk (TVar (v,Some (mk (TCast (mk (TLocal v2) v2.v_type p,None)) v.v_type p))) com.basic.tvoid p
+						) args in
+						{ e with eexpr = TBlock (el_v @ el) }
+				);
+			} in
+			let targs = List.map (fun(v,c) -> (v.v_name, Option.is_some c, v.v_type)) nargs in
+			let fde = (match f.cf_expr with None -> die "" __LOC__ | Some e -> e) in
+			f.cf_expr <- Some { fde with eexpr = TFunction fd2 };
+			f.cf_type <- TFun(targs,tret);
+		| Some(f2), None when (has_class_flag c CInterface) ->
+			let targs, tret = (match follow f2.cf_type with TFun (args,ret) -> args, ret | _ -> die "" __LOC__) in
+			f.cf_type <- TFun(targs,tret)
+		| _ ->
+			()
+
+let fix_overrides com t =
+	match t with
+	| TClassDecl c ->
+		(* overrides can be removed from interfaces *)
+		if (has_class_flag c CInterface) then
+			c.cl_ordered_fields <- List.filter (fun f ->
+				try
+					if find_field com c f == f then raise Not_found;
+					c.cl_fields <- PMap.remove f.cf_name c.cl_fields;
+					false;
+				with Not_found ->
+					true
+			) c.cl_ordered_fields;
+		List.iter (fun f ->
+			match f.cf_expr, f.cf_kind with
+			| Some { eexpr = TFunction fd }, Method (MethNormal | MethInline) ->
+				fix_override com c f (Some fd)
+			| None, Method (MethNormal | MethInline) when (has_class_flag c CInterface) ->
+				fix_override com c f None
+			| _ ->
+				()
+		) c.cl_ordered_fields
+	| _ ->
+		()
+
+(*
+	PHP does not allow abstract classes extending other abstract classes to override any fields, so these duplicates
+	must be removed from the child interface
+*)
+let fix_abstract_inheritance com t =
+	match t with
+	| TClassDecl c when (has_class_flag c CInterface) ->
+		c.cl_ordered_fields <- List.filter (fun f ->
+			let b = try (find_field com c f) == f
+			with Not_found -> false in
+			if not b then c.cl_fields <- PMap.remove f.cf_name c.cl_fields;
+			b;
+		) c.cl_ordered_fields
+	| _ -> ()

+ 17 - 14
src/codegen/genxml.ml

@@ -70,7 +70,7 @@ let tpath t =
 	real_path i.mt_path i.mt_meta
 
 let rec follow_param t =
-	match t with
+	match (follow_lazy t) with
 	| TMono r ->
 		(match r.tm_type with
 		| Some t -> follow_param t
@@ -212,7 +212,7 @@ let rec exists f c =
 			| None -> false
 			| Some (csup,_) -> exists f csup
 
-let rec gen_type_decl com pos t =
+let rec gen_type_decl pos t =
 	let m = (t_infos t).mt_module in
 	match t with
 	| TClassDecl c ->
@@ -257,7 +257,7 @@ let rec gen_type_decl com pos t =
 		let mk_field_cast (t,cf) = if Meta.has Meta.NoDoc cf.cf_meta then None else Some (node "icast" ["field",cf.cf_name] [gen_type t]) in
 		let sub = (match a.a_from,a.a_from_field with [],[] -> [] | l1,l2 -> [node "from" [] ((List.map mk_cast l1) @ (ExtList.List.filter_map mk_field_cast l2))]) in
 		let super = (match a.a_to,a.a_to_field with [],[] -> [] | l1,l2 -> [node "to" [] ((List.map mk_cast l1) @ (ExtList.List.filter_map mk_field_cast l2))]) in
-		let impl = (match a.a_impl with None -> [] | Some c -> [node "impl" [] [gen_type_decl com pos (TClassDecl c)]]) in
+		let impl = (match a.a_impl with None -> [] | Some c -> [node "impl" [] [gen_type_decl pos (TClassDecl c)]]) in
 		let this = [node "this" [] [gen_type a.a_this]] in
 		node "abstract" (gen_type_params pos a.a_private (tpath t) a.a_params a.a_pos m) (sub @ this @ super @ doc @ meta @ impl)
 
@@ -288,18 +288,21 @@ let rec write_xml ch tabs x =
 		IO.printf ch "<![CDATA[%s]]>" s
 
 let generate com file =
-	let t = Timer.timer ["generate";"xml"] in
-	let x = node "haxe" [] (List.map (gen_type_decl com true) (List.filter (fun t -> not (Meta.has Meta.NoDoc (t_infos t).mt_meta)) com.types)) in
-	t();
-	let t = Timer.timer ["write";"xml"] in
-	let ch = IO.output_channel (open_out_bin file) in
-	IO.printf ch "<!-- This file can be parsed by haxe.rtti.XmlParser -->\n";
-	write_xml ch "" x;
-	IO.close_out ch;
-	t()
+	let f () =
+		node "haxe" [] (List.map (gen_type_decl true) (List.filter (fun t -> not (Meta.has Meta.NoDoc (t_infos t).mt_meta)) com.types))
+	in
+	let x = Timer.time com.timer_ctx ["generate";"xml"] f () in
+
+	let f () =
+		let ch = IO.output_channel (open_out_bin file) in
+		IO.printf ch "<!-- This file can be parsed by haxe.rtti.XmlParser -->\n";
+		write_xml ch "" x;
+		IO.close_out ch;
+	in
+	Timer.time com.timer_ctx ["write";"xml"] f ()
 
-let gen_type_string ctx t =
-	let x = gen_type_decl ctx false t in
+let gen_type_string t =
+	let x = gen_type_decl false t in
 	let ch = IO.output_string() in
 	write_xml ch "" x;
 	IO.close_out ch

+ 42 - 39
src/codegen/javaModern.ml

@@ -672,13 +672,13 @@ module SignatureConverter = struct
 
 	and convert_signature ctx p jsig =
 		match jsig with
-		| TByte -> mk_type_path (["java"; "types"], "Int8") [] p
-		| TChar -> mk_type_path (["java"; "types"], "Char16") [] p
+		| TByte -> mk_type_path (["jvm"], "Int8") [] p
+		| TChar -> mk_type_path (["jvm"], "Char16") [] p
 		| TDouble -> mk_type_path ([], "Float") [] p
 		| TFloat -> mk_type_path ([], "Single") [] p
 		| TInt -> mk_type_path ([], "Int") [] p
 		| TLong -> mk_type_path (["haxe"], "Int64") [] p
-		| TShort -> mk_type_path (["java"; "types"], "Int16") [] p
+		| TShort -> mk_type_path (["jvm"], "Int16") [] p
 		| TBool -> mk_type_path ([], "Bool") [] p
 		| TObject ( (["haxe";"root"], name), args ) -> mk_type_path ([], name) (List.map (convert_arg ctx p) args) p
 		| TObject ( (["java";"lang"], "Object"), [] ) -> mk_type_path ([], "Dynamic") [] p
@@ -693,7 +693,7 @@ module SignatureConverter = struct
 			| _ -> die "" __LOC__ in
 			mk_type_path (pack, name ^ "$" ^ String.concat "$" (List.map fst inners)) (List.map (fun param -> convert_arg ctx p param) actual_param) p
 		| TObjectInner (pack, inners) -> die "" __LOC__
-		| TArray (jsig, _) -> mk_type_path (["java"], "NativeArray") [ TPType (convert_signature ctx p jsig,p) ] p
+		| TArray (jsig, _) -> mk_type_path (["jvm"], "NativeArray") [ TPType (convert_signature ctx p jsig,p) ] p
 		| TMethod _ -> failwith "TMethod cannot be converted directly into Complex Type"
 		| TTypeParameter s ->
 			try
@@ -754,11 +754,7 @@ module Converter = struct
 		tp
 
 	let convert_enum (jc : jclass) (file : string) =
-		let p = {
-			pfile = file;
-			pmin = 0;
-			pmax = 0
-		} in
+		let p = file_pos file in
 		let meta = ref [] in
 		let add_meta m = meta := m :: !meta in
 		let data = ref [] in
@@ -920,11 +916,7 @@ module Converter = struct
 		cff
 
 	let convert_class ctx (jc : jclass) (file : string) =
-		let p = {
-			pfile = file;
-			pmin = 0;
-			pmax = 0
-		} in
+		let p = file_pos file in
 		let flags = ref [HExtern] in
 		let meta = ref [] in
 		let add_flag f = flags := f :: !flags in
@@ -993,6 +985,16 @@ module Converter = struct
 			in
 			add_meta (Meta.Annotation,args,p)
 		end;
+		List.iter (fun attr -> match attr with
+			| AttrVisibleAnnotations ann ->
+				List.iter (function
+					| { ann_type = TObject( (["java";"lang"], "FunctionalInterface"), [] ) } ->
+						add_meta (Meta.FunctionalInterface,[],p);
+					| _ -> ()
+				) ann
+			| _ ->
+				()
+		) jc.jc_attributes;
 		let d = {
 			d_name = (class_name,p);
 			d_doc = None;
@@ -1016,7 +1018,7 @@ module Converter = struct
 		(pack,types)
 end
 
-class java_library_modern com name file_path = object(self)
+class java_library_modern com name file_path = object(self)
 	inherit [java_lib_type,unit] native_library name file_path as super
 
 
@@ -1026,35 +1028,36 @@ class java_library_modern com name file_path = object(self)
 	val mutable loaded = false
 	val mutable closed = false
 
+	method private do_load =
+		List.iter (function
+		| ({ Zip.is_directory = false; Zip.filename = filename } as entry) when String.ends_with filename ".class" ->
+			let pack = String.nsplit filename "/" in
+			begin match List.rev pack with
+				| [] -> ()
+				| name :: pack ->
+					let name = String.sub name 0 (String.length name - 6) in
+					let pack = List.rev pack in
+					let pack,(mname,tname) = PathConverter.jpath_to_hx (pack,name) in
+					let path = PathConverter.jpath_to_path (pack,(mname,tname)) in
+					let mname = match mname with
+						| None ->
+							cached_files <- path :: cached_files;
+							tname
+						| Some mname -> mname
+					in
+					Hashtbl.add modules (pack,mname) (filename,entry);
+				end
+		| _ -> ()
+	) (Zip.entries (Lazy.force zip));
+
 	method load =
 		if not loaded then begin
 			loaded <- true;
-			let close = Timer.timer ["jar";"load"] in
-			List.iter (function
-				| ({ Zip.is_directory = false; Zip.filename = filename } as entry) when String.ends_with filename ".class" ->
-					let pack = String.nsplit filename "/" in
-					begin match List.rev pack with
-						| [] -> ()
-						| name :: pack ->
-							let name = String.sub name 0 (String.length name - 6) in
-							let pack = List.rev pack in
-							let pack,(mname,tname) = PathConverter.jpath_to_hx (pack,name) in
-							let path = PathConverter.jpath_to_path (pack,(mname,tname)) in
-							let mname = match mname with
-								| None ->
-									cached_files <- path :: cached_files;
-									tname
-								| Some mname -> mname
-							in
-							Hashtbl.add modules (pack,mname) (filename,entry);
-						end
-				| _ -> ()
-			) (Zip.entries (Lazy.force zip));
-			close();
+			Timer.time com.Common.timer_ctx ["jar";"load"] (fun () -> self#do_load) ()
 		end
 
 	method private read zip (filename,entry) =
-		Std.finally (Timer.timer ["jar";"read"]) (fun () ->
+		Timer.time com.Common.timer_ctx ["jar";"read"] (fun () ->
 			let data = Zip.read_entry zip entry in
 			let jc = JReaderModern.parse_class (IO.input_string data) in
 			(jc,file_path,file_path ^ "@" ^ filename)
@@ -1082,7 +1085,7 @@ class java_library_modern com name file_path = object(self)
 					if entries = [] then raise Not_found;
 					let zip = Lazy.force zip in
 					let jcs = List.map (self#read zip) entries in
-					Std.finally (Timer.timer ["jar";"convert"]) (fun () ->
+					Timer.time com.Common.timer_ctx ["jar";"convert"] (fun () ->
 						Some (Converter.convert_module (fst path) jcs)
 					) ();
 				with Not_found ->

+ 9 - 7
src/codegen/swfLoader.ml

@@ -147,7 +147,7 @@ let is_valid_path com pack name =
 
 let build_class com c file =
 	let path = (make_tpath c.hlc_name).path in
-	let pos = { pfile = file ^ "@" ^ s_type_path (path.tpackage,path.tname); pmin = 0; pmax = 0 } in
+	let pos = file_pos (file ^ "@" ^ s_type_path (path.tpackage,path.tname)) in
 	match path with
 	| { tpackage = ["flash";"utils"]; tname = ("Object"|"Function") } ->
 		let inf = {
@@ -456,7 +456,6 @@ let build_class com c file =
 	(path.tpackage, [(EClass class_data,pos)])
 
 let extract_data (_,tags) =
-	let t = Timer.timer ["read";"swf"] in
 	let h = Hashtbl.create 0 in
 	let loop_field f =
 		match f.hlf_kind with
@@ -474,9 +473,11 @@ let extract_data (_,tags) =
 			List.iter (fun i -> Array.iter loop_field i.hls_fields) (As3hlparse.parse as3)
 		| _ -> ()
 	) tags;
-	t();
 	h
 
+let extract_data com arg =
+	Timer.time com.timer_ctx ["read";"swf"] extract_data arg
+
 let remove_debug_infos as3 =
 	let hl = As3hlparse.parse as3 in
 	let methods = Hashtbl.create 0 in
@@ -547,8 +548,7 @@ let remove_debug_infos as3 =
 	in
 	As3hlparse.flatten (List.map loop_static hl)
 
-let parse_swf com file =
-	let t = Timer.timer ["read";"swf"] in
+let parse_swf file =
 	let is_swc = Path.file_extension file = "swc" || Path.file_extension file = "ane" in
 	let ch = if is_swc then begin
 		let zip = Zip.open_in file in
@@ -577,9 +577,11 @@ let parse_swf com file =
 			t.tdata <- TActionScript3 (id,remove_debug_infos as3)
 		| _ -> ()
 	) tags;
-	t();
 	(h,tags)
 
+let parse_swf com file =
+	Timer.time com.timer_ctx ["read";"swf"] parse_swf file
+
 class swf_library com name file_path = object(self)
 	inherit [swf_lib_type,Swf.swf] native_library name file_path
 
@@ -600,7 +602,7 @@ class swf_library com name file_path = object(self)
 
 	method extract = match swf_classes with
 		| None ->
-			let d = extract_data self#get_swf in
+			let d = extract_data com self#get_swf in
 			swf_classes <- Some d;
 			d
 		| Some d ->

+ 18 - 18
src/compiler/args.ml

@@ -43,7 +43,7 @@ let process_args arg_spec =
 let parse_args com =
 	let usage = Printf.sprintf
 		"Haxe Compiler %s - (C)2005-2024 Haxe Foundation\nUsage: haxe%s <target> [options] [hxml files and dot paths...]\n"
-		s_version_full (if Sys.os_type = "Win32" then ".exe" else "")
+		(s_version_full com.version) (if Sys.os_type = "Win32" then ".exe" else "")
 	in
 	let actx = {
 		classes = [([],"Std")];
@@ -64,6 +64,7 @@ let parse_args com =
 		raise_usage = (fun () -> ());
 		display_arg = None;
 		deprecations = [];
+		measure_times = false;
 	} in
 	let add_deprecation s =
 		actx.deprecations <- s :: actx.deprecations
@@ -104,9 +105,9 @@ let parse_args com =
 		),"<name[=path]>","generate code for a custom target");
 		("Target",[],["-x"], Arg.String (fun cl ->
 			let cpath = Path.parse_type_path cl in
-			(match com.main.main_class with
+			(match com.main.main_path with
 				| Some c -> if cpath <> c then raise (Arg.Bad "Multiple --main classes specified")
-				| None -> com.main.main_class <- Some cpath);
+				| None -> com.main.main_path <- Some cpath);
 			actx.classes <- cpath :: actx.classes;
 			Common.define com Define.Interp;
 			set_platform com Eval "";
@@ -131,9 +132,9 @@ let parse_args com =
 			actx.hxb_libs <- lib :: actx.hxb_libs
 		),"<path>","add a hxb library");
 		("Compilation",["-m";"--main"],["-main"],Arg.String (fun cl ->
-			if com.main.main_class <> None then raise (Arg.Bad "Multiple --main classes specified");
+			if com.main.main_path <> None then raise (Arg.Bad "Multiple --main classes specified");
 			let cpath = Path.parse_type_path cl in
-			com.main.main_class <- Some cpath;
+			com.main.main_path <- Some cpath;
 			actx.classes <- cpath :: actx.classes
 		),"<class>","select startup class");
 		("Compilation",["-L";"--library"],["-lib"],Arg.String (fun _ -> ()),"<name[:ver]>","use a haxelib library");
@@ -154,8 +155,7 @@ let parse_args com =
 			com.debug <- true;
 		),"","add debug information to the compiled code");
 		("Miscellaneous",["--version"],["-version"],Arg.Unit (fun() ->
-			com.info s_version_full null_pos;
-			actx.did_something <- true;
+			raise (Helper.HelpMessage (s_version_full com.version));
 		),"","print version and exit");
 		("Miscellaneous", ["-h";"--help"], ["-help"], Arg.Unit (fun () ->
 			raise (Arg.Help "")
@@ -163,31 +163,27 @@ let parse_args com =
 		("Miscellaneous",["--help-defines"],[], Arg.Unit (fun() ->
 			let all,max_length = Define.get_documentation_list com.user_defines in
 			let all = List.map (fun (n,doc) -> Printf.sprintf " %-*s: %s" max_length n (limit_string doc (max_length + 3))) all in
-			List.iter (fun msg -> com.print (msg ^ "\n")) all;
-			actx.did_something <- true
+			raise (Helper.HelpMessage (ExtLib.String.join "\n" all));
 		),"","print help for all compiler specific defines");
 		("Miscellaneous",["--help-user-defines"],[], Arg.Unit (fun() ->
 			actx.did_something <- true;
 			com.callbacks#add_after_init_macros (fun() ->
 				let all,max_length = Define.get_user_documentation_list com.user_defines in
 				let all = List.map (fun (n,doc) -> Printf.sprintf " %-*s: %s" max_length n (limit_string doc (max_length + 3))) all in
-				List.iter (fun msg -> com.print (msg ^ "\n")) all;
-				raise Abort
+				raise (Helper.HelpMessage (ExtLib.String.join "\n" all));
 			)
 		),"","print help for all user defines");
 		("Miscellaneous",["--help-metas"],[], Arg.Unit (fun() ->
 			let all,max_length = Meta.get_documentation_list com.user_metas in
 			let all = List.map (fun (n,doc) -> Printf.sprintf " %-*s: %s" max_length n (limit_string doc (max_length + 3))) all in
-			List.iter (fun msg -> com.print (msg ^ "\n")) all;
-			actx.did_something <- true
+			raise (Helper.HelpMessage (ExtLib.String.join "\n" all));
 		),"","print help for all compiler metadatas");
 		("Miscellaneous",["--help-user-metas"],[], Arg.Unit (fun() ->
 			actx.did_something <- true;
 			com.callbacks#add_after_init_macros (fun() ->
 				let all,max_length = Meta.get_user_documentation_list com.user_metas in
 				let all = List.map (fun (n,doc) -> Printf.sprintf " %-*s: %s" max_length n (limit_string doc (max_length + 3))) all in
-				List.iter (fun msg -> com.print (msg ^ "\n")) all;
-				raise Abort
+				raise (Helper.HelpMessage (ExtLib.String.join "\n" all));
 			)
 		),"","print help for all user metadatas");
 	] in
@@ -266,7 +262,9 @@ let parse_args com =
 			actx.hxb_out <- Some file;
 		),"<file>", "generate haxe binary representation to target archive");
 		("Optimization",["--no-output"],[], Arg.Unit (fun() -> actx.no_output <- true),"","compiles but does not generate any file");
-		("Debug",["--times"],[], Arg.Unit (fun() -> Timer.measure_times := true),"","measure compilation times");
+		("Debug",["--times"],[], Arg.Unit (fun() ->
+			actx.measure_times <- true
+		),"","measure compilation times");
 		("Optimization",["--no-inline"],[],Arg.Unit (fun () ->
 			add_deprecation "--no-inline has been deprecated, use -D no-inline instead";
 			Common.define com Define.NoInline
@@ -298,7 +296,7 @@ let parse_args com =
 		),"<directory>","set current working directory");
 		("Compilation",["--haxelib-global"],[], Arg.Unit (fun () -> ()),"","pass --global argument to haxelib");
 		("Compilation",["-w"],[], Arg.String (fun s ->
-			let p = { pfile = "-w " ^ s; pmin = 0; pmax = 0 } in
+			let p = fake_pos ("-w " ^ s) in
 			let l = Warning.parse_options s p in
 			com.warning_options <- l :: com.warning_options
 		),"<warning list>","enable or disable specific warnings");
@@ -330,11 +328,13 @@ let parse_args com =
 					List.rev acc
 			in
 			let args = loop [] args in
-			Arg.parse_argv ~current (Array.of_list ("" :: args)) all_args_spec args_callback "";
+			Arg.parse_argv ~current (Array.of_list ("Haxe" :: args)) all_args_spec args_callback "";
 		with
 		| Arg.Help _ ->
 			raise (Helper.HelpMessage (usage_string all_args usage))
 		| Arg.Bad msg ->
+			(* Strip error prefix added by ocaml's arg parser *)
+			let msg = if ExtLib.String.starts_with msg "Haxe: " then (String.sub msg 6 ((String.length msg) - 6)) else msg in
 			let first_line = List.nth (Str.split (Str.regexp "\n") msg) 0 in
 			let new_msg = (Printf.sprintf "%s" first_line) in
 			let r = Str.regexp "unknown option [`']?\\([-A-Za-z]+\\)[`']?" in

+ 56 - 25
src/compiler/compilationCache.ml

@@ -35,6 +35,7 @@ class context_cache (index : int) (sign : Digest.t) = object(self)
 	val files : (Path.UniqueKey.t,cached_file) Hashtbl.t = Hashtbl.create 0
 	val modules : (path,module_def) Hashtbl.t = Hashtbl.create 0
 	val binary_cache : (path,HxbData.module_cache) Hashtbl.t = Hashtbl.create 0
+	val tmp_binary_cache : (path,HxbData.module_cache) Hashtbl.t = Hashtbl.create 0
 	val removed_files = Hashtbl.create 0
 	val mutable json = JNull
 	val mutable initialized = false
@@ -66,26 +67,48 @@ class context_cache (index : int) (sign : Digest.t) = object(self)
 	method find_module_opt path =
 		Hashtbl.find_opt modules path
 
-	method find_module_extra path =
-		try (Hashtbl.find modules path).m_extra with Not_found -> (Hashtbl.find binary_cache path).mc_extra
+	method get_hxb_module path =
+		try Hashtbl.find tmp_binary_cache path
+		with Not_found ->
+			let mc = Hashtbl.find binary_cache path in
+			let m_extra = { mc.mc_extra with m_deps = mc.mc_extra.m_deps } in
+			let mc = { mc with mc_extra = m_extra } in
+			Hashtbl.add tmp_binary_cache path mc;
+			mc
 
-	method cache_module config warn anon_identification path m =
+	method find_module_extra path =
+		try (Hashtbl.find modules path).m_extra
+		with Not_found -> (self#get_hxb_module path).mc_extra
+
+	method add_binary_cache m chunks =
+		Hashtbl.replace binary_cache m.m_path {
+			mc_path = m.m_path;
+			mc_id = m.m_id;
+			mc_chunks = chunks;
+			mc_extra = { m.m_extra with m_cache_state = MSGood; m_display_deps = None }
+		}
+
+	method cache_hxb_module config warn anon_identification m =
 		match m.m_extra.m_kind with
 		| MImport ->
-			Hashtbl.add modules m.m_path m
+			Hashtbl.add modules m.m_path m;
+			None
 		| _ ->
-			let writer = HxbWriter.create config warn anon_identification in
-			HxbWriter.write_module writer m;
-			let chunks = HxbWriter.get_chunks writer in
-			Hashtbl.replace binary_cache path {
-				mc_path = path;
-				mc_id = m.m_id;
-				mc_chunks = chunks;
-				mc_extra = { m.m_extra with m_cache_state = MSGood }
-			}
+			Some (fun () ->
+				let writer = HxbWriter.create config warn anon_identification in
+				HxbWriter.write_module writer m;
+				HxbWriter.get_chunks writer
+			)
+
+	method cache_module_in_memory path m =
+		Hashtbl.replace modules path m
+
+	method clear_temp_cache =
+		Hashtbl.clear tmp_binary_cache
 
 	method clear_cache =
-		Hashtbl.clear modules
+		Hashtbl.clear modules;
+		self#clear_temp_cache
 
 	(* initialization *)
 
@@ -98,7 +121,6 @@ class context_cache (index : int) (sign : Digest.t) = object(self)
 	method get_modules = modules
 
 	method get_hxb = binary_cache
-	method get_hxb_module path = Hashtbl.find binary_cache path
 
 	(* TODO handle hxb cache there too *)
 	method get_removed_files = removed_files
@@ -120,8 +142,7 @@ class virtual server_task (id : string list) (priority : int) = object(self)
 	method private virtual execute : unit
 
 	method run : unit =
-		let t = Timer.timer ("server" :: "task" :: id) in
-		Std.finally t (fun () -> self#execute) ()
+		self#execute
 
 	method get_priority = priority
 	method get_id = id
@@ -152,6 +173,9 @@ class cache = object(self)
 
 	(* contexts *)
 
+	method clear_temp_cache =
+		Hashtbl.iter (fun _ ctx -> ctx#clear_temp_cache) contexts
+
 	method get_context sign =
 		try
 			Hashtbl.find contexts sign
@@ -211,15 +235,27 @@ class cache = object(self)
 			) cc#get_modules acc
 		) contexts []
 
+	method taint_module m_path reason =
+		Hashtbl.iter (fun _ cc ->
+			Hashtbl.iter (fun _ m ->
+				if m.m_path = m_path then m.m_extra.m_cache_state <- MSBad (Tainted reason)
+			) cc#get_modules;
+			Hashtbl.iter (fun _ mc ->
+				if mc.HxbData.mc_path = m_path then
+					mc.HxbData.mc_extra.m_cache_state <- match reason, mc.mc_extra.m_cache_state with
+					| CheckDisplayFile, (MSBad _ as state) -> state
+					| _ -> MSBad (Tainted reason)
+			) cc#get_hxb
+		) contexts
+
 	method taint_modules file_key reason =
 		Hashtbl.iter (fun _ cc ->
 			Hashtbl.iter (fun _ m ->
 				if Path.UniqueKey.lazy_key m.m_extra.m_file = file_key then m.m_extra.m_cache_state <- MSBad (Tainted reason)
 			) cc#get_modules;
-			let open HxbData in
 			Hashtbl.iter (fun _ mc ->
-				if Path.UniqueKey.lazy_key mc.mc_extra.m_file = file_key then
-					mc.mc_extra.m_cache_state <- match reason, mc.mc_extra.m_cache_state with
+				if Path.UniqueKey.lazy_key mc.HxbData.mc_extra.m_file = file_key then
+					mc.HxbData.mc_extra.m_cache_state <- match reason, mc.HxbData.mc_extra.m_cache_state with
 					| CheckDisplayFile, (MSBad _ as state) -> state
 					| _ -> MSBad (Tainted reason)
 			) cc#get_hxb
@@ -310,8 +346,3 @@ class cache = object(self)
 end
 
 type t = cache
-
-type context_options =
-	| NormalContext
-	| MacroContext
-	| NormalAndMacroContext

+ 15 - 5
src/compiler/compilationContext.ml

@@ -37,13 +37,14 @@ type arg_context = {
 	mutable raise_usage : unit -> unit;
 	mutable display_arg : string option;
 	mutable deprecations : string list;
+	mutable measure_times : bool;
 }
 
 type communication = {
 	write_out : string -> unit;
 	write_err : string -> unit;
 	flush     : compilation_context -> unit;
-	exit      : int -> unit;
+	exit      : Timer.timer_context -> int -> unit;
 	is_server : bool;
 }
 
@@ -54,6 +55,7 @@ and compilation_context = {
 	mutable has_error : bool;
 	comm : communication;
 	mutable runtime_args : string list;
+	timer_ctx : Timer.timer_context;
 }
 
 type compilation_callbacks = {
@@ -80,16 +82,24 @@ let message ctx msg =
 	ctx.messages <- msg :: ctx.messages
 
 let error ctx ?(depth=0) ?(from_macro = false) msg p =
-	message ctx (make_compiler_message ~from_macro msg p depth DKCompilerMessage Error);
-	ctx.has_error <- true
+	message ctx (make_compiler_message ~from_macro msg p depth DKCompilerMessage Error)
+
+let after_error ctx =
+	ctx.has_error <- true;
+	if Common.fail_fast ctx.com then raise Abort
 
 let error_ext ctx (err : Error.error) =
 	Error.recurse_error (fun depth err ->
 		error ~depth ~from_macro:err.err_from_macro ctx (Error.error_msg err.err_message) err.err_pos
-	) err
+	) err;
+	after_error ctx
+
+let error ctx ?(depth=0) ?(from_macro = false) msg p =
+	error ctx ~depth ~from_macro msg p;
+	after_error ctx
 
 let create_native_lib file extern kind = {
 	lib_file = file;
 	lib_extern = extern;
 	lib_kind = kind;
-}
+}

+ 109 - 63
src/compiler/compiler.ml

@@ -2,16 +2,16 @@ open Globals
 open Common
 open CompilationContext
 
+let handle_diagnostics ctx msg p kind =
+	ctx.has_error <- true;
+	add_diagnostics_message ctx.com msg p kind Error;
+	match ctx.com.report_mode with
+	| RMLegacyDiagnostics _ -> DisplayOutput.emit_legacy_diagnostics ctx.com
+	| RMDiagnostics _ -> DisplayOutput.emit_diagnostics ctx.com
+	| _ -> die "" __LOC__
+
 let run_or_diagnose ctx f =
 	let com = ctx.com in
-	let handle_diagnostics msg p kind =
-		ctx.has_error <- true;
-		add_diagnostics_message com msg p kind Error;
-		match com.report_mode with
-		| RMLegacyDiagnostics _ -> DisplayOutput.emit_legacy_diagnostics ctx.com
-		| RMDiagnostics _ -> DisplayOutput.emit_diagnostics ctx.com
-		| _ -> die "" __LOC__
-	in
 	if is_diagnostics com then begin try
 			f ()
 		with
@@ -25,15 +25,14 @@ let run_or_diagnose ctx f =
 			| RMDiagnostics _ -> DisplayOutput.emit_diagnostics ctx.com
 			| _ -> die "" __LOC__)
 		| Parser.Error(msg,p) ->
-			handle_diagnostics (Parser.error_msg msg) p DKParserError
+			handle_diagnostics ctx (Parser.error_msg msg) p DKParserError
 		| Lexer.Error(msg,p) ->
-			handle_diagnostics (Lexer.error_msg msg) p DKParserError
+			handle_diagnostics ctx (Lexer.error_msg msg) p DKParserError
 		end
 	else
 		f ()
 
 let run_command ctx cmd =
-	let t = Timer.timer ["command";cmd] in
 	(* TODO: this is a hack *)
 	let cmd = if ctx.comm.is_server then begin
 		let h = Hashtbl.create 0 in
@@ -72,9 +71,11 @@ let run_command ctx cmd =
 			result
 		end
 	in
-	t();
 	result
 
+let run_command ctx cmd =
+	Timer.time ctx.timer_ctx ["command";cmd] (run_command ctx) cmd
+
 module Setup = struct
 	let initialize_target ctx com actx =
 		init_platform com;
@@ -190,6 +191,7 @@ module Setup = struct
 	let get_std_class_paths () =
 		try
 			let p = Sys.getenv "HAXE_STD_PATH" in
+			let p = Path.remove_trailing_slash p in
 			let rec loop = function
 				| drive :: path :: l ->
 					if String.length drive = 1 && ((drive.[0] >= 'a' && drive.[0] <= 'z') || (drive.[0] >= 'A' && drive.[0] <= 'Z')) then
@@ -229,17 +231,15 @@ module Setup = struct
 	let setup_common_context ctx =
 		let com = ctx.com in
 		ctx.com.print <- ctx.comm.write_out;
-		Common.define_value com Define.HaxeVer (Printf.sprintf "%.3f" (float_of_int Globals.version /. 1000.));
-		Common.raw_define com "haxe3";
-		Common.raw_define com "haxe4";
+		Common.define_value com Define.HaxeVer (Printf.sprintf "%.3f" (float_of_int version /. 1000.));
 		Common.define_value com Define.Haxe s_version;
 		Common.raw_define com "true";
-		Common.define_value com Define.Dce "std";
+		List.iter (fun (k,v) -> Define.raw_define_value com.defines k v) DefineList.default_values;
 		com.info <- (fun ?(depth=0) ?(from_macro=false) msg p ->
 			message ctx (make_compiler_message ~from_macro msg p depth DKCompilerMessage Information)
 		);
 		com.warning <- (fun ?(depth=0) ?(from_macro=false) w options msg p ->
-			match Warning.get_mode w (com.warning_options @ options) with
+			match Warning.get_mode w (options @ com.warning_options) with
 			| WMEnable ->
 				let wobj = Warning.warning_obj w in
 				let msg = if wobj.w_generic then
@@ -271,11 +271,17 @@ end
 
 let check_defines com =
 	if is_next com then begin
-		PMap.iter (fun k _ ->
+		PMap.iter (fun k v ->
 			try
 				let reason = Hashtbl.find Define.deprecation_lut k in
-				let p = { pfile = "-D " ^ k; pmin = -1; pmax = -1 } in
-				com.warning WDeprecatedDefine [] reason p
+				let p = fake_pos ("-D " ^ k) in
+				begin match reason with
+				| DueTo reason ->
+					com.warning WDeprecatedDefine [] reason p
+				| InFavorOf d ->
+					Define.raw_define_value com.defines d v;
+					com.warning WDeprecatedDefine [] (Printf.sprintf "-D %s has been deprecated in favor of -D %s" k d) p
+				end;
 			with Not_found ->
 				()
 		) com.defines.values
@@ -284,7 +290,6 @@ let check_defines com =
 (** Creates the typer context and types [classes] into it. *)
 let do_type ctx mctx actx display_file_dot_path =
 	let com = ctx.com in
-	let t = Timer.timer ["typing"] in
 	let cs = com.cs in
 	CommonCache.maybe_add_context_sign cs com "before_init_macros";
 	enter_stage com CInitMacrosStart;
@@ -294,6 +299,7 @@ let do_type ctx mctx actx display_file_dot_path =
 		Some (MacroContext.call_init_macro ctx.com mctx path)
 	) mctx (List.rev actx.config_macros) in
 	enter_stage com CInitMacrosDone;
+	update_platform_config com; (* make sure to adapt all flags changes defined during init macros *)
 	ServerMessage.compiler_stage com;
 
 	let macros = match mctx with None -> None | Some mctx -> mctx.g.macros in
@@ -307,7 +313,10 @@ let do_type ctx mctx actx display_file_dot_path =
 		com.callbacks#run com.error_ext com.callbacks#get_after_init_macros;
 		run_or_diagnose ctx (fun () ->
 			if com.display.dms_kind <> DMNone then DisplayTexpr.check_display_file tctx cs;
-			List.iter (fun cpath -> ignore(tctx.Typecore.g.Typecore.do_load_module tctx cpath null_pos)) (List.rev actx.classes);
+			List.iter (fun cpath ->
+				ignore(tctx.Typecore.g.Typecore.do_load_module tctx cpath null_pos);
+				Typecore.flush_pass tctx.g PBuildClass "actx.classes"
+			) (List.rev actx.classes);
 			Finalization.finalize tctx;
 		);
 	end with TypeloadParse.DisplayInMacroBlock ->
@@ -321,31 +330,35 @@ let do_type ctx mctx actx display_file_dot_path =
 		| (DMUsage _ | DMImplementation) -> FindReferences.find_possible_references tctx cs;
 		| _ -> ()
 	end;
-	t();
 	(tctx, display_file_dot_path)
 
 let finalize_typing ctx tctx =
-	let t = Timer.timer ["finalize"] in
 	let com = ctx.com in
+	let main_module = Finalization.maybe_load_main tctx in
 	enter_stage com CFilteringStart;
 	ServerMessage.compiler_stage com;
-	let main, types, modules = run_or_diagnose ctx (fun () -> Finalization.generate tctx) in
-	com.main.main_expr <- main;
+	let (main_expr,main_file),types,modules = run_or_diagnose ctx (fun () -> Finalization.generate tctx main_module) in
+	com.main.main_expr <- main_expr;
+	com.main.main_file <- main_file;
 	com.types <- types;
-	com.modules <- modules;
-	t()
+	com.modules <- modules
+
+let finalize_typing ctx tctx =
+	Timer.time ctx.timer_ctx ["finalize"] (finalize_typing ctx) tctx
 
-let filter ctx tctx before_destruction =
-	let t = Timer.timer ["filters"] in
-	DeprecationCheck.run ctx.com;
-	run_or_diagnose ctx (fun () -> Filters.run tctx ctx.com.main.main_expr before_destruction);
-	t()
+let filter ctx tctx ectx before_destruction =
+	Timer.time ctx.timer_ctx ["filters"] (fun () ->
+		DeprecationCheck.run ctx.com;
+		run_or_diagnose ctx (fun () -> Filters.run tctx ectx ctx.com.main.main_expr before_destruction)
+	) ()
 
 let compile ctx actx callbacks =
 	let com = ctx.com in
 	(* Set up display configuration *)
 	DisplayProcessing.process_display_configuration ctx;
+	let restore = disable_report_mode com in
 	let display_file_dot_path = DisplayProcessing.process_display_file com actx in
+	restore ();
 	let mctx = match com.platform with
 		| CustomTarget name ->
 			begin try
@@ -361,23 +374,24 @@ let compile ctx actx callbacks =
 	let ext = Setup.initialize_target ctx com actx in
 	update_platform_config com; (* make sure to adapt all flags changes defined after platform *)
 	callbacks.after_target_init ctx;
-	let t = Timer.timer ["init"] in
-	List.iter (fun f -> f()) (List.rev (actx.pre_compilation));
-	begin match actx.hxb_out with
-		| None ->
-			()
-		| Some file ->
-			com.hxb_writer_config <- HxbWriterConfig.process_argument file
-	end;
-	t();
+	Timer.time ctx.timer_ctx ["init"] (fun () ->
+		List.iter (fun f -> f()) (List.rev (actx.pre_compilation));
+		begin match actx.hxb_out with
+			| None ->
+				()
+			| Some file ->
+				com.hxb_writer_config <- HxbWriterConfig.process_argument file
+		end;
+	) ();
 	enter_stage com CInitialized;
 	ServerMessage.compiler_stage com;
 	if actx.classes = [([],"Std")] && not actx.force_typing then begin
 		if actx.cmds = [] && not actx.did_something then actx.raise_usage();
 	end else begin
 		(* Actual compilation starts here *)
-		let (tctx,display_file_dot_path) = do_type ctx mctx actx display_file_dot_path in
+		let (tctx,display_file_dot_path) = Timer.time ctx.timer_ctx ["typing"] (do_type ctx mctx actx) display_file_dot_path in
 		DisplayProcessing.handle_display_after_typing ctx tctx display_file_dot_path;
+		let ectx = ExceptionInit.create_exception_context tctx in
 		finalize_typing ctx tctx;
 		let is_compilation = is_compilation com in
 		com.callbacks#add_after_save (fun () ->
@@ -389,10 +403,10 @@ let compile ctx actx callbacks =
 					()
 		);
 		if is_diagnostics com then
-			filter ctx tctx (fun () -> DisplayProcessing.handle_display_after_finalization ctx tctx display_file_dot_path)
+			filter ctx com ectx (fun () -> DisplayProcessing.handle_display_after_finalization ctx tctx display_file_dot_path)
 		else begin
 			DisplayProcessing.handle_display_after_finalization ctx tctx display_file_dot_path;
-			filter ctx tctx (fun () -> ());
+			filter ctx com ectx (fun () -> ());
 		end;
 		if ctx.has_error then raise Abort;
 		if is_compilation then Generate.check_auxiliary_output com actx;
@@ -412,13 +426,15 @@ let compile ctx actx callbacks =
 		) (List.rev actx.cmds)
 	end
 
+let make_ice_message com msg backtrace =
+		let ver = (s_version_full com.version) in
+		let os_type = if Sys.unix then "unix" else "windows" in
+		Printf.sprintf "%s\nHaxe: %s; OS type: %s;\n%s" msg ver os_type backtrace
 let compile_safe ctx f =
 	let com = ctx.com in
 try
 	f ()
 with
-	| Abort ->
-		()
 	| Error.Fatal_error err ->
 		error_ext ctx err
 	| Lexer.Error (m,p) ->
@@ -426,21 +442,29 @@ with
 	| Parser.Error (m,p) ->
 		error ctx (Parser.error_msg m) p
 	| Typecore.Forbid_package ((pack,m,p),pl,pf)  ->
-		if !Parser.display_mode <> DMNone && ctx.has_next then begin
+		if ctx.com.display.dms_kind <> DMNone && ctx.has_next then begin
 			ctx.has_error <- false;
 			ctx.messages <- [];
 		end else begin
-			error ctx (Printf.sprintf "You cannot access the %s package while %s (for %s)" pack (if pf = "macro" then "in a macro" else "targeting " ^ pf) (s_type_path m) ) p;
-			List.iter (error ~depth:1 ctx (Error.compl_msg "referenced here")) (List.rev pl);
+			let sub = List.map (fun p -> Error.make_error ~depth:1 (Error.Custom (Error.compl_msg "referenced here")) p) pl in
+			error_ext ctx (Error.make_error (Error.Custom (Printf.sprintf "You cannot access the %s package while %s (for %s)" pack (if pf = "macro" then "in a macro" else "targeting " ^ pf) (s_type_path m))) ~sub p)
 		end
 	| Error.Error err ->
 		error_ext ctx err
 	| Arg.Bad msg ->
 		error ctx ("Error: " ^ msg) null_pos
+	| Failure msg when is_diagnostics com ->
+		handle_diagnostics ctx msg null_pos DKCompilerMessage;
 	| Failure msg when not Helper.is_debug_run ->
 		error ctx ("Error: " ^ msg) null_pos
+	| Globals.Ice (msg,backtrace) when is_diagnostics com ->
+		let s = make_ice_message com msg backtrace in
+		handle_diagnostics ctx s null_pos DKCompilerMessage
+	| Globals.Ice (msg,backtrace) when not Helper.is_debug_run ->
+		let s = make_ice_message com msg backtrace in
+		error ctx ("Error: " ^ s) null_pos
 	| Helper.HelpMessage msg ->
-		com.info msg null_pos
+		print_endline msg
 	| Parser.TypePath (p,c,is_import,pos) ->
 		DisplayOutput.handle_type_path_exception ctx p c is_import pos
 	| Parser.SyntaxCompletion(kind,subj) ->
@@ -448,12 +472,15 @@ with
 		error ctx ("Error: No completion point was found") null_pos
 	| DisplayException.DisplayException dex ->
 		DisplayOutput.handle_display_exception ctx dex
-	| Out_of_memory | EvalTypes.Sys_exit _ | Hlinterp.Sys_exit _ | DisplayProcessingGlobals.Completion _ as exc ->
+	| Abort | Out_of_memory | EvalTypes.Sys_exit _ | Hlinterp.Sys_exit _ | DisplayProcessingGlobals.Completion _ as exc ->
 		(* We don't want these to be caught by the catchall below *)
 		raise exc
 	| e when (try Sys.getenv "OCAMLRUNPARAM" <> "b" with _ -> true) && not Helper.is_debug_run ->
 		error ctx (Printexc.to_string e) null_pos
 
+let compile_safe ctx f =
+	try compile_safe ctx f with Abort -> ()
+
 let finalize ctx =
 	ctx.comm.flush ctx;
 	List.iter (fun lib -> lib#close) ctx.com.hxb_libs;
@@ -480,6 +507,8 @@ let catch_completion_and_exit ctx callbacks run =
 			i
 
 let process_actx ctx actx =
+	ctx.com.doinline <- ctx.com.display.dms_inline && not (Common.defined ctx.com Define.NoInline);
+	ctx.timer_ctx.measure_times <- (if actx.measure_times then Yes else No);
 	DisplayProcessing.process_display_arg ctx actx;
 	List.iter (fun s ->
 		ctx.com.warning WDeprecated [] s null_pos
@@ -506,23 +535,30 @@ let compile_ctx callbacks ctx =
 	end else
 		catch_completion_and_exit ctx callbacks run
 
-let create_context comm cs compilation_step params = {
-	com = Common.create compilation_step cs version params (DisplayTypes.DisplayMode.create !Parser.display_mode);
+let create_context comm cs timer_ctx compilation_step params = {
+	com = Common.create timer_ctx compilation_step cs {
+		version = version;
+		major = version_major;
+		minor = version_minor;
+		revision = version_revision;
+		pre = version_pre;
+		extra = Version.version_extra;
+	} params (DisplayTypes.DisplayMode.create DMNone);
 	messages = [];
 	has_next = false;
 	has_error = false;
 	comm = comm;
 	runtime_args = [];
+	timer_ctx = timer_ctx;
 }
 
 module HighLevel = struct
-	let add_libs libs args cs has_display =
+	let add_libs timer_ctx libs args cs has_display =
 		let global_repo = List.exists (fun a -> a = "--haxelib-global") args in
 		let fail msg =
 			raise (Arg.Bad msg)
 		in
 		let call_haxelib() =
-			let t = Timer.timer ["haxelib"] in
 			let cmd = "haxelib" ^ (if global_repo then " --global" else "") ^ " path " ^ String.concat " " libs in
 			let pin, pout, perr = Unix.open_process_full cmd (Unix.environment()) in
 			let lines = Std.input_list pin in
@@ -532,9 +568,11 @@ module HighLevel = struct
 				| [], [] -> "Failed to call haxelib (command not found ?)"
 				| [], [s] when ExtString.String.ends_with (ExtString.String.strip s) "Module not found: path" -> "The haxelib command has been strip'ed, please install it again"
 				| _ -> String.concat "\n" (lines@err));
-			t();
 			lines
 		in
+		let call_haxelib () =
+			Timer.time timer_ctx ["haxelib"] call_haxelib ()
+		in
 		match libs with
 		| [] ->
 			[]
@@ -568,11 +606,12 @@ module HighLevel = struct
 			lines
 
 	(* Returns a list of contexts, but doesn't do anything yet *)
-	let process_params server_api create each_args has_display is_server args =
+	let process_params server_api timer_ctx create each_args has_display is_server args =
 		(* We want the loop below to actually see all the --each params, so let's prepend them *)
 		let args = !each_args @ args in
 		let added_libs = Hashtbl.create 0 in
 		let server_mode = ref SMNone in
+		let hxml_stack = ref [] in
 		let create_context args =
 			let ctx = create (server_api.on_context_create()) args in
 			ctx
@@ -597,7 +636,8 @@ module HighLevel = struct
 				loop acc l
 			| "--cwd" :: dir :: l | "-C" :: dir :: l ->
 				(* we need to change it immediately since it will affect hxml loading *)
-				(try Unix.chdir dir with _ -> raise (Arg.Bad ("Invalid directory: " ^ dir)));
+				(* Exceptions are ignored there to let arg parsing do the error handling in expected order *)
+				(try Unix.chdir dir with _ -> ());
 				(* Push the --cwd arg so the arg processor know we did something. *)
 				loop (dir :: "--cwd" :: acc) l
 			| "--connect" :: hp :: l ->
@@ -624,13 +664,18 @@ module HighLevel = struct
 				let libs,args = find_subsequent_libs [name] args in
 				let libs = List.filter (fun l -> not (Hashtbl.mem added_libs l)) libs in
 				List.iter (fun l -> Hashtbl.add added_libs l ()) libs;
-				let lines = add_libs libs args server_api.cache has_display in
+				let lines = add_libs timer_ctx libs args server_api.cache has_display in
 				loop acc (lines @ args)
 			| ("--jvm" | "-jvm" as arg) :: dir :: args ->
 				loop_lib arg dir "hxjava" acc args
 			| arg :: l ->
 				match List.rev (ExtString.String.nsplit arg ".") with
 				| "hxml" :: _ :: _ when (match acc with "-cmd" :: _ | "--cmd" :: _ -> false | _ -> true) ->
+					let full_path = try Extc.get_full_path arg with Failure(_) -> raise (Arg.Bad (Printf.sprintf "File not found: %s" arg)) in
+					if List.mem full_path !hxml_stack then
+						raise (Arg.Bad (Printf.sprintf "Duplicate hxml inclusion: %s" full_path))
+					else
+						hxml_stack := full_path :: !hxml_stack;
 					let acc, l = (try acc, Helper.parse_hxml arg @ l with Not_found -> (arg ^ " (file not found)") :: acc, l) in
 					loop acc l
 				| _ ->
@@ -664,7 +709,8 @@ module HighLevel = struct
 		end
 
 	let entry server_api comm args =
-		let create = create_context comm server_api.cache in
+		let timer_ctx = Timer.make_context (Timer.make ["other"]) in
+		let create = create_context comm server_api.cache timer_ctx in
 		let each_args = ref [] in
 		let curdir = Unix.getcwd () in
 		let has_display = ref false in
@@ -678,7 +724,7 @@ module HighLevel = struct
 		in
 		let rec loop args =
 			let args,server_mode,ctx = try
-				process_params server_api create each_args !has_display comm.is_server args
+				process_params server_api timer_ctx create each_args !has_display comm.is_server args
 			with Arg.Bad msg ->
 				let ctx = create 0 args in
 				error ctx ("Error: " ^ msg) null_pos;
@@ -701,5 +747,5 @@ module HighLevel = struct
 				code
 		in
 		let code = loop args in
-		comm.exit code
+		comm.exit timer_ctx code
 end

+ 7 - 8
src/compiler/displayOutput.ml

@@ -1,7 +1,6 @@
 open Globals
 open Ast
 open Common
-open Timer
 open DisplayTypes.DisplayMode
 open DisplayTypes.CompletionResultKind
 open CompletionItem
@@ -24,14 +23,15 @@ let htmlescape s =
 	let s = String.concat "&quot;" (ExtString.String.nsplit s "\"") in
 	s
 
-let get_timer_fields start_time =
+let get_timer_fields timer_ctx =
+	let open Timer in
 	let tot = ref 0. in
-	Hashtbl.iter (fun _ t -> tot := !tot +. t.total) Timer.htimers;
-	let fields = [("@TOTAL", Printf.sprintf "%.3fs" (get_time() -. start_time))] in
+	Hashtbl.iter (fun _ t -> tot := !tot +. t.total) timer_ctx.timer_lut;
+	let fields = [("@TOTAL", Printf.sprintf "%.3fs" (Extc.time() -. timer_ctx.start_time))] in
 	if !tot > 0. then
 		Hashtbl.fold (fun _ t acc ->
 			((String.concat "." t.id),(Printf.sprintf "%.3fs (%.0f%%)" t.total (t.total *. 100. /. !tot))) :: acc
-		) Timer.htimers fields
+		) timer_ctx.timer_lut fields
 	else
 		fields
 
@@ -272,11 +272,10 @@ let handle_display_exception_old ctx dex = match dex with
 		raise (Completion (String.concat "." pack))
 	| DisplayFields r ->
 		DisplayPosition.display_position#reset;
-		let fields = if !Timer.measure_times then begin
-			Timer.close_times();
+		let fields = if ctx.com.timer_ctx.measure_times = Yes then begin
 			(List.map (fun (name,value) ->
 				CompletionItem.make_ci_timer ("@TIME " ^ name) value
-			) (get_timer_fields !Helper.start_time)) @ r.fitems
+			) (get_timer_fields ctx.com.timer_ctx)) @ r.fitems
 		end else
 			r.fitems
 		in

+ 45 - 52
src/compiler/displayProcessing.ml

@@ -20,7 +20,7 @@ let handle_display_argument_old com file_pos actx =
 		raise (Completion (DisplayOutput.print_keywords ()))
 	| "memory" ->
 		actx.did_something <- true;
-		(try Memory.display_memory com with e -> prerr_endline (Printexc.get_backtrace ()));
+		(try DisplayMemory.display_memory com with e -> prerr_endline (Printexc.get_backtrace ()));
 	| "diagnostics" ->
 		com.report_mode <- RMLegacyDiagnostics []
 	| _ ->
@@ -29,7 +29,6 @@ let handle_display_argument_old com file_pos actx =
 		let file_unique = com.file_keys#get file in
 		let pos, smode = try ExtString.String.split pos "@" with _ -> pos,"" in
 		let create mode =
-			Parser.display_mode := mode;
 			DisplayTypes.DisplayMode.create mode
 		in
 		let dm = match smode with
@@ -48,7 +47,7 @@ let handle_display_argument_old com file_pos actx =
 			| "diagnostics" ->
 				com.report_mode <- RMLegacyDiagnostics [file_unique];
 				let dm = create DMNone in
-				{dm with dms_display_file_policy = DFPOnly; dms_per_file = true; dms_populate_cache = !ServerConfig.populate_cache_from_display}
+				{dm with dms_display_file_policy = DFPOnly; dms_per_file = true; dms_populate_cache = false}
 			| "statistics" ->
 				com.report_mode <- RMStatistics;
 				let dm = create DMNone in
@@ -81,7 +80,7 @@ let process_display_arg ctx actx =
 		if String.length input > 0 && (input.[0] = '[' || input.[0] = '{') then begin
 			actx.did_something <- true;
 			actx.force_typing <- true;
-			DisplayJson.parse_input ctx.com input Timer.measure_times
+			DisplayJson.parse_input ctx.com input
 		end else
 			handle_display_argument_old ctx.com input actx;
 	| None ->
@@ -96,24 +95,13 @@ let process_display_configuration ctx =
 			add_diagnostics_message ?depth com s p DKCompilerMessage Information
 		);
 		com.warning <- (fun ?(depth = 0) ?from_macro w options s p ->
-			match Warning.get_mode w (com.warning_options @ options) with
+			match Warning.get_mode w (options @ com.warning_options) with
 			| WMEnable ->
 				let wobj = Warning.warning_obj w in
 				add_diagnostics_message ~depth ~code:(Some wobj.w_name) com s p DKCompilerMessage Warning
 			| WMDisable ->
 				()
 		);
-	end;
-	Lexer.old_format := Common.defined com Define.OldErrorFormat;
-	if !Lexer.old_format && !Parser.in_display then begin
-		let p = DisplayPosition.display_position#get in
-		(* convert byte position to utf8 position *)
-		try
-			let content = Std.input_file ~bin:true (Path.get_real_path p.pfile) in
-			let pos = Extlib_leftovers.UTF8.length (String.sub content 0 p.pmin) in
-			DisplayPosition.display_position#set { p with pmin = pos; pmax = pos }
-		with _ ->
-			() (* ignore *)
 	end
 
 let process_display_file com actx =
@@ -143,7 +131,7 @@ let process_display_file com actx =
 			DPKNone
 		| DFPOnly when (DisplayPosition.display_position#get).pfile = file_input_marker ->
 			actx.classes <- [];
-			com.main.main_class <- None;
+			com.main.main_path <- None;
 			begin match com.file_contents with
 			| [_, Some input] ->
 				com.file_contents <- [];
@@ -154,38 +142,43 @@ let process_display_file com actx =
 		| dfp ->
 			if dfp = DFPOnly then begin
 				actx.classes <- [];
-				com.main.main_class <- None;
+				com.main.main_path <- None;
 			end;
-			let real = Path.get_real_path (DisplayPosition.display_position#get).pfile in
-			let path = match get_module_path_from_file_path com real with
-			| Some path ->
-				if com.display.dms_kind = DMPackage then DisplayException.raise_package (fst path);
-				let path = match ExtString.String.nsplit (snd path) "." with
-					| [name;"macro"] ->
-						(* If we have a .macro.hx path, don't add the file to classes because the compiler won't find it.
-						   This can happen if we're completing in such a file. *)
-						DPKMacro (fst path,name)
-					| [name] ->
-						actx.classes <- path :: actx.classes;
-						DPKNormal path
-					| [name;target] ->
-						let path = fst path, name in
-						actx.classes <- path :: actx.classes;
-						DPKNormal path
-					| e ->
-						die "" __LOC__
+			let dpk = List.map (fun file_key ->
+				let real = Path.get_real_path (Path.UniqueKey.to_string file_key) in
+				let dpk = match get_module_path_from_file_path com real with
+				| Some path ->
+					if com.display.dms_kind = DMPackage then DisplayException.raise_package (fst path);
+					let dpk = match ExtString.String.nsplit (snd path) "." with
+						| [name;"macro"] ->
+							(* If we have a .macro.hx path, don't add the file to classes because the compiler won't find it.
+								 This can happen if we're completing in such a file. *)
+							DPKMacro (fst path,name)
+						| [name] ->
+							actx.classes <- path :: actx.classes;
+							DPKNormal path
+						| [name;target] ->
+							let path = fst path, name in
+							actx.classes <- path :: actx.classes;
+							DPKNormal path
+						| _ ->
+							failwith ("Invalid display file '" ^ real ^ "'")
+					in
+					dpk
+				| None ->
+					if not (Sys.file_exists real) then failwith "Display file does not exist";
+					(match List.rev (ExtString.String.nsplit real Path.path_sep) with
+					| file :: _ when file.[0] >= 'a' && file.[0] <= 'z' -> failwith ("Display file '" ^ file ^ "' should not start with a lowercase letter")
+					| _ -> ());
+					DPKDirect real
 				in
-				path
-			| None ->
-				if not (Sys.file_exists real) then failwith "Display file does not exist";
-				(match List.rev (ExtString.String.nsplit real Path.path_sep) with
-				| file :: _ when file.[0] >= 'a' && file.[0] <= 'z' -> failwith ("Display file '" ^ file ^ "' should not start with a lowercase letter")
-				| _ -> ());
-				DPKDirect real
-			in
-			Common.log com ("Display file : " ^ real);
+				Common.log com ("Display file : " ^ real);
+				dpk
+			) DisplayPosition.display_position#get_files in
 			Common.log com ("Classes found : ["  ^ (String.concat "," (List.map s_type_path actx.classes)) ^ "]");
-			path
+			match dpk with
+				| [dfile] -> dfile
+				| _ -> DPKNone
 
 (* 3. Loaders for display file that might be called *)
 
@@ -244,7 +237,7 @@ let load_display_file_standalone (ctx : Typecore.typer) file =
 let load_display_content_standalone (ctx : Typecore.typer) input =
 	let com = ctx.com in
 	let file = file_input_marker in
-	let p = {pfile = file; pmin = 0; pmax = 0} in
+	let p = file_pos file in
 	let parsed = TypeloadParse.parse_file_from_string com file p input in
 	let pack,decls = TypeloadParse.handle_parser_result com p parsed in
 	ignore(TypeloadModule.type_module ctx.com ctx.g (pack,"?DISPLAY") file ~dont_check_path:true decls p)
@@ -271,7 +264,7 @@ let maybe_load_display_file_before_typing tctx display_file_dot_path = match dis
 let handle_display_after_typing ctx tctx display_file_dot_path =
 	let com = ctx.com in
 	if ctx.com.display.dms_kind = DMNone && ctx.has_error then raise Abort;
-	begin match ctx.com.display.dms_kind,!Parser.delayed_syntax_completion with
+	begin match ctx.com.display.dms_kind,Atomic.get ctx.com.parser_state.delayed_syntax_completion with
 		| DMDefault,Some(kind,subj) -> DisplayOutput.handle_syntax_completion com kind subj
 		| _ -> ()
 	end;
@@ -310,9 +303,9 @@ let process_global_display_mode com tctx =
 	promote_type_hints tctx;
 	match com.display.dms_kind with
 	| DMUsage (with_definition,_,_) ->
-		FindReferences.find_references tctx com with_definition
+		FindReferences.find_references com with_definition
 	| DMImplementation ->
-		FindReferences.find_implementations tctx com
+		FindReferences.find_implementations com
 	| DMModuleSymbols filter ->
 		let open CompilationCache in
 		let cs = com.cs in
@@ -343,7 +336,7 @@ let handle_display_after_finalization ctx tctx display_file_dot_path =
 		| None -> ()
 		| Some mctx ->
 			(* We don't need a full macro flush here because we're not going to run any macros. *)
-			let _, types, modules = Finalization.generate mctx in
+			let _, types, modules = Finalization.generate mctx (Finalization.maybe_load_main mctx) in
 			mctx.Typecore.com.types <- types;
 			mctx.Typecore.com.Common.modules <- modules
 	end;
@@ -354,7 +347,7 @@ let handle_display_after_finalization ctx tctx display_file_dot_path =
 	| RMDiagnostics _ ->
 		DisplayOutput.emit_diagnostics com
 	| RMStatistics ->
-		DisplayOutput.emit_statistics tctx
+		DisplayOutput.emit_statistics com
 	| RMNone ->
 		()
 	end

+ 5 - 0
src/compiler/dune

@@ -0,0 +1,5 @@
+(rule
+	(deps (env_var ADD_REVISION) (universe))
+	(targets version.ml)
+	(action (with-stdout-to version.ml (run ../prebuild.exe version)))
+)

+ 63 - 39
src/compiler/generate.ml

@@ -18,10 +18,20 @@ let check_auxiliary_output com actx =
 		| Some file ->
 			Common.log com ("Generating json : " ^ file);
 			Path.mkdir_from_path file;
-			Genjson.generate com.types file
+			Genjson.generate com.timer_ctx com.types file
 	end
 
-let export_hxb com config cc platform zip m =
+let create_writer com config =
+	let anon_identification = new tanon_identification in
+	let warn w s p = com.Common.warning w com.warning_options s p in
+	let writer = HxbWriter.create config warn anon_identification in
+	writer,(fun () ->
+		let out = IO.output_string () in
+		HxbWriter.export writer out;
+		IO.close_out out
+	)
+
+let export_hxb from_cache com config cc platform m =
 	let open HxbData in
 	match m.m_extra.m_kind with
 		| MCode | MMacro | MFake | MExtern -> begin
@@ -29,8 +39,8 @@ let export_hxb com config cc platform zip m =
 			let l = platform :: (fst m.m_path @ [snd m.m_path]) in
 			let path = (String.concat "/" l) ^ ".hxb" in
 
-			try
-				let hxb_cache = cc#get_hxb_module m.m_path in
+			if from_cache then begin
+				let hxb_cache = try cc#get_hxb_module m.m_path with Not_found -> raise Abort in
 				let out = IO.output_string () in
 				write_header out;
 				List.iter (fun (kind,data) ->
@@ -38,18 +48,16 @@ let export_hxb com config cc platform zip m =
 					IO.nwrite out data
 				) hxb_cache.mc_chunks;
 				let data = IO.close_out out in
-				zip#add_entry data path;
-			with Not_found ->
-				let anon_identification = new tanon_identification in
-				let warn w s p = com.Common.warning w com.warning_options s p in
-				let writer = HxbWriter.create config warn anon_identification in
+				Some (path,data)
+			end else begin
+				let writer,close = create_writer com config in
 				HxbWriter.write_module writer m;
-				let out = IO.output_string () in
-				HxbWriter.export writer out;
-				zip#add_entry (IO.close_out out) path;
+				let bytes = close () in
+				Some (path,bytes)
+			end
 		end
 	| _ ->
-		()
+		None
 
 let check_hxb_output ctx config =
 	let open HxbWriterConfig in
@@ -57,38 +65,54 @@ let check_hxb_output ctx config =
 	let match_path_list l sl_path =
 		List.exists (fun sl -> Ast.match_path true sl_path sl) l
 	in
-	let try_write () =
+	let try_write from_cache =
 		let path = config.HxbWriterConfig.archive_path in
 		let path = Str.global_replace (Str.regexp "\\$target") (platform_name ctx.com.platform) path in
-		let t = Timer.timer ["generate";"hxb"] in
+		let t = Timer.start_timer ctx.timer_ctx ["generate";"hxb"] in
 		Path.mkdir_from_path path;
 		let zip = new Zip_output.zip_output path 6 in
 		let export com config =
 			let cc = CommonCache.get_cache com in
 			let target = Common.platform_name_macro com in
-			List.iter (fun m ->
-				let t = Timer.timer ["generate";"hxb";s_type_path m.m_path] in
+			let f m =
 				let sl_path = fst m.m_path @ [snd m.m_path] in
 				if not (match_path_list config.exclude sl_path) || match_path_list config.include' sl_path then
-					Std.finally t (export_hxb com config cc target zip) m
-			) com.modules;
+					Timer.time ctx.timer_ctx ["generate";"hxb";s_type_path m.m_path] (export_hxb from_cache com config cc target) m
+				else
+					None
+			in
+			let a_in = Array.of_list com.modules in
+			let a_out = Parallel.run_in_new_pool com.timer_ctx (fun pool ->
+				Parallel.ParallelArray.map pool f a_in None
+			) in
+			Array.iter (function
+				| None ->
+					()
+				| Some(path,bytes) ->
+					zip#add_entry bytes path
+			) a_out
 		in
 		Std.finally (fun () ->
 			zip#close;
 			t()
 		) (fun () ->
-			if  config.target_config.generate then
-				export com config.target_config;
-			begin match com.get_macros() with
-				| Some mcom when config.macro_config.generate ->
-					export mcom config.macro_config
-				| _ ->
-					()
+			if config.target_config.generate then begin
+				export com config.target_config
+			end;
+
+			if config.macro_config.generate then begin
+				match com.get_macros() with
+					| Some mcom ->
+						export mcom config.macro_config;
+					| _ ->
+						()
 			end;
 		) ()
 	in
 	try
-		try_write ()
+		(* This Abort case shouldn't happen, unless some modules are not stored in hxb cache (which should not be the case currently) *)
+		if ctx.comm.is_server then try try_write true with Abort -> try_write false
+		else try_write false
 	with Sys_error s ->
 		CompilationContext.error ctx (Printf.sprintf "Could not write to %s: %s" config.archive_path s) null_pos
 
@@ -107,14 +131,16 @@ let delete_file f = try Sys.remove f with _ -> ()
 let maybe_generate_dump ctx tctx =
 	let com = tctx.Typecore.com in
 	if Common.defined com Define.Dump then begin
-		Codegen.Dump.dump_types com;
-		Option.may Codegen.Dump.dump_types (com.get_macros())
+		Timer.time ctx.timer_ctx ["generate";"dump"] (fun () ->
+			Dump.dump_types com;
+			Option.may Dump.dump_types (com.get_macros());
+		) ();
 	end;
 	if Common.defined com Define.DumpDependencies then begin
-		Codegen.Dump.dump_dependencies com;
+		Dump.dump_dependencies com;
 		if not com.is_macro_context then match tctx.Typecore.g.Typecore.macros with
 			| None -> ()
-			| Some(_,ctx) -> Codegen.Dump.dump_dependencies ~target_override:(Some "macro") ctx.Typecore.com
+			| Some(_,ctx) -> Dump.dump_dependencies ~target_override:(Some "macro") ctx.Typecore.com
 	end
 
 let generate ctx tctx ext actx =
@@ -122,7 +148,7 @@ let generate ctx tctx ext actx =
 	(* check file extension. In case of wrong commandline, we don't want
 		to accidentaly delete a source file. *)
 	if Path.file_extension com.file = ext then delete_file com.file;
-	if com.platform = Flash || com.platform = Cpp || com.platform = Hl then List.iter (Codegen.fix_overrides com) com.types;
+	if com.platform = Flash || com.platform = Cpp || com.platform = Hl then List.iter (FixOverrides.fix_overrides com) com.types;
 	begin match com.platform with
 		| Neko | Hl | Eval when actx.interp -> ()
 		| Cpp when Common.defined com Define.Cppia -> ()
@@ -130,7 +156,7 @@ let generate ctx tctx ext actx =
 		| _ -> Path.mkdir_from_path com.file
 	end;
 	if actx.interp then begin
-		let timer = Timer.timer ["interp"] in
+		let timer = Timer.start_timer ctx.timer_ctx ["interp"] in
 		let old = tctx.com.args in
 		tctx.com.args <- ctx.runtime_args;
 		let restore () =
@@ -146,11 +172,11 @@ let generate ctx tctx ext actx =
 			with Not_found ->
 				None
 			in
-			Genswf.generate header,"swf"
+			Genswf.generate header com.Common.native_libs.swf_libs com.Common.flash_version,"swf"
 		| Neko ->
-			Genneko.generate,"neko"
+			Genneko.generate com.neko_lib_paths,"neko"
 		| Js ->
-			Genjs.generate,"js"
+			Genjs.generate com.js_gen,"js"
 		| Lua ->
 			Genlua.generate,"lua"
 		| Php ->
@@ -172,8 +198,6 @@ let generate ctx tctx ext actx =
 		if name = "" then ()
 		else begin
 			Common.log com ("Generating " ^ name ^ ": " ^ com.file);
-			let t = Timer.timer ["generate";name] in
-			generate com;
-			t()
+			Timer.time com.timer_ctx ["generate";name] generate (Common.to_gctx com);
 		end
 	end

+ 2 - 3
src/compiler/haxe.ml

@@ -43,12 +43,11 @@
 *)
 open Server
 
-let other = Timer.timer ["other"];;
+;;
 Sys.catch_break true;
 
 let args = List.tl (Array.to_list Sys.argv) in
 set_binary_mode_out stdout true;
 set_binary_mode_out stderr true;
 let sctx = ServerCompilationContext.create false in
-Server.process sctx (Communication.create_stdio ()) args;
-other()
+Server.process sctx (Communication.create_stdio ()) args;

+ 0 - 2
src/compiler/helper.ml

@@ -3,8 +3,6 @@ exception HelpMessage of string
 
 let is_debug_run = try Sys.getenv "HAXEDEBUG" = "1" with _ -> false
 
-let start_time = ref (Timer.get_time())
-
 let prompt = ref false
 
 let expand_env ?(h=None) path  =

+ 16 - 10
src/compiler/hxb/hxbData.ml

@@ -10,10 +10,10 @@ exception HxbFailure of string
 	EN = enum
 	AB = abstract
 	TD = typedef
-	AN = anon
+	OB = anonymous object
 	CF = class field
 	EF = enum field
-	AF = anon field
+	OF = object field
 	EX = expression
 	EO = end of (Types | Fields | Module)
 	..F = forward definition
@@ -24,16 +24,18 @@ exception HxbFailure of string
 type chunk_kind =
 	| STR (* string pool *)
 	| DOC (* doc pool *)
-	| MDF (* module foward *)
+	| MDF (* module forward *)
 	| MTF (* module types forward *)
 	(* Module type references *)
-	| MDR (* module references *)
+	| IMP (* imports *)
 	| CLR (* class references *)
 	| ENR (* enum references *)
 	| ABR (* abstract references *)
 	| TDR (* typedef references *)
-	(* Field references *)
-	| AFR (* anon field references *)
+	(* Anonymous objects *)
+	| OFR (* object field references *)
+	| OFD (* object field definitions *)
+	| OBD (* object definitions *)
 	(* Own module type definitions *)
 	| CLD (* class definition *)
 	| END (* enum definition *)
@@ -66,12 +68,14 @@ let string_of_chunk_kind = function
 	| DOC -> "DOC"
 	| MDF -> "MDF"
 	| MTF -> "MTF"
-	| MDR -> "MDR"
+	| IMP -> "IMP"
 	| CLR -> "CLR"
 	| ENR -> "ENR"
 	| ABR -> "ABR"
 	| TDR -> "TDR"
-	| AFR -> "AFR"
+	| OFR -> "OFR"
+	| OFD -> "OFD"
+	| OBD -> "OBD"
 	| EFR -> "EFR"
 	| CFR -> "CFR"
 	| CLD -> "CLD"
@@ -91,12 +95,14 @@ let chunk_kind_of_string = function
 	| "DOC" -> DOC
 	| "MDF" -> MDF
 	| "MTF" -> MTF
-	| "MDR" -> MDR
+	| "IMP" -> IMP
 	| "CLR" -> CLR
 	| "ENR" -> ENR
 	| "ABR" -> ABR
 	| "TDR" -> TDR
-	| "AFR" -> AFR
+	| "OFR" -> OFR
+	| "OFD" -> OFD
+	| "OBD" -> OBD
 	| "EFR" -> EFR
 	| "CFR" -> CFR
 	| "CLD" -> CLD

+ 26 - 20
src/compiler/hxb/hxbLib.ml

@@ -2,7 +2,7 @@ open Globals
 open Common
 open ExtString
 
-class hxb_library file_path = object(self)
+class hxb_library timer_ctx file_path hxb_times = object(self)
 	inherit abstract_hxb_lib
 	val zip = lazy (Zip.open_in file_path)
 
@@ -10,34 +10,37 @@ class hxb_library file_path = object(self)
 	val modules = Hashtbl.create 0
 	val mutable closed = false
 	val mutable loaded = false
+	val mutable string_pool : string array option = None
+	val mutable macro_string_pool : string array option = None
+
+	method private do_load =
+		List.iter (function
+		| ({ Zip.is_directory = false; Zip.filename = filename } as entry) when String.ends_with filename ".hxb" ->
+			let pack = String.nsplit filename "/" in
+			begin match List.rev pack with
+				| [] -> ()
+				| name :: pack ->
+					let name = String.sub name 0 (String.length name - 4) in
+					let pack = List.rev pack in
+					Hashtbl.add modules (pack,name) (filename,entry);
+				end
+		| _ -> ()
+	) (Zip.entries (Lazy.force zip));
 
 	method load =
 		if not loaded then begin
 			loaded <- true;
-			let close = Timer.timer ["hxblib";"read"] in
-			List.iter (function
-				| ({ Zip.is_directory = false; Zip.filename = filename } as entry) when String.ends_with filename ".hxb" ->
-					let pack = String.nsplit filename "/" in
-					begin match List.rev pack with
-						| [] -> ()
-						| name :: pack ->
-							let name = String.sub name 0 (String.length name - 4) in
-							let pack = List.rev pack in
-							Hashtbl.add modules (pack,name) (filename,entry);
-						end
-				| _ -> ()
-			) (Zip.entries (Lazy.force zip));
-			close();
+			Timer.time timer_ctx ["hxblib";"read"] (fun () -> self#do_load) ()
 		end
 
 	method get_bytes (target : string) (path : path) =
 		try
 			let path = (target :: fst path,snd path) in
 			let (filename,entry) = Hashtbl.find modules path in
-			let close = Timer.timer ["hxblib";"get bytes"] in
-			let zip = Lazy.force zip in
-			let data = Zip.read_entry zip entry in
-			close();
+			let data = Timer.time timer_ctx ["hxblib";"get bytes"] (fun () ->
+				let zip = Lazy.force zip in
+				Zip.read_entry zip entry
+			) () in
 			Some (Bytes.unsafe_of_string data)
 		with Not_found ->
 			None
@@ -49,6 +52,9 @@ class hxb_library file_path = object(self)
 		end
 
 	method get_file_path = file_path
+	method get_string_pool target =
+		if target = "macro" && Option.is_some macro_string_pool then macro_string_pool
+		else string_pool
 end
 
 
@@ -60,4 +66,4 @@ let create_hxb_lib com file_path =
 	with Not_found ->
 		failwith ("hxb lib " ^ file_path ^ " not found")
 	in
-	new hxb_library file
+	new hxb_library com.timer_ctx file (if Common.defined com Define.HxbTimes then Some com.timer_ctx else None)

+ 243 - 117
src/compiler/hxb/hxbReader.ml

@@ -142,29 +142,31 @@ let read_leb128 ch =
 
 let dump_stats name stats =
 	print_endline (Printf.sprintf "hxb_reader stats for %s" name);
-	print_endline (Printf.sprintf "  modules partially restored: %i" (!(stats.modules_fully_restored) - !(stats.modules_partially_restored)));
+	print_endline (Printf.sprintf "  modules partially restored: %i" (!(stats.modules_partially_restored) - !(stats.modules_fully_restored)));
 	print_endline (Printf.sprintf "  modules fully restored: %i" !(stats.modules_fully_restored));
 
 class hxb_reader
 	(mpath : path)
 	(stats : hxb_reader_stats)
+	(timer_ctx : Timer.timer_context option)
 = object(self)
 	val mutable api = Obj.magic ""
+	val mutable full_restore = true
 	val mutable current_module = null_module
 
 	val mutable ch = BytesWithPosition.create (Bytes.create 0)
 	val mutable string_pool = Array.make 0 ""
 	val mutable doc_pool = Array.make 0 ""
 
-	val mutable classes = Array.make 0 null_class
-	val mutable abstracts = Array.make 0 null_abstract
-	val mutable enums = Array.make 0 null_enum
-	val mutable typedefs = Array.make 0 null_typedef
+	val mutable classes = Array.make 0 (Lazy.from_val null_class)
+	val mutable abstracts = Array.make 0 (Lazy.from_val null_abstract)
+	val mutable enums = Array.make 0 (Lazy.from_val null_enum)
+	val mutable typedefs = Array.make 0 (Lazy.from_val null_typedef)
 	val mutable anons = Array.make 0 null_tanon
 	val mutable anon_fields = Array.make 0 null_field
 	val mutable tmonos = Array.make 0 (mk_mono())
-	val mutable class_fields = Array.make 0 null_field
-	val mutable enum_fields = Array.make 0 null_enum_field
+	val mutable class_fields = Array.make 0 (Lazy.from_val null_field)
+	val mutable enum_fields = Array.make 0 (Lazy.from_val null_enum_field)
 
 	val mutable type_type_parameters = Array.make 0 (mk_type_param null_class TPHType None None)
 	val mutable field_type_parameters = Array.make 0 (mk_type_param null_class TPHMethod None None)
@@ -175,11 +177,20 @@ class hxb_reader
 
 	method resolve_type pack mname tname =
 		try
-			api#resolve_type pack mname tname
+			let mt = api#resolve_type pack mname tname in
+			if not full_restore then begin
+				let mdep = (t_infos mt).mt_module in
+				if mdep != null_module && current_module.m_path != mdep.m_path then
+					current_module.m_extra.m_display_deps <- Some (PMap.add mdep.m_id (create_dependency mdep MDepFromTyping) (Option.get current_module.m_extra.m_display_deps))
+			end;
+			mt
 		with Not_found ->
 			dump_backtrace();
 			error (Printf.sprintf "[HXB] [%s] Cannot resolve type %s" (s_type_path current_module.m_path) (s_type_path ((pack @ [mname]),tname)))
 
+	method make_lazy_type_dynamic f : Type.t =
+		api#make_lazy_type t_dynamic f
+
 	(* Primitives *)
 
 	method read_i32 =
@@ -290,10 +301,12 @@ class hxb_reader
 		typedefs.(read_uleb128 ch)
 
 	method read_field_ref =
-		class_fields.(read_uleb128 ch)
+		let cf = class_fields.(read_uleb128 ch) in
+		Lazy.force cf
 
 	method read_enum_field_ref =
-		enum_fields.(read_uleb128 ch)
+		let ef = enum_fields.(read_uleb128 ch) in
+		Lazy.force ef
 
 	method read_anon_ref =
 		match read_byte ch with
@@ -301,7 +314,8 @@ class hxb_reader
 			anons.(read_uleb128 ch)
 		| 1 ->
 			let an = anons.(read_uleb128 ch) in
-			self#read_anon an
+			self#read_anon an;
+			an
 		| _ ->
 			assert false
 
@@ -733,15 +747,23 @@ class hxb_reader
 			local_type_parameters.(k).ttp_type
 		| 4 ->
 			t_dynamic
+		| 5 ->
+			let path = self#read_path in
+			(mk_type_param { null_class with cl_path = path } TPHUnbound None None).ttp_type
 		| 10 ->
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			c.cl_type
 		| 11 ->
 			let en = self#read_enum_ref in
-			en.e_type
+			self#make_lazy_type_dynamic (fun () ->
+				(Lazy.force en).e_type
+			)
 		| 12 ->
 			let a = self#read_abstract_ref in
-			TType(abstract_module_type a [],[])
+			(* self#make_lazy_type_dynamic (fun () -> *)
+				TType(abstract_module_type (Lazy.force a) [],[])
+			(* ) *)
 		| 13 ->
 			let e = self#read_expr in
 			let c = {null_class with cl_kind = KExpr e; cl_module = current_module } in
@@ -800,68 +822,96 @@ class hxb_reader
 			TFun(args,ret)
 		| 40 ->
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			TInst(c,[])
 		| 41 ->
 			let c = self#read_class_ref in
 			let t1 = self#read_type_instance in
+			let c = Lazy.force c in
 			TInst(c,[t1])
 		| 42 ->
 			let c = self#read_class_ref in
 			let t1 = self#read_type_instance in
 			let t2 = self#read_type_instance in
+			let c = Lazy.force c in
 			TInst(c,[t1;t2])
 		| 49 ->
 			let c = self#read_class_ref in
 			let tl = self#read_types in
+			let c = Lazy.force c in
 			TInst(c,tl)
 		| 50 ->
 			let en = self#read_enum_ref in
-			TEnum(en,[])
+			self#make_lazy_type_dynamic (fun () ->
+				TEnum(Lazy.force en,[])
+			)
 		| 51 ->
 			let en = self#read_enum_ref in
 			let t1 = self#read_type_instance in
-			TEnum(en,[t1])
+			self#make_lazy_type_dynamic (fun () ->
+				TEnum(Lazy.force en,[t1])
+			)
 		| 52 ->
 			let en = self#read_enum_ref in
 			let t1 = self#read_type_instance in
 			let t2 = self#read_type_instance in
-			TEnum(en,[t1;t2])
+			self#make_lazy_type_dynamic (fun () ->
+				TEnum(Lazy.force en,[t1;t2])
+			)
 		| 59 ->
 			let e = self#read_enum_ref in
 			let tl = self#read_types in
-			TEnum(e,tl)
+			self#make_lazy_type_dynamic (fun () ->
+				TEnum(Lazy.force e,tl)
+			)
 		| 60 ->
 			let td = self#read_typedef_ref in
-			TType(td,[])
+			self#make_lazy_type_dynamic (fun () ->
+				TType(Lazy.force td,[])
+			);
 		| 61 ->
 			let td = self#read_typedef_ref in
 			let t1 = self#read_type_instance in
-			TType(td,[t1])
+			self#make_lazy_type_dynamic (fun () ->
+				TType(Lazy.force td,[t1])
+			)
 		| 62 ->
 			let td = self#read_typedef_ref in
 			let t1 = self#read_type_instance in
 			let t2 = self#read_type_instance in
-			TType(td,[t1;t2])
+			self#make_lazy_type_dynamic (fun () ->
+				TType(Lazy.force td,[t1;t2])
+			)
 		| 69 ->
 			let t = self#read_typedef_ref in
 			let tl = self#read_types in
-			TType(t,tl)
+			self#make_lazy_type_dynamic (fun () ->
+				TType(Lazy.force t,tl)
+			)
 		| 70 ->
 			let a = self#read_abstract_ref in
-			TAbstract(a,[])
+			(* self#make_lazy_type_dynamic (fun () -> *)
+				TAbstract(Lazy.force a,[])
+			(* ) *)
 		| 71 ->
 			let a = self#read_abstract_ref in
 			let t1 = self#read_type_instance in
-			TAbstract(a,[t1])
+			(* self#make_lazy_type_dynamic (fun () -> *)
+				TAbstract(Lazy.force a,[t1])
+			(* ) *)
 		| 72 ->
 			let a = self#read_abstract_ref in
 			let t1 = self#read_type_instance in
 			let t2 = self#read_type_instance in
-			TAbstract(a,[t1;t2])
+			(* self#make_lazy_type_dynamic (fun () -> *)
+				TAbstract(Lazy.force a,[t1;t2])
+			(* ) *)
 		| 79 ->
 			let a = self#read_abstract_ref in
 			let tl = self#read_types in
-			TAbstract(a,tl)
+			(* self#make_lazy_type_dynamic (fun () -> *)
+				TAbstract(Lazy.force a,tl)
+			(* ) *)
 		| 80 ->
 			empty_anon
 		| 81 ->
@@ -962,7 +1012,7 @@ class hxb_reader
 			| 5 -> VUser TVOLocalFunction
 			| 6 -> VGenerated
 			| 7 -> VInlined
-			| 8 -> VInlinedConstructorVariable
+			| 8 -> VInlinedConstructorVariable (self#read_list (fun () -> self#read_string))
 			| 9 -> VExtractorVariable
 			| 10 -> VAbstractThis
 			| _ -> assert false
@@ -1175,11 +1225,6 @@ class hxb_reader
 						let e1 = loop () in
 						let e2 = loop () in
 						TWhile(e1,e2,DoWhile),(Some api#basic_types.tvoid)
-					| 86 ->
-						let v  = declare_local () in
-						let e1 = loop () in
-						let e2 = loop () in
-						TFor(v,e1,e2),(Some api#basic_types.tvoid)
 
 					(* control flow 90-99 *)
 					| 90 ->
@@ -1204,12 +1249,14 @@ class hxb_reader
 					| 102 ->
 						let e1 = loop () in
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let tl = self#read_types in
 						let cf = self#read_field_ref in
 						TField(e1,FInstance(c,tl,cf)),None
 					| 103 ->
 						let e1 = loop () in
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let cf = self#read_field_ref in
 						TField(e1,FStatic(c,cf)),None
 					| 104 ->
@@ -1219,6 +1266,7 @@ class hxb_reader
 					| 105 ->
 						let e1 = loop () in
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let tl = self#read_types in
 						let cf = self#read_field_ref in
 						TField(e1,FClosure(Some(c,tl),cf)),None
@@ -1230,6 +1278,7 @@ class hxb_reader
 						let e1 = loop () in
 						let en = self#read_enum_ref in
 						let ef = self#read_enum_field_ref in
+						let en = Lazy.force en in
 						TField(e1,FEnum(en,ef)),None
 					| 108 ->
 						let e1 = loop () in
@@ -1239,12 +1288,14 @@ class hxb_reader
 					| 110 ->
 						let p = read_relpos () in
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let cf = self#read_field_ref in
 						let e1 = Texpr.Builder.make_static_this c p in
 						TField(e1,FStatic(c,cf)),None
 					| 111 ->
 						let p = read_relpos () in
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let tl = self#read_types in
 						let cf = self#read_field_ref in
 						let ethis = mk (TConst TThis) (Option.get fctx.tthis) p in
@@ -1253,14 +1304,16 @@ class hxb_reader
 					(* module types 120-139 *)
 					| 120 ->
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						TTypeExpr (TClassDecl c),(Some c.cl_type)
 					| 121 ->
 						let en = self#read_enum_ref in
+						let en = Lazy.force en in
 						TTypeExpr (TEnumDecl en),(Some en.e_type)
 					| 122 ->
-						TTypeExpr (TAbstractDecl self#read_abstract_ref),None
+						TTypeExpr (TAbstractDecl (Lazy.force self#read_abstract_ref)),None
 					| 123 ->
-						TTypeExpr (TTypeDecl self#read_typedef_ref),None
+						TTypeExpr (TTypeDecl (Lazy.force self#read_typedef_ref)),None
 					| 124 ->
 						TCast(loop (),None),None
 					| 125 ->
@@ -1270,6 +1323,7 @@ class hxb_reader
 						TCast(e1,Some mt),None
 					| 126 ->
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let tl = self#read_types in
 						let el = loop_el() in
 						TNew(c,tl,el),None
@@ -1321,8 +1375,9 @@ class hxb_reader
 	method read_class_field_forward =
 		let name = self#read_string in
 		let pos,name_pos = self#read_pos_pair in
+		let cf_meta = self#read_metadata in
 		let overloads = self#read_list (fun () -> self#read_class_field_forward) in
-		{ null_field with cf_name = name; cf_pos = pos; cf_name_pos = name_pos; cf_overloads = overloads }
+		{ null_field with cf_name = name; cf_pos = pos; cf_name_pos = name_pos; cf_overloads = overloads; cf_meta = cf_meta }
 
 	method start_texpr =
 		begin match read_byte ch with
@@ -1380,7 +1435,6 @@ class hxb_reader
 		let flags = read_uleb128 ch in
 
 		let doc = self#read_option (fun () -> self#read_documentation) in
-		cf.cf_meta <- self#read_metadata;
 		let kind = self#read_field_kind in
 
 		let expr,expr_unoptimized = match read_byte ch with
@@ -1445,7 +1499,6 @@ class hxb_reader
 		in
 		loop CfrMember (read_uleb128 ch) c.cl_ordered_fields;
 		loop CfrStatic (read_uleb128 ch) c.cl_ordered_statics;
-		(match c.cl_kind with KModuleFields md -> md.m_statics <- Some c; | _ -> ());
 
 	method read_enum_fields (e : tenum) =
 		type_type_parameters <- Array.of_list e.e_params;
@@ -1470,6 +1523,7 @@ class hxb_reader
 		infos.mt_params <- Array.to_list type_type_parameters;
 		infos.mt_using <- self#read_list (fun () ->
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			let p = self#read_pos in
 			(c,p)
 		)
@@ -1481,11 +1535,12 @@ class hxb_reader
 		| 3 -> KGeneric
 		| 4 ->
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			let tl = self#read_types in
 			KGenericInstance(c,tl)
 		| 5 -> KMacroType
 		| 6 -> KGenericBuild (self#read_list (fun () -> self#read_cfield))
-		| 7 -> KAbstractImpl self#read_abstract_ref
+		| 7 -> KAbstractImpl (Lazy.force self#read_abstract_ref)
 		| 8 -> KModuleFields current_module
 		| i ->
 			error (Printf.sprintf "Invalid class kind id: %i" i)
@@ -1495,6 +1550,7 @@ class hxb_reader
 		c.cl_kind <- self#read_class_kind;
 		let read_relation () =
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			let tl = self#read_types in
 			(c,tl)
 		in
@@ -1502,10 +1558,13 @@ class hxb_reader
 		c.cl_implements <- self#read_list read_relation;
 		c.cl_dynamic <- self#read_option (fun () -> self#read_type_instance);
 		c.cl_array_access <- self#read_option (fun () -> self#read_type_instance);
+		(match c.cl_kind with
+			| KModuleFields md -> md.m_statics <- Some c;
+			| _ -> ());
 
 	method read_abstract (a : tabstract) =
 		self#read_common_module_type (Obj.magic a);
-		a.a_impl <- self#read_option (fun () -> self#read_class_ref);
+		a.a_impl <- self#read_option (fun () -> Lazy.force self#read_class_ref);
 		begin match read_byte ch with
 			| 0 ->
 				a.a_this <- TAbstract(a,extract_param_types a.a_params)
@@ -1514,6 +1573,7 @@ class hxb_reader
 		end;
 		a.a_from <- self#read_list (fun () -> self#read_type_instance);
 		a.a_to <- self#read_list (fun () -> self#read_type_instance);
+		a.a_extern <- self#read_bool;
 		a.a_enum <- self#read_bool;
 
 	method read_abstract_fields (a : tabstract) =
@@ -1521,6 +1581,7 @@ class hxb_reader
 		a.a_read <- self#read_option (fun () -> self#read_field_ref);
 		a.a_write <- self#read_option (fun () -> self#read_field_ref);
 		a.a_call <- self#read_option (fun () -> self#read_field_ref);
+		a.a_constructor <- self#read_option (fun () -> self#read_field_ref);
 
 		a.a_ops <- self#read_list (fun () ->
 			let i = read_byte ch in
@@ -1556,7 +1617,7 @@ class hxb_reader
 
 	method read_enum (e : tenum) =
 		self#read_common_module_type (Obj.magic e);
-		e.e_extern <- self#read_bool;
+		e.e_flags <- read_uleb128 ch;
 		e.e_names <- self#read_list (fun () -> self#read_string);
 
 	method read_typedef (td : tdef) =
@@ -1583,15 +1644,33 @@ class hxb_reader
 		let a = Array.init l (fun i ->
 			let en = self#read_enum_ref in
 			let name = self#read_string in
-			PMap.find name en.e_constrs
+			Lazy.from_fun (fun () ->
+				let en = Lazy.force en in
+				PMap.find name en.e_constrs
+			)
 		) in
 		enum_fields <- a
 
-	method read_afr =
+	method read_ofr =
 		let l = read_uleb128 ch in
 		let a = Array.init l (fun _ -> self#read_class_field_forward) in
 		anon_fields <- a
 
+	method read_ofd =
+		let l = read_uleb128 ch in
+		for _ = 0 to l - 1 do
+			let index = read_uleb128 ch in
+			let cf = anon_fields.(index) in
+			self#read_class_field_and_overloads_data cf;
+		done
+
+	method read_obd =
+		let l = read_uleb128 ch in
+		for _ = 0 to l - 1 do
+			let index = read_uleb128 ch in
+			self#read_anon anons.(index)
+		done
+
 	method read_cfr =
 		let l = read_uleb128 ch in
 		let a = Array.init l (fun i ->
@@ -1603,44 +1682,56 @@ class hxb_reader
 				| 3 -> CfrInit
 				| _ -> die "" __LOC__
 			in
-			let cf =  match kind with
-				| CfrStatic ->
-					let name = self#read_string in
-					begin try
-						PMap.find name c.cl_statics
-					with Not_found ->
-						raise (HxbFailure (Printf.sprintf "Could not read static field %s on %s while hxbing %s" name (s_type_path c.cl_path) (s_type_path current_module.m_path)))
-					end;
+			let name = match kind with
+				| CfrStatic
 				| CfrMember ->
-					let name = self#read_string in
-					begin try
-						PMap.find name c.cl_fields
-					with Not_found ->
-						raise (HxbFailure (Printf.sprintf "Could not read instance field %s on %s while hxbing %s" name (s_type_path c.cl_path) (s_type_path current_module.m_path)))
-					end
-				| CfrConstructor ->
-					Option.get c.cl_constructor
+					Some self#read_string
+				| CfrConstructor
 				| CfrInit ->
-					Option.get c.cl_init
-			in
-			let pick_overload cf depth =
-				let rec loop depth cfl = match cfl with
-					| cf :: cfl ->
-						if depth = 0 then
-							cf
-						else
-							loop (depth - 1) cfl
-					| [] ->
-						raise (HxbFailure (Printf.sprintf "Bad overload depth for %s on %s: %i" cf.cf_name (s_type_path c.cl_path) depth))
-				in
-				let cfl = cf :: cf.cf_overloads in
-				loop depth cfl
+					None
 			in
 			let depth = read_uleb128 ch in
-			if depth = 0 then
-				cf
-			else
-				pick_overload cf depth;
+
+			Lazy.from_fun (fun () ->
+				let c = Lazy.force c in
+				let cf = match kind with
+					| CfrStatic ->
+						let name = Option.get name in
+						begin try
+							PMap.find name c.cl_statics
+						with Not_found ->
+							raise (HxbFailure (Printf.sprintf "Could not read static field %s on %s while hxbing %s" name (s_type_path c.cl_path) (s_type_path current_module.m_path)))
+						end;
+					| CfrMember ->
+						let name = Option.get name in
+						begin try
+							PMap.find name c.cl_fields
+						with Not_found ->
+							raise (HxbFailure (Printf.sprintf "Could not read instance field %s on %s while hxbing %s" name (s_type_path c.cl_path) (s_type_path current_module.m_path)))
+						end
+					| CfrConstructor ->
+						Option.get c.cl_constructor
+					| CfrInit ->
+						Option.get c.cl_init
+				in
+				let pick_overload cf depth =
+					let rec loop depth cfl = match cfl with
+						| cf :: cfl ->
+							if depth = 0 then
+								cf
+							else
+								loop (depth - 1) cfl
+						| [] ->
+							raise (HxbFailure (Printf.sprintf "Bad overload depth for %s on %s: %i" cf.cf_name (s_type_path c.cl_path) depth))
+					in
+					let cfl = cf :: cf.cf_overloads in
+					loop depth cfl
+				in
+				if depth = 0 then
+					cf
+				else
+					pick_overload cf depth;
+			)
 		) in
 		class_fields <- a
 
@@ -1648,12 +1739,14 @@ class hxb_reader
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
 			let c = classes.(i) in
+			let c = Lazy.force c in
 			self#read_class_fields c;
 		done
 
 	method read_exd =
 		ignore(self#read_list (fun () ->
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			self#read_list (fun () ->
 				let cf = self#read_field_ref in
 				let length = read_uleb128 ch in
@@ -1676,14 +1769,12 @@ class hxb_reader
 					read_expressions ()
 				else begin
 					let t = cf.cf_type in
-					let r = ref (lazy_available t) in
-					r := lazy_wait (fun() ->
+					let tl = api#make_lazy_type cf.cf_type (fun () ->
 						cf.cf_type <- t;
-						r := lazy_available t;
-						read_expressions ();
+						read_expressions();
 						t
-					);
-					cf.cf_type <- TLazy r
+					) in
+					cf.cf_type <- tl
 				end
 			)
 		))
@@ -1691,7 +1782,7 @@ class hxb_reader
 	method read_afd =
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
-			let a = abstracts.(i) in
+			let a = Lazy.force abstracts.(i) in
 			self#read_abstract_fields a;
 		done
 
@@ -1699,27 +1790,28 @@ class hxb_reader
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
 			let c = classes.(i) in
+			let c = Lazy.force c in
 			self#read_class c;
 		done
 
 	method read_abd =
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
-			let a = abstracts.(i) in
+			let a = Lazy.force abstracts.(i) in
 			self#read_abstract a;
 		done
 
 	method read_end =
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
-			let en = enums.(i) in
+			let en = Lazy.force enums.(i) in
 			self#read_enum en;
 		done
 
 	method read_efd =
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
-			let e = enums.(i) in
+			let e = Lazy.force enums.(i) in
 			self#read_enum_fields e;
 			Type.unify (TType(enum_module_type e,[])) e.e_type
 		done
@@ -1748,62 +1840,68 @@ class hxb_reader
 			an.a_status := Extend self#read_types;
 			read_fields ()
 		| _ -> assert false
-		end;
-
-		an
+		end
 
 	method read_tdd =
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
-			let t = typedefs.(i) in
+			let t = Lazy.force typedefs.(i) in
 			self#read_typedef t;
 		done
 
 	method read_clr =
 		let l = read_uleb128 ch in
 		classes <- (Array.init l (fun i ->
-				let (pack,mname,tname) = self#read_full_path in
+			let (pack,mname,tname) = self#read_full_path in
+			Lazy.from_fun (fun () ->
 				match self#resolve_type pack mname tname with
 				| TClassDecl c ->
 					c
 				| _ ->
 					error ("Unexpected type where class was expected: " ^ (s_type_path (pack,tname)))
+			)
 		))
 
 	method read_abr =
 		let l = read_uleb128 ch in
 		abstracts <- (Array.init l (fun i ->
 			let (pack,mname,tname) = self#read_full_path in
-			match self#resolve_type pack mname tname with
-			| TAbstractDecl a ->
-				a
-			| _ ->
-				error ("Unexpected type where abstract was expected: " ^ (s_type_path (pack,tname)))
+			Lazy.from_fun (fun () ->
+				match self#resolve_type pack mname tname with
+				| TAbstractDecl a ->
+					a
+				| _ ->
+					error ("Unexpected type where abstract was expected: " ^ (s_type_path (pack,tname)))
+			)
 		))
 
 	method read_enr =
 		let l = read_uleb128 ch in
 		enums <- (Array.init l (fun i ->
 			let (pack,mname,tname) = self#read_full_path in
-			match self#resolve_type pack mname tname with
-			| TEnumDecl en ->
-				en
-			| _ ->
-				error ("Unexpected type where enum was expected: " ^ (s_type_path (pack,tname)))
+			Lazy.from_fun (fun () ->
+				match self#resolve_type pack mname tname with
+				| TEnumDecl en ->
+					en
+				| _ ->
+					error ("Unexpected type where enum was expected: " ^ (s_type_path (pack,tname)))
+			)
 		))
 
 	method read_tdr =
 		let l = read_uleb128 ch in
 		typedefs <- (Array.init l (fun i ->
 			let (pack,mname,tname) = self#read_full_path in
-			match self#resolve_type pack mname tname with
-			| TTypeDecl tpd ->
-				tpd
-			| _ ->
-				error ("Unexpected type where typedef was expected: " ^ (s_type_path (pack,tname)))
+			Lazy.from_fun (fun () ->
+				match self#resolve_type pack mname tname with
+				| TTypeDecl tpd ->
+					tpd
+				| _ ->
+					error ("Unexpected type where typedef was expected: " ^ (s_type_path (pack,tname)))
+			)
 		))
 
-	method read_mdr =
+	method read_imports =
 		let length = read_uleb128 ch in
 		for _ = 0 to length - 1 do
 			let path = self#read_path in
@@ -1856,7 +1954,7 @@ class hxb_reader
 				let read_field () =
 					let name = self#read_string in
 					let pos,name_pos = self#read_pos_pair in
-					let index = read_byte ch in
+					let index = read_uleb128 ch in
 
 					{ null_enum_field with
 						ef_name = name;
@@ -1878,12 +1976,12 @@ class hxb_reader
 			| 2 ->
 				let td = mk_typedef current_module path pos name_pos (mk_mono()) in
 				td.t_params <- Array.to_list params;
-				typedefs <- Array.append typedefs (Array.make 1 td);
+				typedefs <- Array.append typedefs (Array.make 1 (Lazy.from_val td));
 				TTypeDecl td
 			| 3 ->
 				let a = mk_abstract current_module path pos name_pos in
 				a.a_params <- Array.to_list params;
-				abstracts <- Array.append abstracts (Array.make 1 a);
+				abstracts <- Array.append abstracts (Array.make 1 (Lazy.from_val a));
 				TAbstractDecl a
 			| _ ->
 				error ("Invalid type kind: " ^ (string_of_int kind));
@@ -1913,11 +2011,13 @@ class hxb_reader
 			doc_pool <- self#read_string_pool;
 		| MDF ->
 			current_module <- self#read_mdf;
+			incr stats.modules_partially_restored;
+			if not full_restore then current_module.m_extra.m_display_deps <- Some PMap.empty
 		| MTF ->
 			current_module.m_types <- self#read_mtf;
 			api#add_module current_module;
-		| MDR ->
-			self#read_mdr;
+		| IMP ->
+			if full_restore then self#read_imports;
 		| CLR ->
 			self#read_clr;
 		| ENR ->
@@ -1926,8 +2026,12 @@ class hxb_reader
 			self#read_abr;
 		| TDR ->
 			self#read_tdr;
-		| AFR ->
-			self#read_afr;
+		| OFR ->
+			self#read_ofr;
+		| OFD ->
+			self#read_ofd;
+		| OBD ->
+			self#read_obd
 		| CLD ->
 			self#read_cld;
 		| END ->
@@ -1955,18 +2059,39 @@ class hxb_reader
 		| EOM ->
 			incr stats.modules_fully_restored;
 
+	method private get_backtrace () = Printexc.get_raw_backtrace ()
+	method private get_callstack () = Printexc.get_callstack 200
+
+	method private failwith chunk msg backtrace =
+		let msg =
+			(Printf.sprintf "Compiler failure while reading hxb chunk %s of %s: %s\n" (string_of_chunk_kind chunk) (s_type_path mpath) (msg))
+			^ "Please submit an issue at https://github.com/HaxeFoundation/haxe/issues/new\n"
+			^ "Attach the following information:"
+		in
+		let backtrace = Printexc.raw_backtrace_to_string backtrace in
+		raise (Globals.Ice (msg, backtrace))
+
 	method private read_chunk_data kind =
 		let path = String.concat "_" (ExtLib.String.nsplit (s_type_path mpath) ".") in
 		let id = ["hxb";"read";string_of_chunk_kind kind;path] in
-		let close = Timer.timer id in
-		self#read_chunk_data' kind;
+		let close = match timer_ctx with
+			| Some timer_ctx -> Timer.start_timer timer_ctx id
+			| None -> (fun () -> ())
+		in
+		try
+			self#read_chunk_data' kind
+		with Invalid_argument msg -> begin
+			close();
+			self#failwith kind msg (self#get_backtrace ())
+		end;
 		close()
 
 	method read_chunks (new_api : hxb_reader_api) (chunks : cached_chunks) =
-		fst (self#read_chunks_until new_api chunks EOM)
+		fst (self#read_chunks_until new_api chunks EOM true)
 
-	method read_chunks_until (new_api : hxb_reader_api) (chunks : cached_chunks) end_chunk =
+	method read_chunks_until (new_api : hxb_reader_api) (chunks : cached_chunks) end_chunk full_restore' =
 		api <- new_api;
+		full_restore <- full_restore';
 		let rec loop = function
 			| (kind,data) :: chunks ->
 				ch <- BytesWithPosition.create data;
@@ -1979,6 +2104,7 @@ class hxb_reader
 
 	method read (new_api : hxb_reader_api) (bytes : bytes) =
 		api <- new_api;
+		full_restore <- true;
 		ch <- BytesWithPosition.create bytes;
 		if (Bytes.to_string (read_bytes ch 3)) <> "hxb" then
 			raise (HxbFailure "magic");

+ 14 - 0
src/compiler/hxb/hxbReaderApi.ml

@@ -9,4 +9,18 @@ class virtual hxb_reader_api = object(self)
 	method virtual basic_types : basic_types
 	method virtual get_var_id : int -> int
 	method virtual read_expression_eagerly : tclass_field -> bool
+	method virtual make_lazy_type : Type.t -> (unit -> Type.t) -> Type.t
 end
+
+class hxb_reader_api_null = object(self)
+	inherit hxb_reader_api
+
+	method make_module _ = assert false
+	method add_module _ = assert false
+	method resolve_type _ _ _ = assert false
+	method resolve_module _ = assert false
+	method basic_types = assert false
+	method get_var_id _ = assert false
+	method read_expression_eagerly _ = assert false
+	method make_lazy_type _ _ = assert false
+end

+ 179 - 121
src/compiler/hxb/hxbWriter.ml

@@ -45,52 +45,6 @@ let unop_index op flag = match op,flag with
 	| NegBits,Postfix -> 10
 	| Spread,Postfix -> 11
 
-module StringHashtbl = Hashtbl.Make(struct
-	type t = string
-
-	let equal =
-		String.equal
-
-	let hash s =
-		(* What's the best here? *)
-		Hashtbl.hash s
-end)
-
-module StringPool = struct
-	type t = {
-		lut : int StringHashtbl.t;
-		items : string DynArray.t;
-		mutable closed : bool;
-	}
-
-	let create () = {
-		lut = StringHashtbl.create 16;
-		items = DynArray.create ();
-		closed = false;
-	}
-
-	let add sp s =
-		assert (not sp.closed);
-		let index = DynArray.length sp.items in
-		StringHashtbl.add sp.lut s index;
-		DynArray.add sp.items s;
-		index
-
-	let get sp s =
-		StringHashtbl.find sp.lut s
-
-	let get_or_add sp s =
-		try
-			get sp s
-		with Not_found ->
-			add sp s
-
-	let finalize sp =
-		assert (not sp.closed);
-		sp.closed <- true;
-		DynArray.to_list sp.items,DynArray.length sp.items
-end
-
 module Pool = struct
 	type ('key,'value) t = {
 		lut : ('key,int) Hashtbl.t;
@@ -443,6 +397,7 @@ type hxb_writer = {
 	config : HxbWriterConfig.writer_target_config;
 	warn : Warning.warning -> string -> Globals.pos -> unit;
 	anon_id : Type.t Tanon_identification.tanon_identification;
+	identified_anons : (tanon,int) IdentityPool.t;
 	mutable current_module : module_def;
 	chunks : Chunk.t DynArray.t;
 	cp : StringPool.t;
@@ -453,8 +408,8 @@ type hxb_writer = {
 	enums : (path,tenum) Pool.t;
 	typedefs : (path,tdef) Pool.t;
 	abstracts : (path,tabstract) Pool.t;
-	anons : (path,tanon) Pool.t;
-	anon_fields : (string,tclass_field,unit) HashedIdentityPool.t;
+	anons : (path,bytes option) Pool.t;
+	anon_fields : (string,tclass_field,bytes option) HashedIdentityPool.t;
 	tmonos : (tmono,unit) IdentityPool.t;
 
 	own_classes : (path,tclass) Pool.t;
@@ -468,11 +423,26 @@ type hxb_writer = {
 	mutable field_type_parameters : (typed_type_param,unit) IdentityPool.t;
 	mutable local_type_parameters : (typed_type_param,unit) IdentityPool.t;
 	mutable field_stack : unit list;
+	mutable wrote_local_type_param : bool;
+	mutable needs_local_context : bool;
 	unbound_ttp : (typed_type_param,unit) IdentityPool.t;
+	unclosed_mono : (tmono,unit) IdentityPool.t;
 	t_instance_chunk : Chunk.t;
 }
 
 module HxbWriter = struct
+	let get_backtrace () = Printexc.get_raw_backtrace ()
+	let get_callstack () = Printexc.get_callstack 200
+
+	let failwith writer msg backtrace =
+		let msg =
+			(Printf.sprintf "Compiler failure while writing hxb chunk %s of %s: %s\n" (string_of_chunk_kind writer.chunk.kind) (s_type_path writer.current_module.m_path) (msg))
+			^ "Please submit an issue at https://github.com/HaxeFoundation/haxe/issues/new\n"
+			^ "Attach the following information:"
+		in
+		let backtrace = Printexc.raw_backtrace_to_string backtrace in
+		raise (Globals.Ice (msg, backtrace))
+
 	let in_nested_scope writer = match writer.field_stack with
 		| [] -> false (* can happen for cl_init and in EXD *)
 		| [_] -> false
@@ -484,8 +454,8 @@ module HxbWriter = struct
 		let initial_size = match kind with
 			| EOT | EOF | EOM -> 0
 			| MDF -> 16
-			| MTF | MDR | CLR | END | ABD | ENR | ABR | TDR | EFR | CFR | AFD -> 64
-			| AFR | CLD | TDD | EFD -> 128
+			| MTF | IMP | CLR | END | ABD | ENR | ABR | TDR | EFR | CFR | AFD -> 64
+			| OFR | OFD | OBD | CLD | TDD | EFD -> 128
 			| STR | DOC -> 256
 			| CFD | EXD -> 512
 		in
@@ -526,7 +496,7 @@ module HxbWriter = struct
 	let write_full_path writer (pack : string list) (mname : string) (tname : string) =
 		Chunk.write_list writer.chunk pack (Chunk.write_string writer.chunk);
 		if mname = "" || tname = "" then
-			die (Printf.sprintf "write_full_path: pack = %s, mname = %s, tname = %s" (String.concat "." pack) mname tname) __LOC__;
+			failwith writer (Printf.sprintf "write_full_path: pack = %s, mname = %s, tname = %s" (String.concat "." pack) mname tname) (get_callstack ());
 		Chunk.write_string writer.chunk mname;
 		Chunk.write_string writer.chunk tname
 
@@ -979,20 +949,22 @@ module HxbWriter = struct
 			Chunk.write_uleb128 writer.chunk (Pool.add writer.enum_fields key (en,ef))
 
 	let write_var_kind writer vk =
-		let b = match vk with
-			| VUser TVOLocalVariable -> 0
-			| VUser TVOArgument -> 1
-			| VUser TVOForVariable -> 2
-			| VUser TVOPatternVariable -> 3
-			| VUser TVOCatchVariable -> 4
-			| VUser TVOLocalFunction -> 5
-			| VGenerated -> 6
-			| VInlined -> 7
-			| VInlinedConstructorVariable -> 8
-			| VExtractorVariable -> 9
-			| VAbstractThis -> 10
-		in
-		Chunk.write_u8 writer.chunk b
+		let b,sl = match vk with
+			| VUser TVOLocalVariable -> 0, []
+			| VUser TVOArgument -> 1, []
+			| VUser TVOForVariable -> 2, []
+			| VUser TVOPatternVariable -> 3, []
+			| VUser TVOCatchVariable -> 4, []
+			| VUser TVOLocalFunction -> 5, []
+			| VGenerated -> 6, []
+			| VInlined -> 7, []
+			| VInlinedConstructorVariable sl -> 8, sl
+			| VExtractorVariable -> 9, []
+			| VAbstractThis -> 10, []
+		in begin
+			Chunk.write_u8 writer.chunk b;
+			if (b == 8) then Chunk.write_list writer.chunk sl (Chunk.write_string writer.chunk);
+		end
 
 	let write_var writer fctx v =
 		Chunk.write_uleb128 writer.chunk v.v_id;
@@ -1002,7 +974,7 @@ module HxbWriter = struct
 		write_metadata writer v.v_meta;
 		write_pos writer v.v_pos
 
-	let rec write_anon writer (an : tanon) (ttp : type_params) =
+	let rec write_anon writer (an : tanon) =
 		let write_fields () =
 			let restore = start_temporary_chunk writer 256 in
 			let i = ref 0 in
@@ -1033,17 +1005,34 @@ module HxbWriter = struct
 			assert false
 		end
 
-	and write_anon_ref writer (an : tanon) (ttp : type_params) =
-		let pfm = Option.get (writer.anon_id#identify_anon ~strict:true an) in
+	and write_anon_ref writer (an : tanon) =
 		try
-			let index = Pool.get writer.anons pfm.pfm_path in
+			let index = IdentityPool.get writer.identified_anons an in
 			Chunk.write_u8 writer.chunk 0;
 			Chunk.write_uleb128 writer.chunk index
 		with Not_found ->
-			let index = Pool.add writer.anons pfm.pfm_path an in
-			Chunk.write_u8 writer.chunk 1;
-			Chunk.write_uleb128 writer.chunk index;
-			write_anon writer an ttp
+			let pfm = writer.anon_id#identify_anon ~strict:true an in
+			try
+				let index = Pool.get writer.anons pfm.pfm_path in
+				Chunk.write_u8 writer.chunk 0;
+				Chunk.write_uleb128 writer.chunk index
+			with Not_found ->
+				let restore = start_temporary_chunk writer 256 in
+				writer.needs_local_context <- false;
+				write_anon writer an;
+				let bytes = restore (fun new_chunk -> Chunk.get_bytes new_chunk) in
+				if writer.needs_local_context then begin
+					let index = Pool.add writer.anons pfm.pfm_path None in
+					ignore(IdentityPool.add writer.identified_anons an index);
+					Chunk.write_u8 writer.chunk 1;
+					Chunk.write_uleb128 writer.chunk index;
+					Chunk.write_bytes writer.chunk bytes
+				end else begin
+					let index = Pool.add writer.anons pfm.pfm_path (Some bytes) in
+					ignore(IdentityPool.add writer.identified_anons an index);
+					Chunk.write_u8 writer.chunk 0;
+					Chunk.write_uleb128 writer.chunk index;
+				end
 
 	and write_anon_field_ref writer cf =
 		try
@@ -1051,10 +1040,25 @@ module HxbWriter = struct
 			Chunk.write_u8 writer.chunk 0;
 			Chunk.write_uleb128 writer.chunk index
 		with Not_found ->
-			let index = HashedIdentityPool.add writer.anon_fields cf.cf_name cf () in
-			Chunk.write_u8 writer.chunk 1;
-			Chunk.write_uleb128 writer.chunk index;
-			ignore(write_class_field_and_overloads_data writer true cf)
+			let restore = start_temporary_chunk writer 256 in
+			let old = writer.wrote_local_type_param in
+			writer.wrote_local_type_param <- false;
+			ignore(write_class_field_and_overloads_data writer true cf);
+			let bytes = restore (fun new_chunk -> Chunk.get_bytes new_chunk) in
+			if writer.needs_local_context || writer.wrote_local_type_param then begin
+				(* If we access something from the method scope, we have to write the anon field immediately.
+				   This should be fine because in such cases the field cannot be referenced elsewhere. *)
+				let index = HashedIdentityPool.add writer.anon_fields cf.cf_name cf None in
+				writer.needs_local_context <- true;
+				Chunk.write_u8 writer.chunk 1;
+				Chunk.write_uleb128 writer.chunk index;
+				Chunk.write_bytes writer.chunk bytes
+			end else begin
+				let index = HashedIdentityPool.add writer.anon_fields cf.cf_name cf (Some bytes) in
+				Chunk.write_u8 writer.chunk 0;
+				Chunk.write_uleb128 writer.chunk index;
+			end;
+			writer.wrote_local_type_param <- old
 
 	(* Type instances *)
 
@@ -1063,24 +1067,32 @@ module HxbWriter = struct
 			begin match ttp.ttp_host with
 			| TPHType ->
 				let i = Pool.get writer.type_type_parameters ttp.ttp_name in
+				(* TODO: this isn't correct, but if we don't do this we'll have to communicate the current class *)
+				writer.wrote_local_type_param <- true;
 				Chunk.write_u8 writer.chunk 1;
 				Chunk.write_uleb128 writer.chunk i
 			| TPHMethod | TPHEnumConstructor | TPHAnonField | TPHConstructor ->
 				let i = IdentityPool.get writer.field_type_parameters ttp in
+				writer.wrote_local_type_param <- true;
 				Chunk.write_u8 writer.chunk 2;
 				Chunk.write_uleb128 writer.chunk i;
 			| TPHLocal ->
 				let index = IdentityPool.get writer.local_type_parameters ttp in
+				writer.wrote_local_type_param <- true;
 				Chunk.write_u8 writer.chunk 3;
 				Chunk.write_uleb128 writer.chunk index;
+			| TPHUnbound ->
+				raise Not_found
 		end with Not_found ->
 			(try ignore(IdentityPool.get writer.unbound_ttp ttp) with Not_found -> begin
 				ignore(IdentityPool.add writer.unbound_ttp ttp ());
-				let p = { null_pos with pfile = (Path.UniqueKey.lazy_path writer.current_module.m_extra.m_file) } in
+				let p = file_pos (Path.UniqueKey.lazy_path writer.current_module.m_extra.m_file) in
 				let msg = Printf.sprintf "Unbound type parameter %s" (s_type_path ttp.ttp_class.cl_path) in
 				writer.warn WUnboundTypeParameter msg p
 			end);
-			Chunk.write_u8 writer.chunk 4; (* TDynamic None *)
+			writer.wrote_local_type_param <- true;
+			Chunk.write_u8 writer.chunk 5;
+			write_path writer ttp.ttp_class.cl_path;
 		end
 
 	(*
@@ -1170,7 +1182,19 @@ module HxbWriter = struct
 			| TInst ({cl_path = ([],"String")},[]) ->
 				Chunk.write_u8 writer.chunk 104;
 			| TMono r ->
-				Monomorph.close r;
+				(try Monomorph.close r with TUnification.Unify_error e ->
+					try ignore(IdentityPool.get writer.unclosed_mono r) with Not_found -> begin
+						ignore(IdentityPool.add writer.unclosed_mono r ());
+
+						let p = file_pos (Path.UniqueKey.lazy_path writer.current_module.m_extra.m_file) in
+						let msg = Printf.sprintf
+							"Error while handling unclosed monomorph:\n%s\n\n%s"
+								(Error.error_msg (Unify e))
+								"Unclosed monomorph should not reach hxb writer, please submit an issue at https://github.com/HaxeFoundation/haxe/issues/new"
+						in
+						writer.warn WUnclosedMonomorph msg p
+					end;
+				);
 				begin match r.tm_type with
 				| None ->
 					Chunk.write_u8 writer.chunk 0;
@@ -1239,7 +1263,7 @@ module HxbWriter = struct
 				Chunk.write_u8 writer.chunk 80;
 			| TAnon an ->
 				Chunk.write_u8 writer.chunk 81;
-				write_anon_ref writer an []
+				write_anon_ref writer an
 			| TDynamic (Some t) ->
 				Chunk.write_u8 writer.chunk 89;
 				write_type_instance writer t
@@ -1459,12 +1483,6 @@ module HxbWriter = struct
 				loop e1;
 				loop e2;
 				false;
-			| TFor(v,e1,e2) ->
-				Chunk.write_u8 writer.chunk 86;
-				declare_var v;
-				loop e1;
-				loop e2;
-				false;
 			(* control flow 90-99 *)
 			| TReturn None ->
 				Chunk.write_u8 writer.chunk 90;
@@ -1638,6 +1656,7 @@ module HxbWriter = struct
 				| TPHEnumConstructor -> 3
 				| TPHAnonField -> 4
 				| TPHLocal -> 5
+				| TPHUnbound -> 6
 			in
 			Chunk.write_u8 writer.chunk i
 		in
@@ -1716,6 +1735,7 @@ module HxbWriter = struct
 	and write_class_field_forward writer cf =
 		Chunk.write_string writer.chunk cf.cf_name;
 		write_pos_pair writer cf.cf_pos cf.cf_name_pos;
+		write_metadata writer cf.cf_meta;
 		Chunk.write_list writer.chunk cf.cf_overloads (fun cf ->
 			write_class_field_forward writer cf;
 		);
@@ -1734,11 +1754,11 @@ module HxbWriter = struct
 					write_type_parameters writer ltp
 				end;
 				Chunk.write_option writer.chunk fctx.texpr_this (fun e -> write_type_instance writer e.etype);
-				let items,length = StringPool.finalize fctx.t_pool in
-				Chunk.write_uleb128 writer.chunk length;
-				List.iter (fun bytes ->
+				let a = StringPool.finalize fctx.t_pool in
+				Chunk.write_uleb128 writer.chunk a.length;
+				StringDynArray.iter a (fun bytes ->
 					Chunk.write_bytes writer.chunk (Bytes.unsafe_of_string bytes)
-				) items;
+				);
 				Chunk.write_uleb128 writer.chunk (DynArray.length fctx.vars);
 				DynArray.iter (fun (v,v_id) ->
 					v.v_id <- v_id;
@@ -1764,7 +1784,6 @@ module HxbWriter = struct
 		write_type_instance writer cf.cf_type;
 		Chunk.write_uleb128 writer.chunk cf.cf_flags;
 		maybe_write_documentation writer cf.cf_doc;
-		write_metadata writer cf.cf_meta;
 		write_field_kind writer cf.cf_kind;
 		let expr_chunk = match cf.cf_expr with
 			| None ->
@@ -1879,6 +1898,7 @@ module HxbWriter = struct
 		end;
 		Chunk.write_list writer.chunk a.a_from (write_type_instance writer);
 		Chunk.write_list writer.chunk a.a_to (write_type_instance writer);
+		Chunk.write_bool writer.chunk a.a_extern;
 		Chunk.write_bool writer.chunk a.a_enum
 
 	let write_abstract_fields writer (a : tabstract) =
@@ -1893,6 +1913,7 @@ module HxbWriter = struct
 		Chunk.write_option writer.chunk a.a_read (write_field_ref writer c CfrStatic );
 		Chunk.write_option writer.chunk a.a_write (write_field_ref writer c CfrStatic);
 		Chunk.write_option writer.chunk a.a_call (write_field_ref writer c CfrStatic);
+		Chunk.write_option writer.chunk a.a_constructor (write_field_ref writer c CfrStatic);
 
 		Chunk.write_list writer.chunk a.a_ops (fun (op, cf) ->
 			Chunk.write_u8 writer.chunk (binop_index op);
@@ -1915,7 +1936,7 @@ module HxbWriter = struct
 	let write_enum writer (e : tenum) =
 		select_type writer e.e_path;
 		write_common_module_type writer (Obj.magic e);
-		Chunk.write_bool writer.chunk e.e_extern;
+		Chunk.write_uleb128 writer.chunk e.e_flags;
 		Chunk.write_list writer.chunk e.e_names (Chunk.write_string writer.chunk)
 
 	let write_typedef writer (td : tdef) =
@@ -1996,13 +2017,21 @@ module HxbWriter = struct
 			Chunk.write_list writer.chunk (PMap.foldi (fun s f acc -> (s,f) :: acc) e.e_constrs []) (fun (s,ef) ->
 				Chunk.write_string writer.chunk s;
 				write_pos_pair writer ef.ef_pos ef.ef_name_pos;
-				Chunk.write_u8 writer.chunk ef.ef_index
+				Chunk.write_uleb128 writer.chunk ef.ef_index
 			);
 		| TAbstractDecl a ->
 			()
 		| TTypeDecl t ->
 			()
 
+	let write_string_pool writer kind a =
+		start_chunk writer kind;
+		Chunk.write_uleb128 writer.chunk a.StringDynArray.length;
+		StringDynArray.iter a (fun s ->
+			let b = Bytes.unsafe_of_string s in
+			Chunk.write_bytes_length_prefixed writer.chunk b;
+		)
+
 	let write_module writer (m : module_def) =
 		writer.current_module <- m;
 
@@ -2121,11 +2150,28 @@ module HxbWriter = struct
 
 		let items = HashedIdentityPool.finalize writer.anon_fields in
 		if DynArray.length items > 0 then begin
-			start_chunk writer AFR;
+			start_chunk writer OFR;
 			Chunk.write_uleb128 writer.chunk (DynArray.length items);
 			DynArray.iter (fun (cf,_) ->
 				write_class_field_forward writer cf
 			) items;
+
+			let anon_fields_with_expr = DynArray.create () in
+			DynArray.iteri (fun i (_,bytes) -> match bytes with
+				| None ->
+					()
+				| Some bytes ->
+					DynArray.add anon_fields_with_expr (i,bytes)
+			) items;
+			if DynArray.length anon_fields_with_expr > 0 then begin
+				start_chunk writer OFD;
+				Chunk.write_uleb128 writer.chunk (DynArray.length anon_fields_with_expr);
+				DynArray.iter (fun (index,bytes) ->
+					Chunk.write_uleb128 writer.chunk index;
+					Chunk.write_bytes writer.chunk bytes
+				) anon_fields_with_expr
+			end;
+
 		end;
 
 		let items = Pool.finalize writer.classes in
@@ -2164,47 +2210,55 @@ module HxbWriter = struct
 		start_chunk writer MDF;
 		write_path writer m.m_path;
 		Chunk.write_string writer.chunk (Path.UniqueKey.lazy_path m.m_extra.m_file);
-		Chunk.write_uleb128 writer.chunk (DynArray.length (Pool.finalize writer.anons));
+		let anons = Pool.finalize writer.anons in
+		Chunk.write_uleb128 writer.chunk (DynArray.length anons);
 		Chunk.write_uleb128 writer.chunk (DynArray.length (IdentityPool.finalize writer.tmonos));
 
+		let anons_without_context = DynArray.create () in
+		DynArray.iteri (fun i bytes -> match bytes with
+			| None ->
+				()
+			| Some bytes ->
+				DynArray.add anons_without_context (i,bytes)
+		) anons;
+		if DynArray.length anons_without_context > 0 then begin
+			start_chunk writer OBD;
+			Chunk.write_uleb128 writer.chunk (DynArray.length anons_without_context);
+			DynArray.iter (fun (i,bytes) ->
+				Chunk.write_uleb128 writer.chunk i;
+				Chunk.write_bytes writer.chunk bytes
+			) anons_without_context
+		end;
+
 		begin
-			let deps = DynArray.create () in
+			let imports = DynArray.create () in
 			PMap.iter (fun _ mdep ->
-				match mdep.md_kind with
-				| MCode | MExtern when mdep.md_sign = m.m_extra.m_sign ->
-					DynArray.add deps mdep.md_path;
+				match mdep.md_kind, mdep.md_origin with
+				| (MCode | MExtern), MDepFromImport when mdep.md_sign = m.m_extra.m_sign ->
+					DynArray.add imports mdep.md_path;
 				| _ ->
 					()
 			) m.m_extra.m_deps;
-			if DynArray.length deps > 0 then begin
-				start_chunk writer MDR;
-				Chunk.write_uleb128 writer.chunk (DynArray.length deps);
+
+			if DynArray.length imports > 0 then begin
+				start_chunk writer IMP;
+				Chunk.write_uleb128 writer.chunk (DynArray.length imports);
 				DynArray.iter (fun path ->
 					write_path writer path
-				) deps
-			end
+				) imports
+			end;
 		end;
 
 		start_chunk writer EOT;
 		start_chunk writer EOF;
 		start_chunk writer EOM;
 
-		let finalize_string_pool kind items length =
-			start_chunk writer kind;
-			Chunk.write_uleb128 writer.chunk length;
-			List.iter (fun s ->
-				let b = Bytes.unsafe_of_string s in
-				Chunk.write_bytes_length_prefixed writer.chunk b;
-			) items
-		in
-		begin
-			let items,length = StringPool.finalize writer.cp in
-			finalize_string_pool STR items length
-		end;
+		let a = StringPool.finalize writer.cp in
+		write_string_pool writer STR a;
 		begin
-			let items,length = StringPool.finalize writer.docs in
-			if length > 0 then
-				finalize_string_pool DOC items length
+			let a = StringPool.finalize writer.docs in
+			if a.length > 0 then
+				write_string_pool writer DOC a
 		end
 
 	let get_sorted_chunks writer =
@@ -2216,11 +2270,12 @@ module HxbWriter = struct
 end
 
 let create config warn anon_id =
-	let cp = StringPool.create () in
+	let cp = StringPool.create() in
 	{
 		config;
 		warn;
 		anon_id;
+		identified_anons = IdentityPool.create();
 		current_module = null_module;
 		chunks = DynArray.create ();
 		cp = cp;
@@ -2244,7 +2299,10 @@ let create config warn anon_id =
 		field_type_parameters = IdentityPool.create ();
 		local_type_parameters = IdentityPool.create ();
 		field_stack = [];
+		wrote_local_type_param = false;
+		needs_local_context = false;
 		unbound_ttp = IdentityPool.create ();
+		unclosed_mono = IdentityPool.create ();
 		t_instance_chunk = Chunk.create EOM cp 32;
 	}
 

+ 1 - 1
src/compiler/hxb/hxbWriterConfig.ml

@@ -115,4 +115,4 @@ let process_argument file =
 		| _ ->
 			config.archive_path <- file;
 	end;
-	Some config
+	Some config

+ 91 - 79
src/compiler/messageReporting.ml

@@ -4,63 +4,57 @@ open Common
 open CompilationContext
 
 let resolve_source file l1 p1 l2 p2 =
-	let ch = open_in_bin file in
-	let curline = ref 1 in
-	let lines = ref [] in
-	let rec loop p line =
-		let inc i line =
-			if (!curline >= l1) && (!curline <= l2) then lines := (!curline, line) :: !lines;
-			curline := !curline + 1;
-			(i, "")
-		in
-
-		let input_char_or_done ch line =
-			try input_char ch with End_of_file -> begin
-				ignore(inc 0 line);
-				raise End_of_file
-			end
-		in
+	if l1 = l2 && p1 = p2 && l1 = 1 && p1 = 1 then []
+	else begin
+		let ch = open_in_bin file in
+		let curline = ref 1 in
+		let lines = ref [] in
+		let rec loop p line =
+			let inc i line =
+				if (!curline >= l1) && (!curline <= l2) then lines := (!curline, line) :: !lines;
+				incr curline;
+				(i, "")
+			in
 
-		let read_char line = match input_char_or_done ch line with
-			| '\n' -> inc 1 line
-			| '\r' ->
-				ignore(input_char_or_done ch line);
-				inc 2 line
-			| c -> begin
-				let line = ref (line ^ (String.make 1 c)) in
-				let rec skip n =
-					if n > 0 then begin
-						let c = input_char_or_done ch !line in
-						line := !line ^ (String.make 1 c);
-						skip (n - 1)
-					end
-				in
+			let input_char_or_done ch line =
+				try input_char ch with End_of_file -> begin
+					ignore(inc 0 line);
+					raise End_of_file
+				end
+			in
 
-				let code = int_of_char c in
-				if code < 0xC0 then ()
-				else if code < 0xE0 then skip 1
-				else if code < 0xF0 then skip 2
-				else skip 3;
+			let read_char line = match input_char_or_done ch line with
+				| '\n' -> inc 1 line
+				| '\r' ->
+					ignore(input_char_or_done ch line);
+					inc 2 line
+				| c -> begin
+					let line = ref (line ^ (String.make 1 c)) in
+					let rec skip n =
+						if n > 0 then begin
+							let c = input_char_or_done ch !line in
+							line := !line ^ (String.make 1 c);
+							skip (n - 1)
+						end
+					in
+
+					let code = int_of_char c in
+					if code < 0xC0 then ()
+					else if code < 0xE0 then skip 1
+					else if code < 0xF0 then skip 2
+					else skip 3;
+
+					(1, !line)
+				end
+			in
 
-				(1, !line)
-			end
+			let (delta, line) = read_char line in
+			loop (p + delta) line
 		in
 
-		let (delta, line) = read_char line in
-		loop (p + delta) line
-	in
-
-	try loop 0 ""; with End_of_file -> close_in ch;
-	List.rev !lines
-
-let resolve_file ctx f =
-	let ext = StringHelper.extension f in
-	let second_ext = StringHelper.extension (StringHelper.remove_extension f) in
-	let platform_ext = "." ^ (platform_name_macro ctx) in
-	if platform_ext = second_ext then
-		(StringHelper.remove_extension (StringHelper.remove_extension f)) ^ ext
-	else
-		f
+		try loop 0 ""; with End_of_file -> close_in ch;
+		List.rev !lines
+	end
 
 let error_printer file line = Printf.sprintf "%s:%d:" file line
 
@@ -80,7 +74,7 @@ let create_error_context absolute_positions = {
 	previous = None;
 }
 
-let compiler_pretty_message_string com ectx cm =
+let compiler_pretty_message_string defines ectx cm =
 	match cm.cm_message with
 	(* Filter some messages that don't add much when using this message renderer *)
 	| "End of overload failure reasons" -> None
@@ -95,12 +89,11 @@ let compiler_pretty_message_string com ectx cm =
 				let epos = if is_unknown_file cm.cm_pos.pfile then "(unknown position)" else cm.cm_pos.pfile in
 				(-1, -1, -1, -1, epos, [])
 			end else try begin
-				let f = resolve_file com cm.cm_pos.pfile in
-				let f = Common.find_file com f in
 				let l1, p1, l2, p2 = Lexer.get_pos_coords cm.cm_pos in
-				let lines = resolve_source f l1 p1 l2 p2 in
+				let lines = resolve_source cm.cm_pos.pfile l1 p1 l2 p2 in
 				let epos =
-					if ectx.absolute_positions then TPrinting.Printer.s_pos cm.cm_pos
+					if lines = [] then cm.cm_pos.pfile
+					else if ectx.absolute_positions then TPrinting.Printer.s_pos cm.cm_pos
 					else Lexer.get_error_pos error_printer cm.cm_pos
 				in
 				(l1, p1, l2, p2, epos, lines)
@@ -149,7 +142,7 @@ let compiler_pretty_message_string com ectx cm =
 
 		let gutter_len = (try String.length (Printf.sprintf "%d" (IntMap.find cm.cm_depth ectx.max_lines)) with Not_found -> 0) + 2 in
 
-		let no_color = Define.defined com.defines Define.MessageNoColor in
+		let no_color = Define.defined defines Define.MessageNoColor in
 		let c_reset = if no_color then "" else "\x1b[0m" in
 		let c_bold = if no_color then "" else "\x1b[1m" in
 		let c_dim = if no_color then "" else "\x1b[2m" in
@@ -180,6 +173,20 @@ let compiler_pretty_message_string com ectx cm =
 				(* File + line pointer *)
 				epos;
 
+		(* Macros can send all sorts of bad positions; avoid failing too hard *)
+		let safe_sub s pos len =
+			if len < 0 then ""
+			else
+				let pos = if pos < 0 then 0 else pos in
+				let slen = String.length s in
+				if pos >= slen then ""
+				else
+					let len = if (pos + len) > slen then slen - pos else len in
+					try String.sub s pos len with
+					(* Should not happen anymore, but still better than a crash if I missed some case... *)
+					| Invalid_argument _ -> (Printf.sprintf "[%s;%i;%i]" s pos len)
+		in
+
 		(* Error source *)
 		if display_source then out := List.fold_left (fun out (l, line) ->
 			let nb_len = String.length (string_of_int l) in
@@ -198,18 +205,18 @@ let compiler_pretty_message_string com ectx cm =
 					if l = 0 then
 						c_dim ^ line ^ c_reset
 					else if l1 = l2 then
-						(if p1 > 1 then c_dim ^ (String.sub line 0 (p1-1)) else "")
-						^ c_reset ^ c_bold ^ (String.sub line (p1-1) (p2-p1))
-						^ c_reset ^ c_dim ^ (String.sub line (p2-1) (len - p2 + 1))
+						(if p1 > 1 then c_dim ^ (safe_sub line 0 (p1-1)) else "")
+						^ c_reset ^ c_bold ^ (safe_sub line (p1-1) (p2-p1))
+						^ c_reset ^ c_dim ^ (safe_sub line (p2-1) (len - p2 + 1))
 						^ c_reset
 					else begin
 						(if (l = l1) then
-							(if p1 > 1 then c_dim ^ (String.sub line 0 (p1-1)) else "")
-							^ c_reset ^ c_bold ^ (String.sub line (p1-1) (len-p1+1))
+							c_dim ^ (safe_sub line 0 (p1-1))
+							^ c_reset ^ c_bold ^ (safe_sub line (p1-1) (len-p1+1))
 							^ c_reset
 						else if (l = l2) then
-							(if p2 > 1 then c_bold ^ (String.sub line 0 (p2-1)) else "")
-							^ c_reset ^ c_dim ^ (String.sub line (p2-1) (len-p2+1))
+							c_bold ^ (safe_sub line 0 (p2-1))
+							^ c_reset ^ c_dim ^ (safe_sub line (p2-1) (len-p2+1))
 							^ c_reset
 						else c_bold ^ line ^ c_reset)
 					end
@@ -309,21 +316,21 @@ let get_max_line max_lines messages =
 		else max_lines
 	) max_lines messages
 
-let display_source_at com p =
-	let absolute_positions = Define.defined com.defines Define.MessageAbsolutePositions in
+let display_source_at defines p =
+	let absolute_positions = Define.defined defines Define.MessageAbsolutePositions in
 	let ectx = create_error_context absolute_positions in
 	let msg = make_compiler_message "" p 0 MessageKind.DKCompilerMessage MessageSeverity.Information in
 	ectx.max_lines <- get_max_line ectx.max_lines [msg];
-	match compiler_pretty_message_string com ectx msg with
+	match compiler_pretty_message_string defines ectx msg with
 		| None -> ()
 		| Some s -> prerr_endline s
 
 exception ConfigError of string
 
-let get_formatter com def default =
-	let format_mode = Define.defined_value_safe ~default com.defines def in
+let get_formatter defines def default =
+	let format_mode = Define.defined_value_safe ~default defines def in
 	match format_mode with
-		| "pretty" -> compiler_pretty_message_string com
+		| "pretty" -> compiler_pretty_message_string defines
 		| "indent" -> compiler_indented_message_string
 		| "classic" -> compiler_message_string
 		| m -> begin
@@ -338,11 +345,11 @@ let print_error (err : Error.error) =
 	) err;
 	!ret
 
-let format_messages com messages =
-	let absolute_positions = Define.defined com.defines Define.MessageAbsolutePositions in
+let format_messages defines messages =
+	let absolute_positions = Define.defined defines Define.MessageAbsolutePositions in
 	let ectx = create_error_context absolute_positions in
 	ectx.max_lines <- get_max_line ectx.max_lines messages;
-	let message_formatter = get_formatter com Define.MessageReporting "classic" in
+	let message_formatter = get_formatter defines Define.MessageReporting "pretty" in
 	let lines = List.rev (
 		List.fold_left (fun lines cm -> match (message_formatter ectx cm) with
 			| None -> lines
@@ -356,15 +363,20 @@ let display_messages ctx on_message = begin
 	let ectx = create_error_context absolute_positions in
 	ectx.max_lines <- get_max_line ectx.max_lines ctx.messages;
 
+	let error msg =
+		ctx.has_error <- true;
+		on_message MessageSeverity.Error msg
+	in
+
 	let get_formatter _ def default =
-		try get_formatter ctx.com def default
+		try get_formatter ctx.com.defines def default
 		with | ConfigError s ->
-			error ctx s null_pos;
+			error s;
 			compiler_message_string
 	in
 
-	let message_formatter = get_formatter ctx.com Define.MessageReporting "classic" in
-	let log_formatter = get_formatter ctx.com Define.MessageLogFormat "indent" in
+	let message_formatter = get_formatter ctx.com.defines Define.MessageReporting "pretty" in
+	let log_formatter = get_formatter ctx.com.defines Define.MessageLogFormat "indent" in
 
 	let log_messages = ref (Define.defined ctx.com.defines Define.MessageLogFile) in
 	let log_message = ref None in
@@ -393,7 +405,7 @@ let display_messages ctx on_message = begin
 		end with
 			| Failure e | Sys_error e -> begin
 				let def = Define.get_define_key Define.MessageLogFile in
-				error ctx (Printf.sprintf "Error opening log file: %s. Logging to file disabled (-D %s)" e def) null_pos;
+				error (Printf.sprintf "Error opening log file: %s. Logging to file disabled (-D %s)" e def);
 				log_messages := false;
 			end
 	end;

+ 79 - 69
src/compiler/server.ml

@@ -1,7 +1,6 @@
 open Globals
 open Common
 open CompilationCache
-open Timer
 open Type
 open DisplayProcessingGlobals
 open Ipaddr
@@ -54,16 +53,16 @@ let parse_file cs com (rfile : ClassPaths.resolved_file) p =
 		TypeloadParse.parse_file_from_string com file p stdin
 	| _ ->
 		let ftime = file_time ffile in
-		let data = Std.finally (Timer.timer ["server";"parser cache"]) (fun () ->
+		let data = Std.finally (Timer.start_timer com.timer_ctx ["server";"parser cache"]) (fun () ->
 			try
 				let cfile = cc#find_file fkey in
 				if cfile.c_time <> ftime then raise Not_found;
-				Parser.ParseSuccess((cfile.c_package,cfile.c_decls),false,cfile.c_pdi)
+				Parser.ParseSuccess((cfile.c_package,cfile.c_decls),cfile.c_pdi)
 			with Not_found ->
 				let parse_result = TypeloadParse.parse_file com rfile p in
 				let info,is_unusual = match parse_result with
 					| ParseError(_,_,_) -> "not cached, has parse error",true
-					| ParseSuccess(data,is_display_file,pdi) ->
+					| ParseSuccess(data,pdi) ->
 						if is_display_file then begin
 							if pdi.pd_errors <> [] then
 								"not cached, is display file with parse errors",true
@@ -76,7 +75,7 @@ let parse_file cs com (rfile : ClassPaths.resolved_file) p =
 							(* We assume that when not in display mode it's okay to cache stuff that has #if display
 							checks. The reasoning is that non-display mode has more information than display mode. *)
 							if com.display.dms_full_typing then raise Not_found;
-							let ident = Hashtbl.find Parser.special_identifier_files fkey in
+							let ident = ThreadSafeHashtbl.find com.parser_state.special_identifier_files fkey in
 							Printf.sprintf "not cached, using \"%s\" define" ident,true
 						with Not_found ->
 							cc#cache_file fkey (ClassPaths.create_resolved_file ffile rfile.class_path) ftime data pdi;
@@ -113,10 +112,9 @@ module Communication = struct
 				end;
 				flush stdout;
 			);
-			exit = (fun code ->
+			exit = (fun timer_ctx code ->
 				if code = 0 then begin
-					Timer.close_times();
-					if !Timer.measure_times then Timer.report_times (fun s -> self.write_err (s ^ "\n"));
+					if timer_ctx.measure_times = Yes then Timer.report_times timer_ctx (fun s -> self.write_err (s ^ "\n"));
 				end;
 				exit code;
 			);
@@ -141,15 +139,13 @@ module Communication = struct
 
 					sctx.was_compilation <- ctx.com.display.dms_full_typing;
 					if has_error ctx then begin
-						measure_times := false;
+						ctx.timer_ctx.measure_times <- No;
 						write "\x02\n"
-					end else begin
-						Timer.close_times();
-						if !Timer.measure_times then Timer.report_times (fun s -> self.write_err (s ^ "\n"));
-					end
+					end else
+						if ctx.timer_ctx.measure_times = Yes then Timer.report_times ctx.timer_ctx (fun s -> self.write_err (s ^ "\n"));
 				)
 			);
-			exit = (fun i ->
+			exit = (fun timer_ctx i ->
 				()
 			);
 			is_server = true;
@@ -163,7 +159,6 @@ let stat dir =
 
 (* Gets a list of changed directories for the current compilation. *)
 let get_changed_directories sctx com =
-	let t = Timer.timer ["server";"module cache";"changed dirs"] in
 	let cs = sctx.cs in
 	let sign = Define.get_signature com.defines in
 	let dirs = try
@@ -223,9 +218,18 @@ let get_changed_directories sctx com =
 		Hashtbl.add sctx.changed_directories sign dirs;
 		dirs
 	in
-	t();
 	dirs
 
+let get_changed_directories sctx com =
+	Timer.time com.Common.timer_ctx ["server";"module cache";"changed dirs"] (get_changed_directories sctx) com
+
+let full_typing com m_extra =
+	com.is_macro_context
+	|| com.display.dms_full_typing
+	|| Define.defined com.defines Define.DisableHxbCache
+	|| Define.defined com.defines Define.DisableHxbOptimizations
+	|| DisplayPosition.display_position#is_in_file (Path.UniqueKey.lazy_key m_extra.m_file)
+
 (* Checks if module [m] can be reused from the cache and returns None in that case. Otherwise, returns
    [Some m'] where [m'] is the module responsible for [m] not being reusable. *)
 let check_module sctx com m_path m_extra p =
@@ -291,7 +295,7 @@ let check_module sctx com m_path m_extra p =
 				end
 		in
 		let has_policy policy = List.mem policy m_extra.m_check_policy || match policy with
-			| NoCheckShadowing | NoCheckFileTimeModification when !ServerConfig.do_not_check_modules && !Parser.display_mode <> DMNone -> true
+			| NoFileSystemCheck when !ServerConfig.do_not_check_modules && com.display.dms_kind <> DMNone -> true
 			| _ -> false
 		in
 		let check_file () =
@@ -321,13 +325,13 @@ let check_module sctx com m_path m_extra p =
 				match check mpath m2_extra with
 				| None -> ()
 				| Some reason -> raise (Dirty (DependencyDirty(mpath,reason)))
-			) m_extra.m_deps;
+			) m_extra.m_deps
 		in
 		let check () =
 			try
-				if not (has_policy NoCheckShadowing) then check_module_path();
-				if not (has_policy NoCheckFileTimeModification) || Path.file_extension (Path.UniqueKey.lazy_path m_extra.m_file) <> "hx" then check_file();
-				if not (has_policy NoCheckDependencies) then check_dependencies();
+				check_module_path();
+				if not (has_policy NoFileSystemCheck) || Path.file_extension (Path.UniqueKey.lazy_path m_extra.m_file) <> "hx" then check_file();
+				if full_typing com m_extra then check_dependencies();
 				None
 			with
 			| Dirty reason ->
@@ -386,9 +390,24 @@ let check_module sctx com m_path m_extra p =
 	end;
 	state
 
+let get_hxb_module com cc path =
+	try
+		let mc = cc#get_hxb_module path in
+		if not (full_typing com mc.mc_extra) then begin
+			mc.mc_extra.m_cache_state <- MSGood;
+			BinaryModule mc
+		end else
+			begin match mc.mc_extra.m_cache_state with
+				| MSBad reason -> BadModule reason
+				| _ -> BinaryModule mc
+			end
+	with Not_found ->
+		NoModule
+
 class hxb_reader_api_server
 	(com : Common.context)
 	(cc : context_cache)
+	(delay : TyperPass.typer_pass -> (unit -> unit) -> unit)
 = object(self)
 
 	method make_module (path : path) (file : string) =
@@ -398,7 +417,9 @@ class hxb_reader_api_server
 			m_path = path;
 			m_types = [];
 			m_statics = None;
-			m_extra = mc.mc_extra
+			(* Creating a new m_extra because if we keep the same reference, display requests *)
+			(* can alter it with bad data (for example adding dependencies that are not cached) *)
+			m_extra = { mc.mc_extra with m_deps = mc.mc_extra.m_deps; m_display_deps = None }
 		}
 
 	method add_module (m : module_def) =
@@ -414,36 +435,30 @@ class hxb_reader_api_server
 		| GoodModule m ->
 			m
 		| BinaryModule mc ->
-			let reader = new HxbReader.hxb_reader path com.hxb_reader_stats in
+			let reader = new HxbReader.hxb_reader path com.hxb_reader_stats (if Common.defined com Define.HxbTimes then Some com.timer_ctx else None) in
+			let full_restore = full_typing com mc.mc_extra in
 			let f_next chunks until =
-				let t_hxb = Timer.timer ["server";"module cache";"hxb read"] in
-				let r = reader#read_chunks_until (self :> HxbReaderApi.hxb_reader_api) chunks until in
-				t_hxb();
-				r
+				let macro = if com.is_macro_context then " (macro)" else "" in
+				let f  = reader#read_chunks_until (self :> HxbReaderApi.hxb_reader_api) chunks until in
+				Timer.time com.timer_ctx ["server";"module cache";"hxb read" ^ macro;"until " ^ (string_of_chunk_kind until)] f full_restore
 			in
-			let m,chunks = f_next mc.mc_chunks EOF in
+
+			let m,chunks = f_next mc.mc_chunks EOT in
 
 			(* We try to avoid reading expressions as much as possible, so we only do this for
 				 our current display file if we're in display mode. *)
-			let is_display_file = DisplayPosition.display_position#is_in_file (Path.UniqueKey.lazy_key m.m_extra.m_file) in
-			if is_display_file || com.display.dms_full_typing then ignore(f_next chunks EOM);
+			if full_restore then ignore(f_next chunks EOM)
+			else delay PConnectField (fun () -> ignore(f_next chunks EOF));
 			m
 		| BadModule reason ->
-			die (Printf.sprintf "Unexpected BadModule %s" (s_type_path path)) __LOC__
+			die (Printf.sprintf "Unexpected BadModule %s (%s)" (s_type_path path) (Printer.s_module_skip_reason reason)) __LOC__
 		| NoModule ->
 			die (Printf.sprintf "Unexpected NoModule %s" (s_type_path path)) __LOC__
 
 	method find_module (m_path : path) =
 		try
 			GoodModule (com.module_lut#find m_path)
-		with Not_found -> try
-			let mc = cc#get_hxb_module m_path in
-			begin match mc.mc_extra.m_cache_state with
-				| MSBad reason -> BadModule reason
-				| _ -> BinaryModule mc
-			end
-		with Not_found ->
-			NoModule
+		with Not_found -> get_hxb_module com cc m_path
 
 	method basic_types =
 		com.basic
@@ -452,7 +467,12 @@ class hxb_reader_api_server
 		i
 
 	method read_expression_eagerly (cf : tclass_field) =
-		com.display.dms_full_typing
+		com.is_macro_context || com.display.dms_full_typing || Define.defined com.defines Define.DisableHxbOptimizations
+
+	method make_lazy_type t f =
+		let r = make_unforced_lazy t f "server-api" in
+		delay PForce (fun () -> ignore(lazy_type r));
+		TLazy r
 end
 
 let handle_cache_bound_objects com cbol =
@@ -486,6 +506,7 @@ let rec add_modules sctx com delay (m : module_def) (from_binary : bool) (p : po
 				if not from_binary || m != m then
 					com.module_lut#add m.m_path m;
 				handle_cache_bound_objects com m.m_extra.m_cache_bound_objects;
+				let full_restore = full_typing com m.m_extra in
 				PMap.iter (fun _ mdep ->
 					let mpath = mdep.md_path in
 					if mdep.md_sign = own_sign then begin
@@ -504,7 +525,7 @@ let rec add_modules sctx com delay (m : module_def) (from_binary : bool) (p : po
 						in
 						add_modules (tabs ^ "  ") m0 m2
 					end
-				) m.m_extra.m_deps
+				) (if full_restore then m.m_extra.m_deps else Option.default m.m_extra.m_deps m.m_extra.m_display_deps)
 			)
 		end
 	in
@@ -513,23 +534,18 @@ let rec add_modules sctx com delay (m : module_def) (from_binary : bool) (p : po
 (* Looks up the module referred to by [mpath] in the cache. If it exists, a check is made to
    determine if it's still valid. If this function returns None, the module is re-typed. *)
 and type_module sctx com delay mpath p =
-	let t = Timer.timer ["server";"module cache"] in
+	let t = Timer.start_timer com.timer_ctx ["server";"module cache"] in
 	let cc = CommonCache.get_cache com in
 	let skip m_path reason =
 		ServerMessage.skipping_dep com "" (m_path,(Printer.s_module_skip_reason reason));
 		BadModule reason
 	in
 	let add_modules from_binary m =
-		let tadd = Timer.timer ["server";"module cache";"add modules"] in
-		add_modules sctx com delay m from_binary p;
-		tadd();
+		Timer.time com.timer_ctx ["server";"module cache";"add modules"] (add_modules sctx com delay m from_binary) p;
 		GoodModule m
 	in
 	let check_module sctx m_path m_extra p =
-		let tcheck = Timer.timer ["server";"module cache";"check"] in
-		let r = check_module sctx com mpath m_extra p in
-		tcheck();
-		r
+		Timer.time com.timer_ctx ["server";"module cache";"check"] (check_module sctx com mpath m_extra) p
 	in
 	let find_module_in_cache cc m_path p =
 		try
@@ -538,14 +554,7 @@ and type_module sctx com delay mpath p =
 				| MSBad reason -> BadModule reason
 				| _ -> GoodModule m
 			end;
-		with Not_found -> try
-			let mc = cc#get_hxb_module m_path in
-			begin match mc.mc_extra.m_cache_state with
-				| MSBad reason -> BadModule reason
-				| _ -> BinaryModule mc
-			end
-		with Not_found ->
-			NoModule
+		with Not_found -> get_hxb_module com cc m_path
 	in
 	(* Should not raise anything! *)
 	let m = match find_module_in_cache cc mpath p with
@@ -563,27 +572,27 @@ and type_module sctx com delay mpath p =
 			   checking dependencies. This means that the actual decoding never has any reason to fail. *)
 			begin match check_module sctx mpath mc.mc_extra p with
 				| None ->
-					let reader = new HxbReader.hxb_reader mpath com.hxb_reader_stats in
+					let reader = new HxbReader.hxb_reader mpath com.hxb_reader_stats (if Common.defined com Define.HxbTimes then Some com.timer_ctx else None) in
+					let full_restore = full_typing com mc.mc_extra in
 					let api = match com.hxb_reader_api with
 						| Some api ->
 							api
 						| None ->
-							let api = (new hxb_reader_api_server com cc :> HxbReaderApi.hxb_reader_api) in
+							let api = (new hxb_reader_api_server com cc delay :> HxbReaderApi.hxb_reader_api) in
 							com.hxb_reader_api <- Some api;
 							api
 					in
 					let f_next chunks until =
-						let t_hxb = Timer.timer ["server";"module cache";"hxb read"] in
-						let r = reader#read_chunks_until api chunks until in
-						t_hxb();
-						r
+						let macro = if com.is_macro_context then " (macro)" else "" in
+						Timer.time com.timer_ctx ["server";"module cache";"hxb read" ^ macro;"until " ^ (string_of_chunk_kind until)] (reader#read_chunks_until api chunks until) full_restore
 					in
-					let m,chunks = f_next mc.mc_chunks EOF in
+
+					let m,chunks = f_next mc.mc_chunks EOT in
+
 					(* We try to avoid reading expressions as much as possible, so we only do this for
 					   our current display file if we're in display mode. *)
-					let is_display_file = DisplayPosition.display_position#is_in_file (Path.UniqueKey.lazy_key m.m_extra.m_file) in
-					if is_display_file || com.display.dms_full_typing then ignore(f_next chunks EOM)
-					else delay (fun () -> ignore(f_next chunks EOM));
+					if full_restore then ignore(f_next chunks EOM)
+					else delay PConnectField (fun () -> ignore(f_next chunks EOF));
 					add_modules true m;
 				| Some reason ->
 					skip mpath reason
@@ -624,6 +633,7 @@ let after_save sctx ctx =
 		maybe_cache_context sctx ctx.com
 
 let after_compilation sctx ctx =
+	sctx.cs#clear_temp_cache;
 	()
 
 let mk_length_prefixed_communication allow_nonblock chin chout =
@@ -762,7 +772,7 @@ let enable_cache_mode sctx =
 	TypeloadParse.parse_hook := parse_file sctx.cs
 
 let rec process sctx comm args =
-	let t0 = get_time() in
+	let t0 = Extc.time() in
 	ServerMessage.arguments args;
 	reset sctx;
 	let api = {
@@ -785,7 +795,7 @@ let rec process sctx comm args =
 	} in
 	Compiler.HighLevel.entry api comm args;
 	run_delays sctx;
-	ServerMessage.stats stats (get_time() -. t0)
+	ServerMessage.stats stats (Extc.time() -. t0)
 
 (* The server main loop. Waits for the [accept] call to then process the sent compilation
    parameters through [process_params]. *)

+ 2 - 10
src/compiler/serverCompilationContext.ml

@@ -1,5 +1,4 @@
 open Common
-open Timer
 open CompilationCache
 
 type t = {
@@ -45,22 +44,15 @@ let reset sctx =
 	Hashtbl.clear sctx.changed_directories;
 	sctx.was_compilation <- false;
 	Parser.reset_state();
-	Lexer.cur := Lexer.make_file "";
-	measure_times := false;
 	Hashtbl.clear DeprecationCheck.warned_positions;
-	close_times();
 	stats.s_files_parsed := 0;
 	stats.s_classes_built := 0;
 	stats.s_methods_typed := 0;
-	stats.s_macros_called := 0;
-	Hashtbl.clear Timer.htimers;
-	Helper.start_time := get_time()
+	stats.s_macros_called := 0
 
 let maybe_cache_context sctx com =
 	if com.display.dms_full_typing && com.display.dms_populate_cache then begin
-		let t = Timer.timer ["server";"cache context"] in
-		CommonCache.cache_context sctx.cs com;
-		t();
+		Timer.time com.timer_ctx ["server";"cache context"] (CommonCache.cache_context sctx.cs) com;
 		ServerMessage.cached_modules com "" (List.length com.modules);
 	end
 

+ 1 - 3
src/compiler/serverConfig.ml

@@ -1,11 +1,9 @@
 let do_not_check_modules = ref false
-let populate_cache_from_display = ref true
 let legacy_completion = ref false
 
 let max_completion_items = ref 0
 
 let reset () =
 	do_not_check_modules := false;
-	populate_cache_from_display := true;
 	legacy_completion := false;
-	max_completion_items := 0
+	max_completion_items := 0

+ 2 - 2
src/compiler/tasks.ml

@@ -6,7 +6,7 @@ class gc_task (max_working_memory : float) (heap_size : float) = object(self)
 	inherit server_task ["gc"] 100
 
 	method private execute =
-		let t0 = Timer.get_time() in
+		let t0 = Extc.time() in
 		let stats = Gc.stat() in
 		let live_words = float_of_int stats.live_words in
 		(* Maximum heap size needed for the last X compilations = sum of what's live + max working memory. *)
@@ -27,7 +27,7 @@ class gc_task (max_working_memory : float) (heap_size : float) = object(self)
 			Gc.full_major();
 		end;
 		Gc.set old_gc;
-		ServerMessage.gc_stats (Timer.get_time() -. t0) stats do_compact new_space_overhead
+		ServerMessage.gc_stats (Extc.time() -. t0) stats do_compact new_space_overhead
 end
 
 class class_maintenance_task (cs : CompilationCache.t) (c : tclass) = object(self)

+ 40 - 27
src/context/abstractCast.ml

@@ -7,6 +7,7 @@ open Error
 
 let cast_stack = new_rec_stack()
 
+
 let rec make_static_call ctx c cf a pl args t p =
 	if cf.cf_kind = Method MethMacro then begin
 		match args with
@@ -23,7 +24,7 @@ let rec make_static_call ctx c cf a pl args t p =
 				e
 			| _ -> die "" __LOC__
 	end else
-		Typecore.make_static_call ctx c cf (apply_params a.a_params pl) args t p
+		CallUnification.make_static_call_better ctx c cf pl args t p
 
 and do_check_cast ctx uctx tleft eright p =
 	let recurse cf f =
@@ -87,10 +88,19 @@ and do_check_cast ctx uctx tleft eright p =
 						loop2 a.a_to
 					end
 				| TInst(c,tl), TFun _ when has_class_flag c CFunctionalInterface ->
-					let cf = ctx.g.functional_interface_lut#find c.cl_path in
+					let cf = try
+						snd (ctx.com.functional_interface_lut#find c.cl_path)
+					with Not_found -> match TClass.get_singular_interface_field c.cl_ordered_fields with
+						| None ->
+							raise Not_found
+						| Some cf ->
+							ctx.com.functional_interface_lut#add c.cl_path (c,cf);
+							cf
+					in
 					let map = apply_params c.cl_params tl in
 					let monos = Monomorph.spawn_constrained_monos map cf.cf_params in
-					unify_raise_custom uctx eright.etype (map (apply_params cf.cf_params monos cf.cf_type)) p;
+					unify_raise_custom native_unification_context eright.etype (map (apply_params cf.cf_params monos cf.cf_type)) p;
+					if has_mono tright then raise_typing_error ("Cannot use this function as a functional interface because it has unknown types: " ^ (s_type (print_context()) tright)) p;
 					eright
 				| _ ->
 					raise Not_found
@@ -101,7 +111,7 @@ and do_check_cast ctx uctx tleft eright p =
 
 and cast_or_unify_raise ctx ?(uctx=None) tleft eright p =
 	let uctx = match uctx with
-		| None -> default_unification_context
+		| None -> default_unification_context ()
 		| Some uctx -> uctx
 	in
 	try
@@ -118,7 +128,7 @@ and cast_or_unify ctx tleft eright p =
 		eright
 
 let prepare_array_access_field ctx a pl cf p =
-	let monos = List.map (fun _ -> spawn_monomorph ctx.e p) cf.cf_params in
+	let monos = List.map (fun _ -> spawn_monomorph ctx p) cf.cf_params in
 	let map t = apply_params a.a_params pl (apply_params cf.cf_params monos t) in
 	let check_constraints () =
 		List.iter2 (fun m ttp -> match get_constraints ttp with
@@ -190,11 +200,11 @@ let find_array_write_access ctx a tl e1 e2 p =
 		let s_type = s_type (print_context()) in
 		raise_typing_error (Printf.sprintf "No @:arrayAccess function for %s accepts arguments of %s and %s" (s_type (TAbstract(a,tl))) (s_type e1.etype) (s_type e2.etype)) p
 
-let find_multitype_specialization com a pl p =
-	let uctx = default_unification_context in
+let find_multitype_specialization' platform a pl p =
+	let uctx = default_unification_context () in
 	let m = mk_mono() in
 	let tl,definitive_types = Abstract.find_multitype_params a pl in
-	if com.platform = Globals.Js && a.a_path = (["haxe";"ds"],"Map") then begin match tl with
+	if platform = Globals.Js && a.a_path = (["haxe";"ds"],"Map") then begin match tl with
 		| t1 :: _ ->
 			let stack = ref [] in
 			let rec loop t =
@@ -232,10 +242,14 @@ let find_multitype_specialization com a pl p =
 			else
 				raise_typing_error ("Abstract " ^ (s_type_path a.a_path) ^ " has no @:to function that accepts " ^ st) p;
 	in
-	cf, follow m
+	cf,follow m,tl
+
+let find_multitype_specialization platform a pl p =
+	let cf,m,_ = find_multitype_specialization' platform a pl p in
+	(cf,m)
 
-let handle_abstract_casts ctx e =
-	let rec loop ctx e = match e.eexpr with
+let handle_abstract_casts (scom : SafeCom.t) e =
+	let rec loop e = match e.eexpr with
 		| TNew({cl_kind = KAbstractImpl a} as c,pl,el) ->
 			if not (Meta.has Meta.MultiType a.a_meta) then begin
 				(* This must have been a @:generic expansion with a { new } constraint (issue #4364). In this case
@@ -245,24 +259,22 @@ let handle_abstract_casts ctx e =
 				| _ -> raise_typing_error ("Cannot construct " ^ (s_type (print_context()) (TAbstract(a,pl)))) e.epos
 			end else begin
 				(* a TNew of an abstract implementation is only generated if it is a multi type abstract *)
-				let cf,m = find_multitype_specialization ctx.com a pl e.epos in
-				let e = make_static_call ctx c cf a pl ((mk (TConst TNull) (TAbstract(a,pl)) e.epos) :: el) m e.epos in
+				let cf,m,pl = find_multitype_specialization' scom.platform a pl e.epos in
+				let e = ExceptionFunctions.make_static_call scom c cf ((mk (TConst TNull) (TAbstract(a,pl)) e.epos) :: el)  m e.epos in
 				{e with etype = m}
 			end
 		| TCall({eexpr = TField(_,FStatic({cl_path=[],"Std"},{cf_name = "string"}))},[e1]) when (match follow e1.etype with TAbstract({a_impl = Some _},_) -> true | _ -> false) ->
 			begin match follow e1.etype with
-				| TAbstract({a_impl = Some c} as a,tl) ->
+				| TAbstract({a_impl = Some c},tl) ->
 					begin try
 						let cf = PMap.find "toString" c.cl_statics in
-						let call() = make_static_call ctx c cf a tl [e1] ctx.t.tstring e.epos in
-						if not ctx.allow_transform then
-							{ e1 with etype = ctx.t.tstring; epos = e.epos }
-						else if not (is_nullable e1.etype) then
+						let call() = ExceptionFunctions.make_static_call scom c cf [e1] scom.basic.tstring e.epos in
+						if not (is_nullable e1.etype) then
 							call()
 						else begin
 							let p = e.epos in
-							let chk_null = mk (TBinop (Ast.OpEq, e1, mk (TConst TNull) e1.etype p)) ctx.com.basic.tbool p in
-							mk (TIf (chk_null, mk (TConst (TString "null")) ctx.com.basic.tstring p, Some (call()))) ctx.com.basic.tstring p
+							let chk_null = mk (TBinop (Ast.OpEq, e1, mk (TConst TNull) e1.etype p)) scom.basic.tbool p in
+							mk (TIf (chk_null, mk (TConst (TString "null")) scom.basic.tstring p, Some (call()))) scom.basic.tstring p
 						end
 					with Not_found ->
 						e
@@ -288,9 +300,10 @@ let handle_abstract_casts ctx e =
 						{e1 with eexpr = TCast(find_field e2,None)}
 					| TField(e2,fa) ->
 						let a,pl,e2 = find_abstract e2 e2.etype in
+						let e2 = loop e2 in
 						let m = Abstract.get_underlying_type a pl in
 						let fname = field_name fa in
-						let el = List.map (loop ctx) el in
+						let el = List.map loop el in
 						begin try
 							let fa = quick_field m fname in
 							let get_fun_type t = match follow t with
@@ -329,14 +342,14 @@ let handle_abstract_casts ctx e =
 								else
 									el
 							in
-							let ecall = make_call ctx ef el tr e.epos in
+							let ecall = ExceptionFunctions.make_call scom ef el tr e.epos in
 							maybe_cast ecall e.etype e.epos
 						with Not_found ->
 							(* quick_field raises Not_found if m is an abstract, we have to replicate the 'using' call here *)
 							match follow m with
-							| TAbstract({a_impl = Some c} as a,pl) ->
+							| TAbstract({a_impl = Some c},pl) ->
 								let cf = PMap.find fname c.cl_statics in
-								make_static_call ctx c cf a pl (e2 :: el) e.etype e.epos
+								ExceptionFunctions.make_static_call scom c cf  (e2 :: el) e.etype e.epos
 							| _ -> raise Not_found
 						end
 					| _ ->
@@ -344,11 +357,11 @@ let handle_abstract_casts ctx e =
 				in
 				find_field e1
 			with Not_found ->
-				Type.map_expr (loop ctx) e
+				Type.map_expr loop e
 			end
 		| _ ->
-			Type.map_expr (loop ctx) e
+			Type.map_expr loop e
 	in
-	loop ctx e
+	loop e
 ;;
 Typecore.cast_or_unify_raise_ref := cast_or_unify_raise

+ 100 - 257
src/context/common.ml

@@ -16,7 +16,6 @@
 	along with this program; if not, write to the Free Software
 	Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
  *)
-open Extlib_leftovers
 open Ast
 open Type
 open Globals
@@ -46,127 +45,6 @@ type stats = {
 	s_macros_called : int ref;
 }
 
-(**
-	The capture policy tells which handling we make of captured locals
-	(the locals which are referenced in local functions)
-
-	See details/implementation in Codegen.captured_vars
-*)
-type capture_policy =
-	(** do nothing, let the platform handle it *)
-	| CPNone
-	(** wrap all captured variables into a single-element array to allow modifications *)
-	| CPWrapRef
-	(** similar to wrap ref, but will only apply to the locals that are declared in loops *)
-	| CPLoopVars
-
-type exceptions_config = {
-	(* Base types which may be thrown from Haxe code without wrapping. *)
-	ec_native_throws : path list;
-	(* Base types which may be caught from Haxe code without wrapping. *)
-	ec_native_catches : path list;
-	(*
-		Hint exceptions filter to avoid wrapping for targets, which can throw/catch any type
-		Ignored on targets with a specific native base type for exceptions.
-	*)
-	ec_avoid_wrapping : bool;
-	(* Path of a native class or interface, which can be used for wildcard catches. *)
-	ec_wildcard_catch : path;
-	(*
-		Path of a native base class or interface, which can be thrown.
-		This type is used to cast `haxe.Exception.thrown(v)` calls to.
-		For example `throw 123` is compiled to `throw (cast Exception.thrown(123):ec_base_throw)`
-	*)
-	ec_base_throw : path;
-	(*
-		Checks if throwing this expression is a special case for current target
-		and should not be modified.
-	*)
-	ec_special_throw : texpr -> bool;
-}
-
-type var_scope =
-	| FunctionScope
-	| BlockScope
-
-type var_scoping_flags =
-	(**
-		Variables are hoisted in their scope
-	*)
-	| VarHoisting
-	(**
-		It's not allowed to shadow existing variables in a scope.
-	*)
-	| NoShadowing
-	(**
-		It's not allowed to shadow a `catch` variable.
-	*)
-	| NoCatchVarShadowing
-	(**
-		Local vars cannot have the same name as the current top-level package or
-		(if in the root package) current class name
-	*)
-	| ReserveCurrentTopLevelSymbol
-	(**
-		Local vars cannot have a name used for any top-level symbol
-		(packages and classes in the root package)
-	*)
-	| ReserveAllTopLevelSymbols
-	(**
-		Reserve all type-paths converted to "flat path" with `Path.flat_path`
-	*)
-	| ReserveAllTypesFlat
-	(**
-		List of names cannot be taken by local vars
-	*)
-	| ReserveNames of string list
-	(**
-		Cases in a `switch` won't have blocks, but will share the same outer scope.
-	*)
-	| SwitchCasesNoBlocks
-
-type var_scoping_config = {
-	vs_flags : var_scoping_flags list;
-	vs_scope : var_scope;
-}
-
-type platform_config = {
-	(** has a static type system, with not-nullable basic types (Int/Float/Bool) *)
-	pf_static : bool;
-	(** has access to the "sys" package *)
-	pf_sys : bool;
-	(** captured variables handling (see before) *)
-	pf_capture_policy : capture_policy;
-	(** when calling a method with optional args, do we replace the missing args with "null" constants *)
-	pf_pad_nulls : bool;
-	(** add a final return to methods not having one already - prevent some compiler warnings *)
-	pf_add_final_return : bool;
-	(** does the platform natively support overloaded functions *)
-	pf_overload : bool;
-	(** can the platform use default values for non-nullable arguments *)
-	pf_can_skip_non_nullable_argument : bool;
-	(** type paths that are reserved on the platform *)
-	pf_reserved_type_paths : path list;
-	(** supports function == function **)
-	pf_supports_function_equality : bool;
-	(** uses utf16 encoding with ucs2 api **)
-	pf_uses_utf16 : bool;
-	(** target supports accessing `this` before calling `super(...)` **)
-	pf_this_before_super : bool;
-	(** target supports threads **)
-	pf_supports_threads : bool;
-	(** target supports Unicode **)
-	pf_supports_unicode : bool;
-	(** target supports rest arguments **)
-	pf_supports_rest_args : bool;
-	(** exceptions handling config **)
-	pf_exceptions : exceptions_config;
-	(** the scoping of local variables *)
-	pf_scoping : var_scoping_config;
-	(** target supports atomic operations via haxe.Atomic **)
-	pf_supports_atomics : bool;
-}
-
 class compiler_callbacks = object(self)
 	val before_typer_create = ref [];
 	val after_init_macros = ref [];
@@ -230,6 +108,14 @@ class file_keys = object(self)
 			let key = Path.UniqueKey.create file in
 			Hashtbl.add cache file key;
 			key
+
+	val virtual_counter = ref 0
+
+	method generate_virtual mpath step =
+		incr virtual_counter;
+		let base = match fst mpath with | [] -> "." | pack -> ExtLib.String.join "/" pack in
+		Printf.sprintf "%s/%s_%i_%i" base (snd mpath) step !virtual_counter
+
 end
 
 type shared_display_information = {
@@ -338,11 +224,14 @@ class virtual abstract_hxb_lib = object(self)
 	method virtual get_bytes : string -> path -> bytes option
 	method virtual close : unit
 	method virtual get_file_path : string
+	method virtual get_string_pool : string -> string array option
 end
 
-type context_main = {
-	mutable main_class : path option;
-	mutable main_expr : texpr option;
+type parser_state = {
+	mutable was_auto_triggered : bool;
+	mutable had_parser_resume : bool;
+	delayed_syntax_completion : Parser.syntax_completion_on option Atomic.t;
+	special_identifier_files : (Path.UniqueKey.t,string) ThreadSafeHashtbl.t;
 }
 
 type context = {
@@ -352,26 +241,29 @@ type context = {
 	mutable cache : CompilationCache.context_cache option;
 	is_macro_context : bool;
 	mutable json_out : json_api option;
+	timer_ctx : Timer.timer_context;
 	(* config *)
-	version : int;
+	version : compiler_version;
 	mutable args : string list;
 	mutable display : DisplayTypes.DisplayMode.settings;
 	mutable debug : bool;
 	mutable verbose : bool;
 	mutable foptimize : bool;
+	mutable doinline : bool;
 	mutable platform : platform;
-	mutable config : platform_config;
+	mutable config : PlatformConfig.platform_config;
 	empty_class_path : ClassPath.class_path;
 	class_paths : ClassPaths.class_paths;
-	main : context_main;
+	main : Gctx.context_main;
 	mutable package_rules : (string,package_rule) PMap.t;
 	mutable report_mode : report_mode;
+	parser_state : parser_state;
 	(* communication *)
 	mutable print : string -> unit;
-	mutable error : ?depth:int -> string -> pos -> unit;
+	mutable error : Gctx.error_function;
 	mutable error_ext : Error.error -> unit;
 	mutable info : ?depth:int -> ?from_macro:bool -> string -> pos -> unit;
-	mutable warning : ?depth:int -> ?from_macro:bool -> warning -> Warning.warning_option list list -> string -> pos -> unit;
+	mutable warning : Gctx.warning_function;
 	mutable warning_options : Warning.warning_option list list;
 	mutable get_messages : unit -> compiler_message list;
 	mutable filter_messages : (compiler_message -> bool) -> unit;
@@ -407,6 +299,7 @@ type context = {
 	mutable modules : Type.module_def list;
 	mutable types : Type.module_type list;
 	mutable resources : (string,string) Hashtbl.t;
+	functional_interface_lut : (path,(tclass * tclass_field)) lookup;
 	(* target-specific *)
 	mutable flash_version : float;
 	mutable neko_lib_paths : string list;
@@ -424,6 +317,32 @@ type context = {
 	mutable hxb_writer_config : HxbWriterConfig.t option;
 }
 
+let to_gctx com = {
+	Gctx.platform = com.platform;
+	defines = com.defines;
+	basic = com.basic;
+	class_paths = com.class_paths;
+	run_command = com.run_command;
+	run_command_args = com.run_command_args;
+	warning = com.warning;
+	error = com.error;
+	print = com.print;
+	debug = com.debug;
+	file = com.file;
+	version = com.version;
+	features = com.features;
+	modules = com.modules;
+	main = com.main;
+	types = com.types;
+	resources = com.resources;
+	native_libs = (match com.platform with
+		| Jvm -> (com.native_libs.java_libs :> NativeLibraries.native_library_base list)
+		| Flash -> (com.native_libs.swf_libs  :> NativeLibraries.native_library_base list)
+		| _ -> []);
+	include_files = com.include_files;
+	std = com.std;
+	timer_ctx = com.timer_ctx;
+}
 let enter_stage com stage =
 	(* print_endline (Printf.sprintf "Entering stage %s" (s_compiler_stage stage)); *)
 	com.stage <- stage
@@ -434,7 +353,7 @@ let ignore_error com =
 	b
 
 let module_warning com m w options msg p =
-	DynArray.add m.m_extra.m_cache_bound_objects (Warning(w,msg,p));
+	if com.display.dms_full_typing then DynArray.add m.m_extra.m_cache_bound_objects (Warning(w,msg,p));
 	com.warning w options msg p
 
 (* Defines *)
@@ -468,6 +387,7 @@ let convert_define k =
 	String.concat "_" (ExtString.String.nsplit k "-")
 
 let is_next com = defined com HaxeNext
+let fail_fast com = defined com FailFast
 
 let external_defined ctx k =
 	Define.raw_defined ctx.defines (convert_define k)
@@ -512,9 +432,6 @@ let defines_for_external ctx =
 			| split -> PMap.add (String.concat "-" split) v added_underscore;
 	) ctx.defines.values PMap.empty
 
-let get_es_version com =
-	try int_of_string (defined_value com Define.JsEs) with _ -> 0
-
 let short_platform_name = function
 	| Cross -> "x"
 	| Js -> "js"
@@ -537,6 +454,8 @@ let stats =
 		s_macros_called = ref 0;
 	}
 
+open PlatformConfig
+
 let default_config =
 	{
 		pf_static = true;
@@ -577,7 +496,7 @@ let get_config com =
 		(* impossible to reach. see update_platform_config *)
 		raise Exit
 	| Js ->
-		let es6 = get_es_version com >= 6 in
+		let es6 = Gctx.get_es_version com.defines >= 6 in
 		{
 			default_config with
 			pf_static = false;
@@ -622,6 +541,9 @@ let get_config com =
 			pf_supports_unicode = false;
 			pf_scoping = { default_config.pf_scoping with
 				vs_flags = [ReserveAllTopLevelSymbols];
+			};
+			pf_exceptions = { default_config.pf_exceptions with
+				ec_avoid_wrapping = false
 			}
 		}
 	| Flash ->
@@ -742,6 +664,9 @@ let get_config com =
 				vs_scope = BlockScope;
 				vs_flags = [NoShadowing]
 			};
+			pf_exceptions = { default_config.pf_exceptions with
+				ec_avoid_wrapping = false
+			}
 		}
 	| Eval ->
 		{
@@ -751,15 +676,19 @@ let get_config com =
 			pf_uses_utf16 = false;
 			pf_supports_threads = true;
 			pf_capture_policy = CPWrapRef;
+			pf_exceptions = { default_config.pf_exceptions with
+				ec_avoid_wrapping = false
+			}
 		}
 
 let memory_marker = [|Unix.time()|]
 
-let create compilation_step cs version args display_mode =
+let create timer_ctx compilation_step cs version args display_mode =
 	let rec com = {
 		compilation_step = compilation_step;
 		cs = cs;
 		cache = None;
+		timer_ctx = timer_ctx;
 		stage = CCreated;
 		version = version;
 		args = args;
@@ -777,6 +706,7 @@ let create compilation_step cs version args display_mode =
 		display = display_mode;
 		verbose = false;
 		foptimize = true;
+		doinline = true;
 		features = Hashtbl.create 0;
 		platform = Cross;
 		config = default_config;
@@ -786,7 +716,8 @@ let create compilation_step cs version args display_mode =
 		empty_class_path = new ClassPath.directory_class_path "" User;
 		class_paths = new ClassPaths.class_paths;
 		main = {
-			main_class = None;
+			main_path = None;
+			main_file = None;
 			main_expr = None;
 		};
 		package_rules = PMap.empty;
@@ -808,16 +739,13 @@ let create compilation_step cs version args display_mode =
 		include_files = [];
 		js_gen = None;
 		load_extern_type = [];
-		defines = {
-			defines_signature = None;
-			values = PMap.empty;
-		};
+		defines = Define.empty_defines ();
 		user_defines = Hashtbl.create 0;
 		user_metas = Hashtbl.create 0;
 		get_macros = (fun() -> None);
 		info = (fun ?depth ?from_macro _ _ -> die "" __LOC__);
 		warning = (fun ?depth ?from_macro _ _ _ -> die "" __LOC__);
-		warning_options = [];
+		warning_options = [List.map (fun w -> {wo_warning = w;wo_mode = WMDisable}) WarningList.disabled_warnings];
 		error = (fun ?depth _ _ -> die "" __LOC__);
 		error_ext = (fun _ -> die "" __LOC__);
 		get_messages = (fun() -> []);
@@ -825,12 +753,14 @@ let create compilation_step cs version args display_mode =
 		pass_debug_messages = DynArray.create();
 		basic = {
 			tvoid = mk_mono();
+			tany = mk_mono();
 			tint = mk_mono();
 			tfloat = mk_mono();
 			tbool = mk_mono();
 			tstring = mk_mono();
 			tnull = (fun _ -> die "Could use locate abstract Null<T> (was it redefined?)" __LOC__);
 			tarray = (fun _ -> die "Could not locate class Array<T> (was it redefined?)" __LOC__);
+			titerator = (fun _ -> die "Could not locate typedef Iterator<T> (was it redefined?)" __LOC__);
 		};
 		std = null_class;
 		file_keys = new file_keys;
@@ -845,9 +775,16 @@ let create compilation_step cs version args display_mode =
 		has_error = false;
 		report_mode = RMNone;
 		is_macro_context = false;
+		functional_interface_lut = new Lookup.hashtbl_lookup;
 		hxb_reader_api = None;
 		hxb_reader_stats = HxbReader.create_hxb_reader_stats ();
 		hxb_writer_config = None;
+		parser_state = {
+			was_auto_triggered = false;
+			had_parser_resume = false;
+			delayed_syntax_completion = Atomic.make None;
+			special_identifier_files = ThreadSafeHashtbl.create 0;
+		}
 	} in
 	com
 
@@ -869,15 +806,18 @@ let clone com is_macro_context =
 	let t = com.basic in
 	{ com with
 		cache = None;
+		stage = CCreated;
 		basic = { t with
 			tvoid = mk_mono();
+			tany = mk_mono();
 			tint = mk_mono();
 			tfloat = mk_mono();
 			tbool = mk_mono();
 			tstring = mk_mono();
 		};
 		main = {
-			main_class = None;
+			main_path = None;
+			main_file = None;
 			main_expr = None;
 		};
 		features = Hashtbl.create 0;
@@ -901,6 +841,7 @@ let clone com is_macro_context =
 		hxb_reader_api = None;
 		hxb_reader_stats = HxbReader.create_hxb_reader_stats ();
 		std = null_class;
+		functional_interface_lut = new Lookup.hashtbl_lookup;
 		empty_class_path = new ClassPath.directory_class_path "" User;
 		class_paths = new ClassPaths.class_paths;
 	}
@@ -913,27 +854,6 @@ let flash_versions = List.map (fun v ->
 	v, string_of_int maj ^ (if min = 0 then "" else "_" ^ string_of_int min)
 ) [9.;10.;10.1;10.2;10.3;11.;11.1;11.2;11.3;11.4;11.5;11.6;11.7;11.8;11.9;12.0;13.0;14.0;15.0;16.0;17.0;18.0;19.0;20.0;21.0;22.0;23.0;24.0;25.0;26.0;27.0;28.0;29.0;31.0;32.0]
 
-let flash_version_tag = function
-	| 6. -> 6
-	| 7. -> 7
-	| 8. -> 8
-	| 9. -> 9
-	| 10. | 10.1 -> 10
-	| 10.2 -> 11
-	| 10.3 -> 12
-	| 11. -> 13
-	| 11.1 -> 14
-	| 11.2 -> 15
-	| 11.3 -> 16
-	| 11.4 -> 17
-	| 11.5 -> 18
-	| 11.6 -> 19
-	| 11.7 -> 20
-	| 11.8 -> 21
-	| 11.9 -> 22
-	| v when v >= 12.0 && float_of_int (int_of_float v) = v -> int_of_float v + 11
-	| v -> failwith ("Invalid SWF version " ^ string_of_float v)
-
 let update_platform_config com =
 	match com.platform with
 	| CustomTarget _ ->
@@ -955,7 +875,7 @@ let init_platform com =
 	end;
 	(* Set the source header, unless the user has set one already or the platform sets a custom one *)
 	if not (defined com Define.SourceHeader) && (com.platform <> Hl) then
-		define_value com Define.SourceHeader ("Generated by Haxe " ^ s_version_full);
+		define_value com Define.SourceHeader ("Generated by Haxe " ^ (s_version_full com.version));
 	let forbid acc p = if p = name || PMap.mem p acc then acc else PMap.add p Forbidden acc in
 	com.package_rules <- List.fold_left forbid com.package_rules ("java" :: (List.map platform_name platforms));
 	update_platform_config com;
@@ -1065,10 +985,6 @@ let platform_name_macro com =
 let find_file ctx f =
 	(ctx.class_paths#find_file f).file
 
-(* let find_file ctx f =
-	let timer = Timer.timer ["find_file"] in
-	Std.finally timer (find_file ctx) f *)
-
 let mem_size v =
 	Objsize.size_with_headers (Objsize.objsize v [] [])
 
@@ -1079,90 +995,6 @@ let hash f =
 	done;
 	if Sys.word_size = 64 then Int32.to_int (Int32.shift_right (Int32.shift_left (Int32.of_int !h) 1) 1) else !h
 
-let url_encode s add_char =
-	let hex = "0123456789ABCDEF" in
-	for i = 0 to String.length s - 1 do
-		let c = String.unsafe_get s i in
-		match c with
-		| 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '-' | '.' ->
-			add_char c
-		| _ ->
-			add_char '%';
-			add_char (String.unsafe_get hex (int_of_char c lsr 4));
-			add_char (String.unsafe_get hex (int_of_char c land 0xF));
-	done
-
-let url_encode_s s =
-	let b = Buffer.create 0 in
-	url_encode s (Buffer.add_char b);
-	Buffer.contents b
-
-(* UTF8 *)
-
-let to_utf8 str p =
-	let u8 = try
-		UTF8.validate str;
-		str;
-	with
-		UTF8.Malformed_code ->
-			(* ISO to utf8 *)
-			let b = UTF8.Buf.create 0 in
-			String.iter (fun c -> UTF8.Buf.add_char b (UCharExt.of_char c)) str;
-			UTF8.Buf.contents b
-	in
-	let ccount = ref 0 in
-	UTF8.iter (fun c ->
-		let c = UCharExt.code c in
-		if (c >= 0xD800 && c <= 0xDFFF) || c >= 0x110000 then Error.abort "Invalid unicode char" p;
-		incr ccount;
-		if c > 0x10000 then incr ccount;
-	) u8;
-	u8, !ccount
-
-let utf16_add buf c =
-	let add c =
-		Buffer.add_char buf (char_of_int (c land 0xFF));
-		Buffer.add_char buf (char_of_int (c lsr 8));
-	in
-	if c >= 0 && c < 0x10000 then begin
-		if c >= 0xD800 && c <= 0xDFFF then failwith ("Invalid unicode char " ^ string_of_int c);
-		add c;
-	end else if c < 0x110000 then begin
-		let c = c - 0x10000 in
-		add ((c asr 10) + 0xD800);
-		add ((c land 1023) + 0xDC00);
-	end else
-		failwith ("Invalid unicode char " ^ string_of_int c)
-
-let utf8_to_utf16 str zt =
-	let b = Buffer.create (String.length str * 2) in
-	(try UTF8.iter (fun c -> utf16_add b (UCharExt.code c)) str with Invalid_argument _ | UCharExt.Out_of_range -> ()); (* if malformed *)
-	if zt then utf16_add b 0;
-	Buffer.contents b
-
-let utf16_to_utf8 str =
-	let b = Buffer.create 0 in
-	let add c = Buffer.add_char b (char_of_int (c land 0xFF)) in
-	let get i = int_of_char (String.unsafe_get str i) in
-	let rec loop i =
-		if i >= String.length str then ()
-		else begin
-			let c = get i in
-			if c < 0x80 then begin
-				add c;
-				loop (i + 2);
-			end else if c < 0x800 then begin
-				let c = c lor ((get (i + 1)) lsl 8) in
-				add c;
-				add (c lsr 8);
-				loop (i + 2);
-			end else
-				die "" __LOC__;
-		end
-	in
-	loop 0;
-	Buffer.contents b
-
 let add_diagnostics_message ?(depth = 0) ?(code = None) com s p kind sev =
 	if sev = MessageSeverity.Error then com.has_error <- true;
 	let di = com.shared.shared_display_information in
@@ -1179,9 +1011,6 @@ let display_error_ext com err =
 let display_error com ?(depth = 0) msg p =
 	display_error_ext com (Error.make_error ~depth (Custom msg) p)
 
-let dump_path com =
-	Define.defined_value_safe ~default:"dump" com.defines Define.DumpPath
-
 let adapt_defines_to_macro_context defines =
 	let to_remove = "java" :: List.map Globals.platform_name Globals.platforms in
 	let to_remove = List.fold_left (fun acc d -> Define.get_define_key d :: acc) to_remove [Define.NoTraces] in
@@ -1214,4 +1043,18 @@ let get_entry_point com =
 		in
 		let e = Option.get com.main.main_expr in (* must be present at this point *)
 		(snd path, c, e)
-	) com.main.main_class
+	) com.main.main_path
+
+let make_unforced_lazy t_proc f where =
+	let r = ref (lazy_available t_dynamic) in
+	r := lazy_wait (fun() ->
+		try
+			r := lazy_processing t_proc;
+			let t = f () in
+			r := lazy_available t;
+			t
+		with
+			| Error.Error e ->
+				raise (Error.Fatal_error e)
+	);
+	r

+ 34 - 15
src/context/commonCache.ml

@@ -11,7 +11,7 @@ class lib_build_task cs file ftime lib = object(self)
 		let h = Hashtbl.create 0 in
 		List.iter (fun path ->
 			if not (Hashtbl.mem h path) then begin
-				let p = { pfile = file ^ " @ " ^ Globals.s_type_path path; pmin = 0; pmax = 0; } in
+				let p = file_pos (file ^ " @ " ^ Globals.s_type_path path) in
 				try begin match lib#build path p with
 				| Some r -> Hashtbl.add h path r
 				| None -> ()
@@ -84,27 +84,46 @@ let get_cache_sign com = match com.Common.cache with
 let rec cache_context cs com =
 	let cc = get_cache com in
 	let sign = Define.get_signature com.defines in
-	let anon_identification = new Tanon_identification.tanon_identification in
-	let config = match com.hxb_writer_config with
-		| None ->
-			HxbWriterConfig.create_target_config ()
-		| Some config ->
-			if com.is_macro_context then config.macro_config else config.target_config
-	in
+
+	let parallels = DynArray.create () in
 	let cache_module m =
-		(* If we have a signature mismatch, look-up cache for module. Physical equality check is fine as a heueristic. *)
-		let cc = if m.m_extra.m_sign = sign then cc else cs#get_context m.m_extra.m_sign in
-		let warn w s p = com.warning w com.warning_options s p in
-		cc#cache_module config warn anon_identification m.m_path m;
+		if Define.defined com.defines DisableHxbCache then
+			(* If we have a signature mismatch, look-up cache for module. Physical equality check is fine as a heuristic. *)
+			let cc = if m.m_extra.m_sign = sign then cc else cs#get_context m.m_extra.m_sign in
+			cc#cache_module_in_memory m.m_path m;
+		else
+			let anon_identification = new Tanon_identification.tanon_identification in
+			let warn w s p = com.warning w com.warning_options s p in
+			let config = match com.hxb_writer_config with
+				| None ->
+					HxbWriterConfig.create_target_config ()
+				| Some config ->
+					if com.is_macro_context then config.macro_config else config.target_config
+			in
+			(* If we have a signature mismatch, look-up cache for module. Physical equality check is fine as a heuristic. *)
+			let cc = if m.m_extra.m_sign = sign then cc else cs#get_context m.m_extra.m_sign in
+			match cc#cache_hxb_module config warn anon_identification m with
+			| None ->
+				()
+			| Some f ->
+				DynArray.add parallels (cc,m,f)
 	in
 	List.iter cache_module com.modules;
+	let a = Parallel.run_in_new_pool com.timer_ctx (fun pool ->
+		Parallel.ParallelArray.map pool (fun (cc,m,f) ->
+			let chunks = f() in
+			(cc,m,chunks)
+		) (DynArray.to_array parallels) (cc,null_module,[])
+	) in
+	Array.iter (fun (cc,m,chunks) ->
+		cc#add_binary_cache m chunks
+	) a;
 	begin match com.get_macros() with
 		| None -> ()
 		| Some com -> cache_context cs com
 	end;
-	if Define.raw_defined com.defines "hxb.stats" then begin
-		HxbReader.dump_stats (platform_name com.platform) com.hxb_reader_stats;
-	end
+	if Define.raw_defined com.defines "hxb.stats" then
+		HxbReader.dump_stats (platform_name com.platform) com.hxb_reader_stats
 
 let maybe_add_context_sign cs com desc =
 	let sign = Define.get_signature com.defines in

+ 1 - 1
src/context/display/diagnostics.ml

@@ -77,7 +77,7 @@ let check_other_things com e =
 		| TCall({eexpr = TField(e1,fa)},el) when not in_value && PurityState.is_pure_field_access fa -> compound "call" el e.epos
 		| TNew _ | TCall _ | TBinop ((Ast.OpAssignOp _ | Ast.OpAssign),_,_) | TUnop ((Ast.Increment | Ast.Decrement),_,_)
 		| TReturn _ | TBreak | TContinue | TThrow _ | TCast (_,Some _)
-		| TIf _ | TTry _ | TSwitch _ | TWhile _ | TFor _ ->
+		| TIf _ | TTry _ | TSwitch _ | TWhile _ ->
 			had_effect := true;
 			Type.iter (loop true) e
 		| TParenthesis e1 | TMeta(_,e1) ->

+ 3 - 12
src/context/display/display.ml

@@ -31,25 +31,16 @@ module ReferencePosition = struct
 end
 
 let preprocess_expr com e = match com.display.dms_kind with
-	| DMDefinition | DMTypeDefinition | DMUsage _ | DMImplementation | DMHover | DMDefault -> ExprPreprocessing.find_before_pos com.display.dms_kind e
-	| DMSignature -> ExprPreprocessing.find_display_call e
+	| DMDefinition | DMTypeDefinition | DMUsage _ | DMImplementation | DMHover | DMDefault -> ExprPreprocessing.find_before_pos com.parser_state.was_auto_triggered com.display.dms_kind e
+	| DMSignature -> ExprPreprocessing.find_display_call com.parser_state.was_auto_triggered e
 	| _ -> e
 
-let get_expected_name with_type = match with_type with
-	| WithType.Value (Some src) | WithType.WithType(_,Some src) ->
-		(match src with
-		| WithType.FunctionArgument si -> Some si.si_name
-		| WithType.StructureField si -> Some si .si_name
-		| WithType.ImplicitReturn -> None
-		)
-	| _ -> None
-
 let sort_fields l with_type tk =
 	let p = match tk with
 		| TKExpr p | TKField p -> Some p
 		| _ -> None
 	in
-	let expected_name = get_expected_name with_type in
+	let expected_name = WithType.get_expected_name with_type in
 	let l = List.map (fun ci ->
 		let i = get_sort_index tk ci (Option.default Globals.null_pos p) expected_name in
 		ci,i

+ 2 - 1
src/context/display/displayException.ml

@@ -184,7 +184,8 @@ let to_json ctx de =
 		let named_source_kind = function
 			| WithType.FunctionArgument name -> (0, name)
 			| WithType.StructureField name -> (1, name)
-			| _ -> die "" __LOC__
+			| LocalVariable name -> (2, name)
+			| ImplicitReturn -> die "" __LOC__
 		in
 		let ctx = Genjson.create_context GMFull in
 		let generate_name kind =

+ 3 - 1
src/context/display/displayFields.ml

@@ -49,7 +49,7 @@ let collect_static_extensions ctx items e p =
 	let rec dup t = Type.map dup t in
 	let handle_field c f acc =
 		let f = { f with cf_type = opt_type f.cf_type } in
-		let monos = List.map (fun _ -> spawn_monomorph ctx.e p) f.cf_params in
+		let monos = List.map (fun _ -> spawn_monomorph ctx p) f.cf_params in
 		let map = apply_params f.cf_params monos in
 		match follow (map f.cf_type) with
 		| TFun((_,_,TType({t_path=["haxe";"macro"], "ExprOf"}, [t])) :: args, ret)
@@ -169,6 +169,8 @@ let collect ctx e_ast e dk with_type p =
 					end
 				end else
 					loop items (mk_anon ~fields (ref Closed))
+			| CTypes [(t,_)] ->
+				loop items t
 			| CTypes tl ->
 				items
 			| CUnknown ->

+ 32 - 38
src/context/display/displayJson.ml

@@ -49,10 +49,9 @@ class display_handler (jsonrpc : jsonrpc_handler) com (cs : CompilationCache.t)
 
 	method get_cs = cs
 
-	method enable_display mode =
+	method enable_display ?(skip_define=false) mode =
 		com.display <- create mode;
-		Parser.display_mode := mode;
-		Common.define_value com Define.Display "1"
+		if not skip_define then Common.define_value com Define.Display "1"
 
 	method set_display_file was_auto_triggered requires_offset =
 		let file = jsonrpc#get_opt_param (fun () ->
@@ -65,7 +64,7 @@ class display_handler (jsonrpc : jsonrpc_handler) com (cs : CompilationCache.t)
 		) None in
 
 		let pos = if requires_offset then jsonrpc#get_int_param "offset" else (-1) in
-		Parser.was_auto_triggered := was_auto_triggered;
+		com.parser_state.was_auto_triggered <- was_auto_triggered;
 
 		if file <> file_input_marker then begin
 			let file_unique = com.file_keys#get file in
@@ -99,13 +98,13 @@ class display_handler (jsonrpc : jsonrpc_handler) com (cs : CompilationCache.t)
 			com.file_contents <- file_contents;
 
 			match files with
-			| [] | [_] -> DisplayPosition.display_position#set { pfile = file; pmin = pos; pmax = pos; };
+			| [] -> DisplayPosition.display_position#set { pfile = file; pmin = pos; pmax = pos; };
 			| _ -> DisplayPosition.display_position#set_files files;
 		end
 end
 
 class hxb_reader_api_com
-	~(headers_only : bool)
+	~(full_restore : bool)
 	(com : Common.context)
 	(cc : CompilationCache.context_cache)
 = object(self)
@@ -116,7 +115,9 @@ class hxb_reader_api_com
 			m_path = path;
 			m_types = [];
 			m_statics = None;
-			m_extra = mc.mc_extra
+			(* Creating a new m_extra because if we keep the same reference, display requests *)
+			(* can alter it with bad data (for example adding dependencies that are not cached) *)
+			m_extra = { mc.mc_extra with m_deps = mc.mc_extra.m_deps; m_display_deps = None }
 		}
 
 	method add_module (m : module_def) =
@@ -137,8 +138,8 @@ class hxb_reader_api_com
 			cc#find_module m_path
 		with Not_found ->
 			let mc = cc#get_hxb_module m_path in
-			let reader = new HxbReader.hxb_reader mc.mc_path com.hxb_reader_stats in
-			fst (reader#read_chunks_until (self :> HxbReaderApi.hxb_reader_api) mc.mc_chunks (if headers_only then MTF else EOM))
+			let reader = new HxbReader.hxb_reader mc.mc_path com.hxb_reader_stats (if Common.defined com Define.HxbTimes then Some com.timer_ctx else None) in
+			fst (reader#read_chunks_until (self :> HxbReaderApi.hxb_reader_api) mc.mc_chunks (if full_restore then EOM else MTF) full_restore)
 
 	method basic_types =
 		com.basic
@@ -148,10 +149,13 @@ class hxb_reader_api_com
 
 	method read_expression_eagerly (cf : tclass_field) =
 		false
+
+	method make_lazy_type t f =
+		TLazy (make_unforced_lazy t f "com-api")
 end
 
-let find_module ~(headers_only : bool) com cc path =
-	(new hxb_reader_api_com ~headers_only com cc)#find_module path
+let find_module ~(full_restore : bool) com cc path =
+	(new hxb_reader_api_com ~full_restore com cc)#find_module path
 
 type handler_context = {
 	com : Common.context;
@@ -174,11 +178,11 @@ let handler =
 			hctx.send_result (JObject [
 				"methods",jarray methods;
 				"haxeVersion",jobject [
-					"major",jint version_major;
-					"minor",jint version_minor;
-					"patch",jint version_revision;
-					"pre",(match version_pre with None -> jnull | Some pre -> jstring pre);
-					"build",(match Version.version_extra with None -> jnull | Some(_,build) -> jstring build);
+					"major",jint hctx.com.version.major;
+					"minor",jint hctx.com.version.minor;
+					"patch",jint hctx.com.version.revision;
+					"pre",(match hctx.com.version.pre with None -> jnull | Some pre -> jstring pre);
+					"build",(match hctx.com.version.extra with None -> jnull | Some(_,build) -> jstring build);
 				];
 				"protocolVersion",jobject [
 					"major",jint 0;
@@ -207,13 +211,9 @@ let handler =
 		);
 		"display/diagnostics", (fun hctx ->
 			hctx.display#set_display_file false false;
-			hctx.display#enable_display DMNone;
+			hctx.display#enable_display ~skip_define:true DMNone;
+			hctx.com.display <- { hctx.com.display with dms_display_file_policy = DFPAlso; dms_per_file = true; dms_populate_cache = true };
 			hctx.com.report_mode <- RMDiagnostics (List.map (fun (f,_) -> f) hctx.com.file_contents);
-
-			(match hctx.com.file_contents with
-			| [file, None] ->
-				hctx.com.display <- { hctx.com.display with dms_display_file_policy = DFPAlso; dms_per_file = true; dms_populate_cache = !ServerConfig.populate_cache_from_display};
-			| _ -> ());
 		);
 		"display/implementation", (fun hctx ->
 			hctx.display#set_display_file false true;
@@ -349,12 +349,13 @@ let handler =
 			let path = Path.parse_path (hctx.jsonrpc#get_string_param "path") in
 			let cs = hctx.display#get_cs in
 			let cc = cs#get_context sign in
+			let full_restore = Define.defined hctx.com.defines Define.DisableHxbOptimizations in
 			let m = try
-				find_module ~headers_only:true hctx.com cc path
+				find_module ~full_restore hctx.com cc path
 			with Not_found ->
 				hctx.send_error [jstring "No such module"]
 			in
-			hctx.send_result (generate_module (cc#get_hxb) (find_module ~headers_only:true hctx.com cc) m)
+			hctx.send_result (generate_module (cc#get_hxb) (find_module ~full_restore hctx.com cc) m)
 		);
 		"server/type", (fun hctx ->
 			let sign = Digest.from_hex (hctx.jsonrpc#get_string_param "signature") in
@@ -362,7 +363,7 @@ let handler =
 			let typeName = hctx.jsonrpc#get_string_param "typeName" in
 			let cc = hctx.display#get_cs#get_context sign in
 			let m = try
-				find_module ~headers_only:true hctx.com cc path
+				find_module ~full_restore:true hctx.com cc path
 			with Not_found ->
 				hctx.send_error [jstring "No such module"]
 			in
@@ -462,27 +463,21 @@ let handler =
 				l := jstring ("Legacy completion " ^ (if b then "enabled" else "disabled")) :: !l;
 				()
 			) ();
-			hctx.jsonrpc#get_opt_param (fun () ->
-				let b = hctx.jsonrpc#get_bool_param "populateCacheFromDisplay" in
-				ServerConfig.populate_cache_from_display := b;
-				l := jstring ("Compilation cache refill from display " ^ (if b then "enabled" else "disabled")) :: !l;
-				()
-			) ();
 			hctx.send_result (jarray !l)
 		);
 		"server/memory",(fun hctx ->
-			let j = Memory.get_memory_json hctx.display#get_cs MCache in
+			let j = DisplayMemory.get_memory_json hctx.display#get_cs MCache in
 			hctx.send_result j
 		);
 		"server/memory/context",(fun hctx ->
 			let sign = Digest.from_hex (hctx.jsonrpc#get_string_param "signature") in
-			let j = Memory.get_memory_json hctx.display#get_cs (MContext sign) in
+			let j = DisplayMemory.get_memory_json hctx.display#get_cs (MContext sign) in
 			hctx.send_result j
 		);
 		"server/memory/module",(fun hctx ->
 			let sign = Digest.from_hex (hctx.jsonrpc#get_string_param "signature") in
 			let path = Path.parse_path (hctx.jsonrpc#get_string_param "path") in
-			let j = Memory.get_memory_json hctx.display#get_cs (MModule(sign,path)) in
+			let j = DisplayMemory.get_memory_json hctx.display#get_cs (MModule(sign,path)) in
 			hctx.send_result j
 		);
 		(* TODO: wait till gama complains about the naming, then change it to something else *)
@@ -497,7 +492,7 @@ let handler =
 	List.iter (fun (s,f) -> Hashtbl.add h s f) l;
 	h
 
-let parse_input com input report_times =
+let parse_input com input =
 	let input =
 		JsonRpc.handle_jsonrpc_error (fun () -> JsonRpc.parse_request input) send_json
 	in
@@ -510,9 +505,8 @@ let parse_input com input report_times =
 			"result",json;
 			"timestamp",jfloat (Unix.gettimeofday ());
 		] in
-		let fl = if !report_times then begin
-			close_times();
-			let _,_,root = Timer.build_times_tree () in
+		let fl = if com.timer_ctx.measure_times = Yes then begin
+			let _,_,root = Timer.build_times_tree com.timer_ctx in
 			begin match json_of_times root with
 			| None -> fl
 			| Some jo -> ("timers",jo) :: fl

+ 256 - 0
src/context/display/displayMemory.ml

@@ -0,0 +1,256 @@
+open Globals
+open Common
+open Memory
+open Genjson
+open Type
+
+let get_memory_json (cs : CompilationCache.t) mreq =
+	begin match mreq with
+	| MCache ->
+		let old_gc = Gc.get() in
+		Gc.set { old_gc with
+			Gc.max_overhead = 0;
+			Gc.space_overhead = 0
+		};
+		Gc.compact();
+		Gc.set old_gc;
+		let stat = Gc.quick_stat() in
+		let size = (float_of_int stat.Gc.heap_words) *. (float_of_int (Sys.word_size / 8)) in
+		let cache_mem = cs#get_pointers in
+		let contexts = cs#get_contexts in
+		let j_contexts = List.map (fun cc -> jobject [
+			"context",cc#get_json;
+			"size",jint (mem_size cc);
+		]) contexts in
+		let mem_size_2 v exclude =
+			Objsize.size_with_headers (Objsize.objsize v exclude [])
+		in
+		jobject [
+			"contexts",jarray j_contexts;
+			"memory",jobject [
+				"totalCache",jint (mem_size cs);
+				"contextCache",jint (mem_size cache_mem.(0));
+				"haxelibCache",jint (mem_size cache_mem.(1));
+				"directoryCache",jint (mem_size cache_mem.(2));
+				"nativeLibCache",jint (mem_size cache_mem.(3));
+				"additionalSizes",jarray (
+					(match !MacroContext.macro_interp_cache with
+					| Some interp ->
+						jobject ["name",jstring "macro interpreter";"size",jint (mem_size MacroContext.macro_interp_cache);"child",jarray [
+							jobject ["name",jstring "builtins";"size",jint (mem_size_2 interp.builtins [Obj.repr interp])];
+							jobject ["name",jstring "debug";"size",jint (mem_size_2 interp.debug [Obj.repr interp])];
+							jobject ["name",jstring "curapi";"size",jint (mem_size_2 interp.curapi [Obj.repr interp])];
+							jobject ["name",jstring "type_cache";"size",jint (mem_size_2 interp.type_cache [Obj.repr interp])];
+							jobject ["name",jstring "overrides";"size",jint (mem_size_2 interp.overrides [Obj.repr interp])];
+							jobject ["name",jstring "array_prototype";"size",jint (mem_size_2 interp.array_prototype [Obj.repr interp])];
+							jobject ["name",jstring "string_prototype";"size",jint (mem_size_2 interp.string_prototype [Obj.repr interp])];
+							jobject ["name",jstring "vector_prototype";"size",jint (mem_size_2 interp.vector_prototype [Obj.repr interp])];
+							jobject ["name",jstring "instance_prototypes";"size",jint (mem_size_2 interp.instance_prototypes [Obj.repr interp])];
+							jobject ["name",jstring "static_prototypes";"size",jint (mem_size_2 interp.static_prototypes [Obj.repr interp])];
+							jobject ["name",jstring "constructors";"size",jint (mem_size_2 interp.constructors [Obj.repr interp])];
+							jobject ["name",jstring "file_keys";"size",jint (mem_size_2 interp.file_keys [Obj.repr interp])];
+							jobject ["name",jstring "toplevel";"size",jint (mem_size_2 interp.toplevel [Obj.repr interp])];
+							jobject ["name",jstring "eval";"size",jint (mem_size_2 interp.eval [Obj.repr interp]);"child", jarray [
+								(match interp.eval.env with
+								| Some env ->
+									jobject ["name",jstring "env";"size",jint (mem_size_2 interp.eval.env [Obj.repr interp; Obj.repr interp.eval]);"child", jarray [
+										jobject ["name",jstring "env_info";"size",jint (mem_size_2 env.env_info [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_debug";"size",jint (mem_size_2 env.env_debug [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_locals";"size",jint (mem_size_2 env.env_locals [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_captures";"size",jint (mem_size_2 env.env_captures [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_extra_locals";"size",jint (mem_size_2 env.env_extra_locals [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_parent";"size",jint (mem_size_2 env.env_parent [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_eval";"size",jint (mem_size_2 env.env_eval [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+									]];
+								| None ->
+									jobject ["name",jstring "env";"size",jint (mem_size_2 interp.eval.env [Obj.repr interp; Obj.repr interp.eval])];
+								);
+								jobject ["name",jstring "thread";"size",jint (mem_size_2 interp.eval.thread [Obj.repr interp; Obj.repr interp.eval]);"child", jarray [
+									jobject ["name",jstring "tthread";"size",jint (mem_size_2 interp.eval.thread.tthread [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
+									jobject ["name",jstring "tdeque";"size",jint (mem_size_2 interp.eval.thread.tdeque [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
+									jobject ["name",jstring "tevents";"size",jint (mem_size_2 interp.eval.thread.tevents [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
+									jobject ["name",jstring "tstorage";"size",jint (mem_size_2 interp.eval.thread.tstorage [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
+								]];
+								jobject ["name",jstring "debug_state";"size",jint (mem_size_2 interp.eval.debug_state [Obj.repr interp; Obj.repr interp.eval])];
+								jobject ["name",jstring "breakpoint";"size",jint (mem_size_2 interp.eval.breakpoint [Obj.repr interp; Obj.repr interp.eval])];
+								jobject ["name",jstring "caught_types";"size",jint (mem_size_2 interp.eval.caught_types [Obj.repr interp; Obj.repr interp.eval])];
+								jobject ["name",jstring "caught_exception";"size",jint (mem_size_2 interp.eval.caught_exception [Obj.repr interp; Obj.repr interp.eval])];
+								jobject ["name",jstring "last_return";"size",jint (mem_size_2 interp.eval.last_return [Obj.repr interp; Obj.repr interp.eval])];
+								jobject ["name",jstring "debug_channel";"size",jint (mem_size_2 interp.eval.debug_channel [Obj.repr interp; Obj.repr interp.eval])];
+							]];
+							jobject ["name",jstring "evals";"size",jint (mem_size_2 interp.evals [Obj.repr interp])];
+							jobject ["name",jstring "exception_stack";"size",jint (mem_size_2 interp.exception_stack [Obj.repr interp])];
+						]];
+					| None ->
+						jobject ["name",jstring "macro interpreter";"size",jint (mem_size MacroContext.macro_interp_cache)];
+					)
+					::
+					[
+						(* jobject ["name",jstring "macro stdlib";"size",jint (mem_size (EvalContext.GlobalState.stdlib))];
+						jobject ["name",jstring "macro macro_lib";"size",jint (mem_size (EvalContext.GlobalState.macro_lib))]; *)
+						jobject ["name",jstring "last completion result";"size",jint (mem_size (DisplayException.last_completion_result))];
+						jobject ["name",jstring "Lexer file cache";"size",jint (mem_size (Lexer.all_files))];
+						jobject ["name",jstring "GC heap words";"size",jint (int_of_float size)];
+					]
+				);
+			]
+		]
+	| MContext sign ->
+		let cc = cs#get_context sign in
+		let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty cs#get_modules in
+		let l = Hashtbl.fold (fun _ m acc ->
+			(m,(get_module_memory cs all_modules m)) :: acc
+		) cc#get_modules [] in
+		let l = List.sort (fun (_,(size1,_)) (_,(size2,_)) -> compare size2 size1) l in
+		let leaks = ref [] in
+		let l = List.map (fun (m,(size,(reached,_,_,mleaks))) ->
+			if reached then leaks := (m,mleaks) :: !leaks;
+			jobject [
+				"path",jstring (s_type_path m.m_path);
+				"size",jint size;
+				"hasTypes",jbool (match m.m_extra.m_kind with MCode | MMacro -> true | _ -> false);
+			]
+		) l in
+		let leaks = match !leaks with
+			| [] -> jnull
+			| leaks ->
+				let jleaks = List.map (fun (m,leaks) ->
+					let jleaks = List.map (fun s -> jobject ["path",jstring s]) leaks in
+					jobject [
+						"path",jstring (s_type_path m.m_path);
+						"leaks",jarray jleaks;
+					]
+				) leaks in
+				jarray jleaks
+		in
+		let cache_mem = cc#get_pointers in
+		jobject [
+			"leaks",leaks;
+			"syntaxCache",jobject [
+				"size",jint (mem_size cache_mem.(0));
+			];
+			"moduleCache",jobject [
+				"size",jint (mem_size cache_mem.(1));
+				"list",jarray l;
+			];
+			"binaryCache",jobject [
+				"size",jint (mem_size cache_mem.(2));
+			];
+		]
+	| MModule(sign,path) ->
+		let cc = cs#get_context sign in
+		let m = cc#find_module path in
+		let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty cs#get_modules in
+		let _,(_,deps,out,_) = get_module_memory cs all_modules m in
+		let deps = update_module_type_deps deps m in
+		let out = get_out out in
+		let types = List.map (fun md ->
+			let fields,inf = match md with
+				| TClassDecl c ->
+					let own_deps = ref deps in
+					let field acc cf =
+						let repr = Obj.repr cf in
+						own_deps := List.filter (fun repr' -> repr != repr') !own_deps;
+						let deps = List.filter (fun repr' -> repr' != repr) deps in
+						let size = Objsize.size_with_headers (Objsize.objsize cf deps out) in
+						(cf,size) :: acc
+					in
+					let fields = List.fold_left field [] c.cl_ordered_fields in
+					let fields = List.fold_left field fields c.cl_ordered_statics in
+					let fields = List.sort (fun (_,size1) (_,size2) -> compare size2 size1) fields in
+					let fields = List.map (fun (cf,size) ->
+						jobject [
+							"name",jstring cf.cf_name;
+							"size",jint size;
+							"pos",generate_pos_as_location cf.cf_name_pos;
+						]
+					) fields in
+					let repr = Obj.repr c in
+					let deps = List.filter (fun repr' -> repr' != repr) !own_deps in
+					fields,Objsize.objsize c deps out
+				| TEnumDecl en ->
+					let repr = Obj.repr en in
+					let deps = List.filter (fun repr' -> repr' != repr) deps in
+					[],Objsize.objsize en deps out
+				| TTypeDecl td ->
+					let repr = Obj.repr td in
+					let deps = List.filter (fun repr' -> repr' != repr) deps in
+					[],Objsize.objsize td deps out
+				| TAbstractDecl a ->
+					let repr = Obj.repr a in
+					let deps = List.filter (fun repr' -> repr' != repr) deps in
+					[],Objsize.objsize a deps out
+			in
+			let size = Objsize.size_with_headers inf in
+			let jo = jobject [
+				"name",jstring (s_type_path (t_infos md).mt_path);
+				"size",jint size;
+				"pos",generate_pos_as_location (t_infos md).mt_name_pos;
+				"fields",jarray fields;
+			] in
+			size,jo
+		) m.m_types in
+		let types = List.sort (fun (size1,_) (size2,_) -> compare size2 size1) types in
+		let types = List.map snd types in
+		jobject [
+			"moduleExtra",jint (Objsize.size_with_headers (Objsize.objsize m.m_extra deps out));
+			"types",jarray types;
+		]
+	end
+
+let display_memory com =
+	let verbose = com.verbose in
+	let print = print_endline in
+	Gc.full_major();
+	Gc.compact();
+	let mem = Gc.stat() in
+	print ("Total Allocated Memory " ^ fmt_size (mem.Gc.heap_words * (Sys.word_size asr 8)));
+	print ("Free Memory " ^ fmt_size (mem.Gc.free_words * (Sys.word_size asr 8)));
+	let c = com.cs in
+	print ("Total cache size " ^ size c);
+	(* print ("  haxelib " ^ size c.c_haxelib); *)
+	(* print ("  parsed ast " ^ size c.c_files ^ " (" ^ string_of_int (Hashtbl.length c.c_files) ^ " files stored)"); *)
+	(* print ("  typed modules " ^ size c.c_modules ^ " (" ^ string_of_int (Hashtbl.length c.c_modules) ^ " modules stored)"); *)
+	let module_list = c#get_modules in
+	let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty module_list in
+	let modules = List.fold_left (fun acc m ->
+		let (size,r) = get_module_memory c all_modules m in
+		(m,size,r) :: acc
+	) [] module_list in
+	let cur_key = ref "" and tcount = ref 0 and mcount = ref 0 in
+	List.iter (fun (m,size,(reached,deps,out,leaks)) ->
+		let key = m.m_extra.m_sign in
+		if key <> !cur_key then begin
+			print (Printf.sprintf ("    --- CONFIG %s ----------------------------") (Digest.to_hex key));
+			cur_key := key;
+		end;
+		print (Printf.sprintf "    %s : %s" (s_type_path m.m_path) (fmt_size size));
+		(if reached then try
+			incr mcount;
+			let lcount = ref 0 in
+			let leak l =
+				incr lcount;
+				incr tcount;
+				print (Printf.sprintf "      LEAK %s" l);
+				if !lcount >= 3 && !tcount >= 100 && not verbose then begin
+					print (Printf.sprintf "      ...");
+					raise Exit;
+				end;
+			in
+			List.iter leak leaks;
+		with Exit ->
+			());
+		if verbose then begin
+			print (Printf.sprintf "      %d total deps" (List.length deps));
+			PMap.iter (fun _ mdep ->
+				let md = (com.cs#get_context mdep.md_sign)#find_module mdep.md_path in
+				print (Printf.sprintf "      dep %s%s" (s_type_path mdep.md_path) (module_sign key md));
+			) m.m_extra.m_deps;
+		end;
+		flush stdout
+	) (List.sort (fun (m1,s1,_) (m2,s2,_) ->
+		let k1 = m1.m_extra.m_sign and k2 = m2.m_extra.m_sign in
+		if k1 = k2 then s1 - s2 else if k1 > k2 then 1 else -1
+	) modules);
+	if !mcount > 0 then print ("*** " ^ string_of_int !mcount ^ " modules have leaks !");
+	print "Cache dump complete"

+ 1 - 1
src/context/display/displayPath.ml

@@ -189,7 +189,7 @@ let handle_path_display ctx path p =
 			(* We assume that we want to go to the module file, not a specific type
 			   which might not even exist anyway. *)
 			let mt = ctx.g.do_load_module ctx (sl,s) p in
-			let p = { pfile = (Path.UniqueKey.lazy_path mt.m_extra.m_file); pmin = 0; pmax = 0} in
+			let p = file_pos (Path.UniqueKey.lazy_path mt.m_extra.m_file) in
 			raise_positions [p]
 		| (IDKModule(sl,s),_),DMHover ->
 			let m = ctx.g.do_load_module ctx (sl,s) p in

+ 9 - 8
src/context/display/displayTexpr.ml

@@ -62,7 +62,8 @@ let actually_check_display_field ctx c cff p =
 	let cff = TypeloadFields.transform_field (ctx,cctx) c cff (ref []) (pos cff.cff_name) in
 	let display_modifier = Typeload.check_field_access ctx cff in
 	let fctx = TypeloadFields.create_field_context ctx cctx cff true display_modifier in
-	let cf = TypeloadFields.init_field (ctx,cctx,fctx) cff in
+	let cf = TypeloadFields.create_class_field cctx cff in
+	TypeloadFields.init_field (ctx,cctx,fctx) cff cf;
 	flush_pass ctx.g PTypeField ("check_display_field",(fst c.cl_path @ [snd c.cl_path;fst cff.cff_name]));
 	ignore(follow cf.cf_type)
 
@@ -72,7 +73,7 @@ let check_display_field ctx sc c cf =
 
 let check_display_class ctx decls c =
 	let check_field sc cf =
-		if display_position#enclosed_in cf.cf_pos then
+		if not (has_class_field_flag cf CfNoLookup) && display_position#enclosed_in cf.cf_pos then
 			check_display_field ctx sc c cf;
 		DisplayEmitter.check_display_metadata ctx cf.cf_meta
 	in
@@ -87,7 +88,7 @@ let check_display_class ctx decls c =
 		List.iter check_field c.cl_ordered_statics;
 	| _ ->
 		let sc = find_class_by_position decls c.cl_name_pos in
-		ignore(Typeload.type_type_params ctx TPHType c.cl_path null_pos sc.d_params);
+		ignore(Typeload.type_type_params ctx TPHType c.cl_path sc.d_params);
 		List.iter (function
 			| (HExtends ptp | HImplements ptp) when display_position#enclosed_in ptp.pos_full ->
 				ignore(Typeload.load_instance ~allow_display:true ctx ptp ParamNormal LoadNormal)
@@ -101,7 +102,7 @@ let check_display_class ctx decls c =
 
 let check_display_enum ctx decls en =
 	let se = find_enum_by_position decls en.e_name_pos in
-	ignore(Typeload.type_type_params ctx TPHType en.e_path null_pos se.d_params);
+	ignore(Typeload.type_type_params ctx TPHType en.e_path se.d_params);
 	PMap.iter (fun _ ef ->
 		if display_position#enclosed_in ef.ef_pos then begin
 			let sef = find_enum_field_by_position se ef.ef_name_pos in
@@ -111,12 +112,12 @@ let check_display_enum ctx decls en =
 
 let check_display_typedef ctx decls td =
 	let st = find_typedef_by_position decls td.t_name_pos in
-	ignore(Typeload.type_type_params ctx TPHType td.t_path null_pos st.d_params);
+	ignore(Typeload.type_type_params ctx TPHType td.t_path st.d_params);
 	ignore(Typeload.load_complex_type ctx true LoadNormal st.d_data)
 
 let check_display_abstract ctx decls a =
 	let sa = find_abstract_by_position decls a.a_name_pos in
-	ignore(Typeload.type_type_params ctx TPHType a.a_path null_pos sa.d_params);
+	ignore(Typeload.type_type_params ctx TPHType a.a_path sa.d_params);
 	List.iter (function
 		| (AbOver(ct,p) | AbFrom(ct,p) | AbTo(ct,p)) when display_position#enclosed_in p ->
 			ignore(Typeload.load_complex_type ctx true LoadNormal (ct,p))
@@ -173,11 +174,11 @@ let check_display_file ctx cs =
 			let m = try
 				ctx.com.module_lut#find path
 			with Not_found ->
-				begin match !TypeloadCacheHook.type_module_hook ctx.com (delay ctx.g PTypeField) path null_pos with
+				begin match !TypeloadCacheHook.type_module_hook ctx.com (delay ctx.g) path null_pos with
 				| NoModule | BadModule _ -> raise Not_found
 				| BinaryModule mc ->
 					let api = (new TypeloadModule.hxb_reader_api_typeload ctx.com ctx.g TypeloadModule.load_module' p :> HxbReaderApi.hxb_reader_api) in
-					let reader = new HxbReader.hxb_reader path ctx.com.hxb_reader_stats in
+					let reader = new HxbReader.hxb_reader path ctx.com.hxb_reader_stats (if Common.defined ctx.com Define.HxbTimes then Some ctx.com.timer_ctx else None) in
 					let m = reader#read_chunks api mc.mc_chunks in
 					m
 				| GoodModule m ->

+ 211 - 211
src/context/display/displayToplevel.ml

@@ -109,20 +109,20 @@ end
 
 let explore_class_paths com timer class_paths recursive f_pack f_module =
 	let cs = com.cs in
-	let t = Timer.timer (timer @ ["class path exploration"]) in
-	let checked = Hashtbl.create 0 in
-	let tasks = ExtList.List.filter_map (fun path ->
-		match path#get_directory_path with
-			| Some path ->
-				Some (new explore_class_path_task com checked recursive f_pack f_module path [])
-			| None ->
-				None
-	) class_paths in
-	let task = new arbitrary_task ["explore"] 50 (fun () ->
-		List.iter (fun task -> task#run) tasks
-	) in
-	cs#add_task task;
-	t()
+	Timer.time com.timer_ctx (timer @ ["class path exploration"]) (fun () ->
+		let checked = Hashtbl.create 0 in
+		let tasks = ExtList.List.filter_map (fun path ->
+			match path#get_directory_path with
+				| Some path ->
+					Some (new explore_class_path_task com checked recursive f_pack f_module path [])
+				| None ->
+					None
+		) class_paths in
+		let task = new arbitrary_task ["explore"] 50 (fun () ->
+			List.iter (fun task -> task#run) tasks
+		) in
+		cs#add_task task;
+	) ()
 
 let read_class_paths com timer =
 	explore_class_paths com timer (com.class_paths#filter (fun cp -> cp#path <> "")) true (fun _ -> ()) (fun file path ->
@@ -225,7 +225,6 @@ let is_pack_visible pack =
 	not (List.exists (fun s -> String.length s > 0 && s.[0] = '_') pack)
 
 let collect ctx tk with_type sort =
-	let t = Timer.timer ["display";"toplevel collect"] in
 	let cctx = CollectionContext.create ctx in
 	let curpack = fst ctx.c.curclass.cl_path in
 	(* Note: This checks for the explicit `ServerConfig.legacy_completion` setting instead of using
@@ -298,12 +297,12 @@ let collect ctx tk with_type sort =
 	| TKType | TKOverride -> ()
 	| TKExpr p | TKPattern p | TKField p ->
 		(* locals *)
-		let t = Timer.timer ["display";"toplevel collect";"locals"] in
-		PMap.iter (fun _ v ->
-			if not (is_gen_local v) then
-				add (make_ci_local v (tpair ~values:(get_value_meta v.v_meta) v.v_type)) (Some v.v_name)
-		) ctx.f.locals;
-		t();
+		Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"locals"] (fun () ->
+			PMap.iter (fun _ v ->
+				if not (is_gen_local v) then
+					add (make_ci_local v (tpair ~values:(get_value_meta v.v_meta) v.v_type)) (Some v.v_name)
+			) ctx.f.locals;
+		) ();
 
 		let add_field scope origin cf =
 			let origin,cf = match origin with
@@ -329,137 +328,137 @@ let collect ctx tk with_type sort =
 			if not (Meta.has Meta.NoCompletion cf.cf_meta) then add_field scope origin cf
 		in
 
-		let t = Timer.timer ["display";"toplevel collect";"fields"] in
-		(* member fields *)
-		if ctx.e.curfun <> FunStatic then begin
-			let all_fields = Type.TClass.get_all_fields ctx.c.curclass (extract_param_types ctx.c.curclass.cl_params) in
-			PMap.iter (fun _ (c,cf) ->
-				let origin = if c == ctx.c.curclass then Self (TClassDecl c) else Parent (TClassDecl c) in
-				maybe_add_field CFSMember origin cf
-			) all_fields;
-			(* TODO: local using? *)
-		end;
-
-		(* statics *)
-		begin match ctx.c.curclass.cl_kind with
-		| KAbstractImpl ({a_impl = Some c} as a) ->
-			let origin = Self (TAbstractDecl a) in
-			List.iter (fun cf ->
-				if has_class_field_flag cf CfImpl then begin
-					if ctx.e.curfun = FunStatic then ()
-					else begin
-						let cf = prepare_using_field cf in
-						maybe_add_field CFSMember origin cf
-					end
-				end else
-					maybe_add_field CFSStatic origin cf
-			) c.cl_ordered_statics
-		| _ ->
-			List.iter (maybe_add_field CFSStatic (Self (TClassDecl ctx.c.curclass))) ctx.c.curclass.cl_ordered_statics
-		end;
-		t();
-
-		let t = Timer.timer ["display";"toplevel collect";"enum ctors"] in
-		(* enum constructors *)
-		let rec enum_ctors t =
-			match t with
-			| TAbstractDecl ({a_impl = Some c} as a) when a.a_enum && not (path_exists cctx a.a_path) && ctx.c.curclass != c ->
-				add_path cctx a.a_path;
+		Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"fields"] (fun () ->
+			(* member fields *)
+			if ctx.e.curfun <> FunStatic then begin
+				let all_fields = Type.TClass.get_all_fields ctx.c.curclass (extract_param_types ctx.c.curclass.cl_params) in
+				PMap.iter (fun _ (c,cf) ->
+					let origin = if c == ctx.c.curclass then Self (TClassDecl c) else Parent (TClassDecl c) in
+					maybe_add_field CFSMember origin cf
+				) all_fields;
+				(* TODO: local using? *)
+			end;
+
+			(* statics *)
+			begin match ctx.c.curclass.cl_kind with
+			| KAbstractImpl ({a_impl = Some c} as a) ->
+				let origin = Self (TAbstractDecl a) in
 				List.iter (fun cf ->
-					let ccf = CompletionClassField.make cf CFSMember (Self (decl_of_class c)) true in
-					if (has_class_field_flag cf CfEnum) && not (Meta.has Meta.NoCompletion cf.cf_meta) then
-						add (make_ci_enum_abstract_field a ccf (tpair cf.cf_type)) (Some cf.cf_name);
+					if has_class_field_flag cf CfImpl then begin
+						if ctx.e.curfun = FunStatic then ()
+						else begin
+							let cf = prepare_using_field cf in
+							maybe_add_field CFSMember origin cf
+						end
+					end else
+						maybe_add_field CFSStatic origin cf
 				) c.cl_ordered_statics
-			| TTypeDecl t ->
-				begin match follow t.t_type with
-					| TEnum (e,_) -> enum_ctors (TEnumDecl e)
-					| _ -> ()
-				end
-			| TEnumDecl e when not (path_exists cctx e.e_path) ->
-				add_path cctx e.e_path;
-				let origin = Self (TEnumDecl e) in
-				PMap.iter (fun _ ef ->
-					let is_qualified = is_qualified cctx ef.ef_name in
-					add (make_ci_enum_field (CompletionEnumField.make ef origin is_qualified) (tpair ef.ef_type)) (Some ef.ef_name)
-				) e.e_constrs;
 			| _ ->
-				()
-		in
-		List.iter enum_ctors ctx.m.curmod.m_types;
-		List.iter enum_ctors (List.map fst ctx.m.import_resolution#extract_type_imports);
-
-		(* enum constructors of expected type *)
-		begin match with_type with
-			| WithType.WithType(t,_) ->
-				(try enum_ctors (module_type_of_type (follow t)) with Exit -> ())
-			| _ -> ()
-		end;
-		t();
-
-		let t = Timer.timer ["display";"toplevel collect";"globals"] in
-		(* imported globals *)
-		PMap.iter (fun name (mt,s,_) ->
-			try
-				let is_qualified = is_qualified cctx name in
-				let class_import c =
-					let cf = PMap.find s c.cl_statics in
-					let cf = if name = cf.cf_name then cf else {cf with cf_name = name} in
-					let decl,make = match c.cl_kind with
-						| KAbstractImpl a -> TAbstractDecl a,
-							if has_class_field_flag cf CfEnum then make_ci_enum_abstract_field a else make_ci_class_field
-						| _ -> TClassDecl c,make_ci_class_field
+				List.iter (maybe_add_field CFSStatic (Self (TClassDecl ctx.c.curclass))) ctx.c.curclass.cl_ordered_statics
+			end;
+		) ();
+
+		Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"enum ctors"] (fun () ->
+			(* enum constructors *)
+			let rec enum_ctors t =
+				match t with
+				| TAbstractDecl ({a_impl = Some c} as a) when a.a_enum && not (path_exists cctx a.a_path) && ctx.c.curclass != c ->
+					add_path cctx a.a_path;
+					List.iter (fun cf ->
+						let ccf = CompletionClassField.make cf CFSMember (Self (decl_of_class c)) true in
+						if (has_class_field_flag cf CfEnum) && not (Meta.has Meta.NoCompletion cf.cf_meta) then
+							add (make_ci_enum_abstract_field a ccf (tpair cf.cf_type)) (Some cf.cf_name);
+					) c.cl_ordered_statics
+				| TTypeDecl t ->
+					begin match follow t.t_type with
+						| TEnum (e,_) -> enum_ctors (TEnumDecl e)
+						| _ -> ()
+					end
+				| TEnumDecl e when not (path_exists cctx e.e_path) ->
+					add_path cctx e.e_path;
+					let origin = Self (TEnumDecl e) in
+					PMap.iter (fun _ ef ->
+						let is_qualified = is_qualified cctx ef.ef_name in
+						add (make_ci_enum_field (CompletionEnumField.make ef origin is_qualified) (tpair ef.ef_type)) (Some ef.ef_name)
+					) e.e_constrs;
+				| _ ->
+					()
+			in
+			List.iter enum_ctors ctx.m.curmod.m_types;
+			List.iter enum_ctors (List.map fst ctx.m.import_resolution#extract_type_imports);
+
+			(* enum constructors of expected type *)
+			begin match with_type with
+				| WithType.WithType(t,_) ->
+					(try enum_ctors (module_type_of_type (follow t)) with Exit -> ())
+				| _ -> ()
+			end;
+		) ();
+
+		Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"globals"] (fun () ->
+			(* imported globals *)
+			PMap.iter (fun name (mt,s,_) ->
+				try
+					let is_qualified = is_qualified cctx name in
+					let class_import c =
+						let cf = PMap.find s c.cl_statics in
+						let cf = if name = cf.cf_name then cf else {cf with cf_name = name} in
+						let decl,make = match c.cl_kind with
+							| KAbstractImpl a -> TAbstractDecl a,
+								if has_class_field_flag cf CfEnum then make_ci_enum_abstract_field a else make_ci_class_field
+							| _ -> TClassDecl c,make_ci_class_field
+						in
+						let origin = StaticImport decl in
+						if can_access ctx c cf true && not (Meta.has Meta.NoCompletion cf.cf_meta) then begin
+							add (make (CompletionClassField.make cf CFSStatic origin is_qualified) (tpair ~values:(get_value_meta cf.cf_meta) cf.cf_type)) (Some name)
+						end
 					in
-					let origin = StaticImport decl in
-					if can_access ctx c cf true && not (Meta.has Meta.NoCompletion cf.cf_meta) then begin
-						add (make (CompletionClassField.make cf CFSStatic origin is_qualified) (tpair ~values:(get_value_meta cf.cf_meta) cf.cf_type)) (Some name)
+					match resolve_typedef mt with
+						| TClassDecl c -> class_import c;
+						| TEnumDecl en ->
+							let ef = PMap.find s en.e_constrs in
+							let ef = if name = ef.ef_name then ef else {ef with ef_name = name} in
+							let origin = StaticImport (TEnumDecl en) in
+							add (make_ci_enum_field (CompletionEnumField.make ef origin is_qualified) (tpair ef.ef_type)) (Some s)
+						| TAbstractDecl {a_impl = Some c} -> class_import c;
+						| _ -> raise Not_found
+				with Not_found ->
+					()
+			) ctx.m.import_resolution#extract_field_imports;
+		) ();
+
+		Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"rest"] (fun () ->
+			(* literals *)
+			add (make_ci_literal "null" (tpair t_dynamic)) (Some "null");
+			add (make_ci_literal "true" (tpair ctx.com.basic.tbool)) (Some "true");
+			add (make_ci_literal "false" (tpair ctx.com.basic.tbool)) (Some "false");
+			begin match ctx.e.curfun with
+				| FunMember | FunConstructor | FunMemberClassLocal ->
+					let t = TInst(ctx.c.curclass,extract_param_types ctx.c.curclass.cl_params) in
+					add (make_ci_literal "this" (tpair t)) (Some "this");
+					begin match ctx.c.curclass.cl_super with
+						| Some(c,tl) -> add (make_ci_literal "super" (tpair (TInst(c,tl)))) (Some "super")
+						| None -> ()
 					end
-				in
-				match resolve_typedef mt with
-					| TClassDecl c -> class_import c;
-					| TEnumDecl en ->
-						let ef = PMap.find s en.e_constrs in
-						let ef = if name = ef.ef_name then ef else {ef with ef_name = name} in
-						let origin = StaticImport (TEnumDecl en) in
-						add (make_ci_enum_field (CompletionEnumField.make ef origin is_qualified) (tpair ef.ef_type)) (Some s)
-					| TAbstractDecl {a_impl = Some c} -> class_import c;
-					| _ -> raise Not_found
-			with Not_found ->
-				()
-		) ctx.m.import_resolution#extract_field_imports;
-		t();
-
-		let t = Timer.timer ["display";"toplevel collect";"rest"] in
-		(* literals *)
-		add (make_ci_literal "null" (tpair t_dynamic)) (Some "null");
-		add (make_ci_literal "true" (tpair ctx.com.basic.tbool)) (Some "true");
-		add (make_ci_literal "false" (tpair ctx.com.basic.tbool)) (Some "false");
-		begin match ctx.e.curfun with
-			| FunMember | FunConstructor | FunMemberClassLocal ->
-				let t = TInst(ctx.c.curclass,extract_param_types ctx.c.curclass.cl_params) in
-				add (make_ci_literal "this" (tpair t)) (Some "this");
-				begin match ctx.c.curclass.cl_super with
-					| Some(c,tl) -> add (make_ci_literal "super" (tpair (TInst(c,tl)))) (Some "super")
-					| None -> ()
-				end
-			| FunMemberAbstract ->
-				let t = TInst(ctx.c.curclass,extract_param_types ctx.c.curclass.cl_params) in
-				add (make_ci_literal "abstract" (tpair t)) (Some "abstract");
-			| _ ->
-				()
-		end;
-
-		if not is_legacy_completion then begin
-			(* keywords *)
-			let kwds = [
-				Function; Var; Final; If; Else; While; Do; For; Break; Return; Continue; Switch;
-				Try; New; Throw; Untyped; Cast; Inline;
-			] in
-			List.iter (fun kwd -> add(make_ci_keyword kwd) (Some (s_keyword kwd))) kwds;
-
-			(* builtins *)
-			add (make_ci_literal "trace" (tpair (TFun(["value",false,t_dynamic],ctx.com.basic.tvoid)))) (Some "trace")
-		end;
-		t()
+				| FunMemberAbstract ->
+					let t = TInst(ctx.c.curclass,extract_param_types ctx.c.curclass.cl_params) in
+					add (make_ci_literal "abstract" (tpair t)) (Some "abstract");
+				| _ ->
+					()
+			end;
+
+			if not is_legacy_completion then begin
+				(* keywords *)
+				let kwds = [
+					Function; Var; Final; If; Else; While; Do; For; Break; Return; Continue; Switch;
+					Try; New; Throw; Untyped; Cast; Inline;
+				] in
+				List.iter (fun kwd -> add(make_ci_keyword kwd) (Some (s_keyword kwd))) kwds;
+
+				(* builtins *)
+				add (make_ci_literal "trace" (tpair (TFun(["value",false,t_dynamic],ctx.com.basic.tvoid)))) (Some "trace")
+			end;
+		) ();
 	end;
 
 	(* type params *)
@@ -473,75 +472,76 @@ let collect ctx tk with_type sort =
 	(* module imports *)
 	List.iter add_type (List.rev_map fst ctx.m.import_resolution#extract_type_imports); (* reverse! *)
 
-	let t_syntax = Timer.timer ["display";"toplevel collect";"syntax"] in
-	(* types from files *)
 	let cs = ctx.com.cs in
-	(* online: iter context files *)
-	init_or_update_server cs ctx.com ["display";"toplevel"];
-	let cc = CommonCache.get_cache ctx.com in
-	let files = cc#get_files in
-	(* Sort files by reverse distance of their package to our current package. *)
-	let files = Hashtbl.fold (fun file cfile acc ->
-		let i = pack_similarity curpack cfile.c_package in
-		((file,cfile),i) :: acc
-	) files [] in
-	let files = List.sort (fun (_,i1) (_,i2) -> -compare i1 i2) files in
-	let check_package pack = match List.rev pack with
+		let check_package pack = match List.rev pack with
 		| [] -> ()
 		| s :: sl -> add_package (List.rev sl,s)
 	in
-	List.iter (fun ((file_key,cfile),_) ->
-		let module_name = CompilationCache.get_module_name_of_cfile cfile.c_file_path.file cfile in
-		let dot_path = s_type_path (cfile.c_package,module_name) in
-		(* In legacy mode we only show toplevel types. *)
-		if is_legacy_completion && cfile.c_package <> [] then begin
-			(* And only toplevel packages. *)
-			match cfile.c_package with
-			| [s] -> add_package ([],s)
-			| _ -> ()
-		end else if (List.exists (fun e -> ExtString.String.starts_with dot_path (e ^ ".")) !exclude) then
-			()
-		else begin
-			ctx.com.module_to_file#add (cfile.c_package,module_name) cfile.c_file_path;
-			if process_decls cfile.c_package module_name cfile.c_decls then check_package cfile.c_package;
-		end
-	) files;
-	t_syntax();
-
-	let t_native_lib = Timer.timer ["display";"toplevel collect";"native lib"] in
-	List.iter (fun file ->
-		match cs#get_native_lib file with
-		| Some lib ->
-			Hashtbl.iter (fun path (pack,decls) ->
-				if process_decls pack (snd path) decls then check_package pack;
-			) lib.c_nl_files
-		| None ->
-			()
-	) ctx.com.native_libs.all_libs;
-	t_native_lib();
-
-	let t_packages = Timer.timer ["display";"toplevel collect";"packages"] in
-	(* packages *)
-	Hashtbl.iter (fun path _ ->
-		let full_pack = fst path @ [snd path] in
-		if is_pack_visible full_pack then add (make_ci_package path []) (Some (snd path))
-	) packages;
-	t_packages();
-
-	t();
-
-	let t = Timer.timer ["display";"toplevel sorting"] in
-	(* sorting *)
-	let l = DynArray.to_list cctx.items in
-	let l = if is_legacy_completion then
-		List.sort (fun item1 item2 -> compare (get_name item1) (get_name item2)) l
-	else if sort then
-		Display.sort_fields l with_type tk
-	else
+	Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"syntax"] (fun () ->
+		(* types from files *)
+		(* online: iter context files *)
+		init_or_update_server cs ctx.com ["display";"toplevel"];
+		let cc = CommonCache.get_cache ctx.com in
+		let files = cc#get_files in
+		(* Sort files by reverse distance of their package to our current package. *)
+		let files = Hashtbl.fold (fun file cfile acc ->
+			let i = pack_similarity curpack cfile.c_package in
+			((file,cfile),i) :: acc
+		) files [] in
+		let files = List.sort (fun (_,i1) (_,i2) -> -compare i1 i2) files in
+		List.iter (fun ((file_key,cfile),_) ->
+			let module_name = CompilationCache.get_module_name_of_cfile cfile.c_file_path.file cfile in
+			let dot_path = s_type_path (cfile.c_package,module_name) in
+			(* In legacy mode we only show toplevel types. *)
+			if is_legacy_completion && cfile.c_package <> [] then begin
+				(* And only toplevel packages. *)
+				match cfile.c_package with
+				| [s] -> add_package ([],s)
+				| _ -> ()
+			end else if (List.exists (fun e -> ExtString.String.starts_with dot_path (e ^ ".")) !exclude) then
+				()
+			else begin
+				ctx.com.module_to_file#add (cfile.c_package,module_name) cfile.c_file_path;
+				if process_decls cfile.c_package module_name cfile.c_decls then check_package cfile.c_package;
+			end
+		) files;
+	) ();
+
+	Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"native lib"] (fun () ->
+		List.iter (fun file ->
+			match cs#get_native_lib file with
+			| Some lib ->
+				Hashtbl.iter (fun path (pack,decls) ->
+					if process_decls pack (snd path) decls then check_package pack;
+				) lib.c_nl_files
+			| None ->
+				()
+		) ctx.com.native_libs.all_libs;
+	) ();
+
+	Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"packages"] (fun () ->
+		(* packages *)
+		Hashtbl.iter (fun path _ ->
+			let full_pack = fst path @ [snd path] in
+			if is_pack_visible full_pack then add (make_ci_package path []) (Some (snd path))
+		) packages;
+	) ();
+
+	Timer.time ctx.com.timer_ctx ["display";"toplevel sorting"] (fun () ->
+		(* sorting *)
+		let l = DynArray.to_list cctx.items in
+		let l = if is_legacy_completion then
+			List.sort (fun item1 item2 -> compare (get_name item1) (get_name item2)) l
+		else if sort then
+			Display.sort_fields l with_type tk
+		else
+			l
+		in
 		l
-	in
-	t();
-	l
+	) ()
+
+let collect ctx tk with_type sort =
+	Timer.time ctx.com.timer_ctx ["display";"toplevel collect"] (collect ctx tk with_type) sort
 
 let collect_and_raise ctx tk with_type cr (name,pname) pinsert =
 	let fields = match !DisplayException.last_completion_pos with

+ 1 - 1
src/context/display/documentSymbols.ml

@@ -114,7 +114,7 @@ let collect_module_symbols mname with_locals (pack,decls) =
 	) decls;
 	begin match mname with
 	| Some(file,mname) when not (Hashtbl.mem type_decls mname) ->
-		add mname Module {pfile = file; pmin = 0; pmax = 0} (String.concat "." pack) false
+		add mname Module (file_pos file) (String.concat "." pack) false
 	| _ ->
 		()
 	end;

+ 4 - 4
src/context/display/exprPreprocessing.ml

@@ -3,7 +3,7 @@ open Ast
 open DisplayTypes.DisplayMode
 open DisplayPosition
 
-let find_before_pos dm e =
+let find_before_pos was_auto_triggered dm e =
 	let display_pos = ref (DisplayPosition.display_position#get) in
 	let was_annotated = ref false in
 	let is_annotated,is_completion = match dm with
@@ -160,7 +160,7 @@ let find_before_pos dm e =
 			raise Exit
 		| EMeta((Meta.Markup,_,_),(EConst(String _),p)) when is_annotated p ->
 			annotate_marked e
-		| EConst (String (_,q)) when ((q <> SSingleQuotes) || !Parser.was_auto_triggered) && is_annotated (pos e) && is_completion ->
+		| EConst (String (_,q)) when ((q <> SSingleQuotes) || was_auto_triggered) && is_annotated (pos e) && is_completion ->
 			(* TODO: check if this makes any sense *)
 			raise Exit
 		| EConst(Regexp _) when is_annotated (pos e) && is_completion ->
@@ -199,13 +199,13 @@ let find_before_pos dm e =
 	in
 	try map e with Exit -> e
 
-let find_display_call e =
+let find_display_call was_auto_triggered e =
 	let found = ref false in
 	let handle_el e el =
 		let call_arg_is_marked () =
 			el = [] || List.exists (fun (e,_) -> match e with EDisplay(_,DKMarked) -> true | _ -> false) el
 		in
-		if not !Parser.was_auto_triggered || call_arg_is_marked () then begin
+		if not was_auto_triggered || call_arg_is_marked () then begin
 		found := true;
 		Parser.mk_display_expr e DKCall
 		end else

+ 25 - 29
src/context/display/findReferences.ml

@@ -8,22 +8,20 @@ let find_possible_references tctx cs =
 	let name,_,kind = Display.ReferencePosition.get () in
 	ignore(SyntaxExplorer.explore_uncached_modules tctx cs [name,kind])
 
-let find_references tctx com with_definition pos_filters =
-	let t = Timer.timer ["display";"references";"collect"] in
-	let symbols,relations = Statistics.collect_statistics tctx pos_filters true in
-	t();
+let find_references com with_definition pos_filters =
+	let symbols,relations = Timer.time com.timer_ctx ["display";"references";"collect"] (Statistics.collect_statistics com pos_filters) true in
 	let rec loop acc (relations:(Statistics.relation * pos) list) = match relations with
 		| (Statistics.Referenced,p) :: relations when not (List.mem p acc) -> loop (p :: acc) relations
 		| _ :: relations -> loop acc relations
 		| [] -> acc
 	in
-	let t = Timer.timer ["display";"references";"filter"] in
-	let usages = Hashtbl.fold (fun p sym acc ->
-		let acc = if with_definition then p :: acc else acc in
-		(try loop acc (Hashtbl.find relations p)
-		with Not_found -> acc)
-	) symbols [] in
-	t();
+	let usages = Timer.time com.timer_ctx ["display";"references";"filter"] (fun () ->
+		Hashtbl.fold (fun p sym acc ->
+			let acc = if with_definition then p :: acc else acc in
+			(try loop acc (Hashtbl.find relations p)
+			with Not_found -> acc)
+		) symbols []
+	) () in
 	Display.ReferencePosition.reset();
 	usages
 
@@ -121,14 +119,14 @@ let rec collect_reference_positions com (name,pos,kind) =
 	| _ ->
 		[name,pos,kind]
 
-let find_references tctx com with_definition =
+let find_references com with_definition =
 	let pos_filters =
 		List.fold_left (fun acc (_,p,_) ->
 			if p = null_pos then acc
 			else Statistics.SFPos p :: acc
 		) [] (collect_reference_positions com (Display.ReferencePosition.get ()))
 	in
-	let usages = find_references tctx com with_definition pos_filters in
+	let usages = find_references com with_definition pos_filters in
 	let usages =
 		List.sort (fun p1 p2 ->
 			let c = compare p1.pfile p2.pfile in
@@ -137,29 +135,27 @@ let find_references tctx com with_definition =
 	in
 	DisplayException.raise_positions usages
 
-let find_implementations tctx com name pos kind =
-	let t = Timer.timer ["display";"implementations";"collect"] in
-	let symbols,relations = Statistics.collect_statistics tctx [SFPos pos] false in
-	t();
+let find_implementations com name pos kind =
+	let symbols,relations = Timer.time com.timer_ctx ["display";"implementations";"collect"] (Statistics.collect_statistics com [SFPos pos]) false in
 	let rec loop acc relations = match relations with
 		| ((Statistics.Implemented | Statistics.Overridden | Statistics.Extended),p) :: relations -> loop (p :: acc) relations
 		| _ :: relations -> loop acc relations
 		| [] -> acc
 	in
-	let t = Timer.timer ["display";"implementations";"filter"] in
-	let usages = Hashtbl.fold (fun p sym acc ->
-		(try loop acc (Hashtbl.find relations p)
-		with Not_found -> acc)
-	) symbols [] in
-	let usages = List.sort (fun p1 p2 ->
-		let c = compare p1.pfile p2.pfile in
-		if c <> 0 then c else compare p1.pmin p2.pmin
-	) usages in
-	t();
+	let usages = Timer.time com.timer_ctx ["display";"implementations";"filter"] (fun () ->
+		let usages = Hashtbl.fold (fun p sym acc ->
+			(try loop acc (Hashtbl.find relations p)
+			with Not_found -> acc)
+		) symbols [] in
+		List.sort (fun p1 p2 ->
+			let c = compare p1.pfile p2.pfile in
+			if c <> 0 then c else compare p1.pmin p2.pmin
+		) usages
+	) () in
 	Display.ReferencePosition.reset();
 	DisplayException.raise_positions usages
 
-let find_implementations tctx com =
+let find_implementations com =
 	let name,pos,kind = Display.ReferencePosition.get () in
-	if pos <> null_pos then find_implementations tctx com name pos kind
+	if pos <> null_pos then find_implementations com name pos kind
 	else DisplayException.raise_positions []

+ 6 - 5
src/context/display/importHandling.ml

@@ -81,7 +81,7 @@ let init_import ctx path mode p =
 	| (tname,p2) :: rest ->
 		let p1 = (match pack with [] -> p2 | (_,p1) :: _ -> p1) in
 		let p_type = punion p1 p2 in
-		let md = ctx.g.do_load_module ctx (List.map fst pack,tname) p_type in
+		let md = ctx.g.do_load_module ~origin:MDepFromImport ctx (List.map fst pack,tname) p_type in
 		let types = md.m_types in
 		let not_private mt = not (t_infos mt).mt_private in
 		let error_private p = raise_typing_error "Importing private declarations from a module is not allowed" p in
@@ -113,8 +113,9 @@ let init_import ctx path mode p =
 		let check_alias mt name pname =
 			if not (name.[0] >= 'A' && name.[0] <= 'Z') then
 				raise_typing_error "Type aliases must start with an uppercase letter" pname;
-			if ctx.m.is_display_file && DisplayPosition.display_position#enclosed_in pname then
-				DisplayEmitter.display_alias ctx name (type_of_module_type mt) pname;
+			(* Imports from import.hx should not match display position from current file *)
+			if ctx.m.is_display_file && DisplayPosition.display_position#enclosed_in pname && (Path.UniqueKey.create pname.pfile) = (Path.UniqueKey.lazy_key ctx.m.curmod.m_extra.m_file) then
+				DisplayEmitter.display_alias ctx name (type_of_module_type mt) pname
 		in
 		let add_static_init t name s =
 			match resolve_typedef t with
@@ -166,7 +167,7 @@ let init_import ctx path mode p =
 							check_alias tsub name pname;
 							Some name
 					in
-					ctx.m.import_resolution#add (module_type_resolution tsub alias p2);
+					ctx.m.import_resolution#add (module_type_resolution tsub alias p);
 				with Not_found ->
 					(* this might be a static property, wait later to check *)
 					let find_main_type_static () =
@@ -267,7 +268,7 @@ let handle_using ctx path p =
 	in
 	let types = (match t.tsub with
 		| None ->
-			let md = ctx.g.do_load_module ctx (t.tpackage,t.tname) p in
+			let md = ctx.g.do_load_module ~origin:MDepFromImport ctx (t.tpackage,t.tname) p in
 			let types = List.filter (fun t -> not (t_infos t).mt_private) md.m_types in
 			Option.map_default (fun c -> (TClassDecl c) :: types) types md.m_statics
 		| Some _ ->

+ 4 - 8
src/context/display/statistics.ml

@@ -15,7 +15,7 @@ type statistics_filter =
 	| SFPos of pos
 	| SFFile of string
 
-let collect_statistics ctx pos_filters with_expressions =
+let collect_statistics com pos_filters with_expressions =
 	let relations = Hashtbl.create 0 in
 	let symbols = Hashtbl.create 0 in
 	let handled_modules = Hashtbl.create 0 in
@@ -25,7 +25,7 @@ let collect_statistics ctx pos_filters with_expressions =
 			try
 				Hashtbl.find paths path
 			with Not_found ->
-				let unique = ctx.com.file_keys#get path in
+				let unique = com.file_keys#get path in
 				Hashtbl.add paths path unique;
 				unique
 		)
@@ -175,10 +175,6 @@ let collect_statistics ctx pos_filters with_expressions =
 			| TVar(v,eo) ->
 				Option.may loop eo;
 				var_decl v;
-			| TFor(v,e1,e2) ->
-				var_decl v;
-				loop e1;
-				loop e2;
 			| TFunction tf ->
 				List.iter (fun (v,_) -> var_decl v) tf.tf_args;
 				loop tf.tf_expr;
@@ -213,7 +209,7 @@ let collect_statistics ctx pos_filters with_expressions =
 		List.iter f com.types;
 		Option.may loop (com.get_macros())
 	in
-	loop ctx.com;
+	loop com;
 	(* find things *)
 	let f = function
 		| TClassDecl c ->
@@ -258,7 +254,7 @@ let collect_statistics ctx pos_filters with_expressions =
 		List.iter f com.types;
 		Option.may loop (com.get_macros())
 	in
-	loop ctx.com;
+	loop com;
 	(* TODO: Using syntax-exploration here is technically fine, but I worry about performance in real codebases. *)
 	(* let find_symbols = Hashtbl.fold (fun _ kind acc ->
 		let name = string_of_symbol kind in

+ 18 - 18
src/context/display/syntaxExplorer.ml

@@ -165,23 +165,23 @@ let explore_uncached_modules tctx cs symbols =
 	let cc = CommonCache.get_cache tctx.com in
 	let files = cc#get_files in
 	let modules = cc#get_modules in
-	let t = Timer.timer ["display";"references";"candidates"] in
-	let acc = Hashtbl.fold (fun file_key cfile acc ->
-		let module_name = get_module_name_of_cfile cfile.c_file_path.file cfile in
-		if Hashtbl.mem modules (cfile.c_package,module_name) then
-			acc
-		else try
-			find_in_syntax symbols (cfile.c_package,cfile.c_decls);
-			acc
-		with Exit ->
-			begin try
-				let m = tctx.g.do_load_module tctx (cfile.c_package,module_name) null_pos in
-				(* We have to flush immediately so we catch exceptions from weird modules *)
-				Typecore.flush_pass tctx.g Typecore.PFinal ("final",cfile.c_package @ [module_name]);
-				m :: acc
-			with _ ->
+	let acc = Timer.time tctx.com.timer_ctx ["display";"references";"candidates"] (fun () ->
+		Hashtbl.fold (fun file_key cfile acc ->
+			let module_name = get_module_name_of_cfile cfile.c_file_path.file cfile in
+			if Hashtbl.mem modules (cfile.c_package,module_name) then
 				acc
-			end
-	) files [] in
-	t();
+			else try
+				find_in_syntax symbols (cfile.c_package,cfile.c_decls);
+				acc
+			with Exit ->
+				begin try
+					let m = tctx.g.do_load_module tctx (cfile.c_package,module_name) null_pos in
+					(* We have to flush immediately so we catch exceptions from weird modules *)
+					Typecore.flush_pass tctx.g PFinal ("final",cfile.c_package @ [module_name]);
+					m :: acc
+				with _ ->
+					acc
+				end
+		) files []
+	) () in
 	acc

+ 3 - 3
src/context/formatString.ml

@@ -1,7 +1,7 @@
 open Globals
 open Ast
 
-let format_string defines s p process_expr =
+let format_string config s p process_expr =
 	let e = ref None in
 	let pmin = ref p.pmin in
 	let min = ref (p.pmin + 1) in
@@ -83,8 +83,8 @@ let format_string defines s p process_expr =
 					if Lexer.string_is_whitespace scode then Error.raise_typing_error "Expression cannot be empty" ep
 					else Error.raise_typing_error msg pos
 				in
-				match ParserEntry.parse_expr_string defines scode ep error true with
-					| ParseSuccess(data,_,_) -> data
+				match ParserEntry.parse_expr_string config scode ep error true with
+					| ParseSuccess(data,_) -> data
 					| ParseError(_,(msg,p),_) -> error (Parser.error_msg msg) p
 			in
 			add_expr e slen

+ 0 - 50
src/context/lookup.ml

@@ -7,21 +7,13 @@ class virtual ['key,'value] lookup = object(self)
 	method virtual fold : 'acc . ('key -> 'value -> 'acc -> 'acc) -> 'acc -> 'acc
 	method virtual mem : 'key -> bool
 	method virtual clear : unit
-
-	method virtual start_group : int
-	method virtual commit_group : int -> int
-	method virtual discard_group : int -> int
 end
 
 class ['key,'value] pmap_lookup = object(self)
 	inherit ['key,'value] lookup
 	val mutable lut : ('key,'value) PMap.t = PMap.empty
 
-	val mutable group_id : int ref = ref 0
-	val mutable groups : (int,'key list) PMap.t = PMap.empty
-
 	method add (key : 'key) (value : 'value) =
-		groups <- PMap.map (fun modules -> key :: modules) groups;
 		lut <- PMap.add key value lut
 
 	method remove (key : 'key) =
@@ -41,36 +33,13 @@ class ['key,'value] pmap_lookup = object(self)
 
 	method clear =
 		lut <- PMap.empty
-
-	method start_group =
-		incr group_id;
-		let i = !group_id in
-		groups <- PMap.add i [] groups;
-		i
-
-	method commit_group i =
-		let group = PMap.find i groups in
-		let n = List.length group in
-		groups <- PMap.remove i groups;
-		n
-
-	method discard_group i =
-		let group = PMap.find i groups in
-		let n = List.length group in
-		List.iter (fun mpath -> self#remove mpath) group;
-		groups <- PMap.remove i groups;
-		n
 end
 
 class ['key,'value] hashtbl_lookup = object(self)
 	inherit ['key,'value] lookup
 	val lut : ('key,'value) Hashtbl.t = Hashtbl.create 0
 
-	val mutable group_id : int ref = ref 0
-	val mutable groups : (int,'key list) Hashtbl.t = Hashtbl.create 0
-
 	method add (key : 'key) (value : 'value) =
-		Hashtbl.iter (fun i modules -> Hashtbl.replace groups i (key :: modules)) groups;
 		Hashtbl.replace lut key value
 
 	method remove (key : 'key) =
@@ -90,24 +59,5 @@ class ['key,'value] hashtbl_lookup = object(self)
 
 	method clear =
 		Hashtbl.clear lut
-
-	method start_group =
-		incr group_id;
-		let i = !group_id in
-		Hashtbl.replace groups i [];
-		i
-
-	method commit_group i =
-		let group = Hashtbl.find groups i in
-		let n = List.length group in
-		Hashtbl.remove groups i;
-		n
-
-	method discard_group i =
-		let group = Hashtbl.find groups i in
-		let n = List.length group in
-		List.iter (fun mpath -> self#remove mpath) group;
-		Hashtbl.remove groups i;
-		n
 end
 

+ 0 - 197
src/context/memory.ml

@@ -1,7 +1,6 @@
 open Globals
 open Common
 open Type
-open Genjson
 
 type memory_request =
 	| MCache
@@ -94,199 +93,3 @@ let fmt_word f =
 
 let size v =
 	fmt_size (mem_size v)
-
-let get_memory_json (cs : CompilationCache.t) mreq =
-	begin match mreq with
-	| MCache ->
-		let old_gc = Gc.get() in
-		Gc.set { old_gc with
-			Gc.max_overhead = 0;
-			Gc.space_overhead = 0
-		};
-		Gc.compact();
-		Gc.set old_gc;
-		let stat = Gc.quick_stat() in
-		let size = (float_of_int stat.Gc.heap_words) *. (float_of_int (Sys.word_size / 8)) in
-		let cache_mem = cs#get_pointers in
-		let contexts = cs#get_contexts in
-		let j_contexts = List.map (fun cc -> jobject [
-			"context",cc#get_json;
-			"size",jint (mem_size cc);
-		]) contexts in
-		jobject [
-			"contexts",jarray j_contexts;
-			"memory",jobject [
-				"totalCache",jint (mem_size cs);
-				"contextCache",jint (mem_size cache_mem.(0));
-				"haxelibCache",jint (mem_size cache_mem.(1));
-				"directoryCache",jint (mem_size cache_mem.(2));
-				"nativeLibCache",jint (mem_size cache_mem.(3));
-				"additionalSizes",jarray [
-					jobject ["name",jstring "macro interpreter";"size",jint (mem_size (MacroContext.macro_interp_cache))];
-					(* jobject ["name",jstring "macro stdlib";"size",jint (mem_size (EvalContext.GlobalState.stdlib))];
-					jobject ["name",jstring "macro macro_lib";"size",jint (mem_size (EvalContext.GlobalState.macro_lib))]; *)
-					jobject ["name",jstring "last completion result";"size",jint (mem_size (DisplayException.last_completion_result))];
-					jobject ["name",jstring "Lexer file cache";"size",jint (mem_size (Lexer.all_files))];
-					jobject ["name",jstring "GC heap words";"size",jint (int_of_float size)];
-				];
-			]
-		]
-	| MContext sign ->
-		let cc = cs#get_context sign in
-		let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty cs#get_modules in
-		let l = Hashtbl.fold (fun _ m acc ->
-			(m,(get_module_memory cs all_modules m)) :: acc
-		) cc#get_modules [] in
-		let l = List.sort (fun (_,(size1,_)) (_,(size2,_)) -> compare size2 size1) l in
-		let leaks = ref [] in
-		let l = List.map (fun (m,(size,(reached,_,_,mleaks))) ->
-			if reached then leaks := (m,mleaks) :: !leaks;
-			jobject [
-				"path",jstring (s_type_path m.m_path);
-				"size",jint size;
-				"hasTypes",jbool (match m.m_extra.m_kind with MCode | MMacro -> true | _ -> false);
-			]
-		) l in
-		let leaks = match !leaks with
-			| [] -> jnull
-			| leaks ->
-				let jleaks = List.map (fun (m,leaks) ->
-					let jleaks = List.map (fun s -> jobject ["path",jstring s]) leaks in
-					jobject [
-						"path",jstring (s_type_path m.m_path);
-						"leaks",jarray jleaks;
-					]
-				) leaks in
-				jarray jleaks
-		in
-		let cache_mem = cc#get_pointers in
-		jobject [
-			"leaks",leaks;
-			"syntaxCache",jobject [
-				"size",jint (mem_size cache_mem.(0));
-			];
-			"moduleCache",jobject [
-				"size",jint (mem_size cache_mem.(1));
-				"list",jarray l;
-			];
-			"binaryCache",jobject [
-				"size",jint (mem_size cache_mem.(2));
-			];
-		]
-	| MModule(sign,path) ->
-		let cc = cs#get_context sign in
-		let m = cc#find_module path in
-		let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty cs#get_modules in
-		let _,(_,deps,out,_) = get_module_memory cs all_modules m in
-		let deps = update_module_type_deps deps m in
-		let out = get_out out in
-		let types = List.map (fun md ->
-			let fields,inf = match md with
-				| TClassDecl c ->
-					let own_deps = ref deps in
-					let field acc cf =
-						let repr = Obj.repr cf in
-						own_deps := List.filter (fun repr' -> repr != repr') !own_deps;
-						let deps = List.filter (fun repr' -> repr' != repr) deps in
-						let size = Objsize.size_with_headers (Objsize.objsize cf deps out) in
-						(cf,size) :: acc
-					in
-					let fields = List.fold_left field [] c.cl_ordered_fields in
-					let fields = List.fold_left field fields c.cl_ordered_statics in
-					let fields = List.sort (fun (_,size1) (_,size2) -> compare size2 size1) fields in
-					let fields = List.map (fun (cf,size) ->
-						jobject [
-							"name",jstring cf.cf_name;
-							"size",jint size;
-							"pos",generate_pos_as_location cf.cf_name_pos;
-						]
-					) fields in
-					let repr = Obj.repr c in
-					let deps = List.filter (fun repr' -> repr' != repr) !own_deps in
-					fields,Objsize.objsize c deps out
-				| TEnumDecl en ->
-					let repr = Obj.repr en in
-					let deps = List.filter (fun repr' -> repr' != repr) deps in
-					[],Objsize.objsize en deps out
-				| TTypeDecl td ->
-					let repr = Obj.repr td in
-					let deps = List.filter (fun repr' -> repr' != repr) deps in
-					[],Objsize.objsize td deps out
-				| TAbstractDecl a ->
-					let repr = Obj.repr a in
-					let deps = List.filter (fun repr' -> repr' != repr) deps in
-					[],Objsize.objsize a deps out
-			in
-			let size = Objsize.size_with_headers inf in
-			let jo = jobject [
-				"name",jstring (s_type_path (t_infos md).mt_path);
-				"size",jint size;
-				"pos",generate_pos_as_location (t_infos md).mt_name_pos;
-				"fields",jarray fields;
-			] in
-			size,jo
-		) m.m_types in
-		let types = List.sort (fun (size1,_) (size2,_) -> compare size2 size1) types in
-		let types = List.map snd types in
-		jobject [
-			"moduleExtra",jint (Objsize.size_with_headers (Objsize.objsize m.m_extra deps out));
-			"types",jarray types;
-		]
-	end
-
-let display_memory com =
-	let verbose = com.verbose in
-	let print = print_endline in
-	Gc.full_major();
-	Gc.compact();
-	let mem = Gc.stat() in
-	print ("Total Allocated Memory " ^ fmt_size (mem.Gc.heap_words * (Sys.word_size asr 8)));
-	print ("Free Memory " ^ fmt_size (mem.Gc.free_words * (Sys.word_size asr 8)));
-	let c = com.cs in
-	print ("Total cache size " ^ size c);
-	(* print ("  haxelib " ^ size c.c_haxelib); *)
-	(* print ("  parsed ast " ^ size c.c_files ^ " (" ^ string_of_int (Hashtbl.length c.c_files) ^ " files stored)"); *)
-	(* print ("  typed modules " ^ size c.c_modules ^ " (" ^ string_of_int (Hashtbl.length c.c_modules) ^ " modules stored)"); *)
-	let module_list = c#get_modules in
-	let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty module_list in
-	let modules = List.fold_left (fun acc m ->
-		let (size,r) = get_module_memory c all_modules m in
-		(m,size,r) :: acc
-	) [] module_list in
-	let cur_key = ref "" and tcount = ref 0 and mcount = ref 0 in
-	List.iter (fun (m,size,(reached,deps,out,leaks)) ->
-		let key = m.m_extra.m_sign in
-		if key <> !cur_key then begin
-			print (Printf.sprintf ("    --- CONFIG %s ----------------------------") (Digest.to_hex key));
-			cur_key := key;
-		end;
-		print (Printf.sprintf "    %s : %s" (s_type_path m.m_path) (fmt_size size));
-		(if reached then try
-			incr mcount;
-			let lcount = ref 0 in
-			let leak l =
-				incr lcount;
-				incr tcount;
-				print (Printf.sprintf "      LEAK %s" l);
-				if !lcount >= 3 && !tcount >= 100 && not verbose then begin
-					print (Printf.sprintf "      ...");
-					raise Exit;
-				end;
-			in
-			List.iter leak leaks;
-		with Exit ->
-			());
-		if verbose then begin
-			print (Printf.sprintf "      %d total deps" (List.length deps));
-			PMap.iter (fun _ mdep ->
-				let md = (com.cs#get_context mdep.md_sign)#find_module mdep.md_path in
-				print (Printf.sprintf "      dep %s%s" (s_type_path mdep.md_path) (module_sign key md));
-			) m.m_extra.m_deps;
-		end;
-		flush stdout
-	) (List.sort (fun (m1,s1,_) (m2,s2,_) ->
-		let k1 = m1.m_extra.m_sign and k2 = m2.m_extra.m_sign in
-		if k1 = k2 then s1 - s2 else if k1 > k2 then 1 else -1
-	) modules);
-	if !mcount > 0 then print ("*** " ^ string_of_int !mcount ^ " modules have leaks !");
-	print "Cache dump complete"

+ 5 - 1
src/context/nativeLibraries.ml

@@ -23,7 +23,7 @@ type native_lib_flags =
 	| FlagIsStd
 	| FlagIsExtern
 
-class virtual ['a,'data] native_library (name : string) (file_path : string) = object(self)
+class virtual native_library_base (name : string) (file_path : string) = object(self)
 	val mutable flags : native_lib_flags list = []
 
 	method add_flag flag = flags <- flag :: flags
@@ -31,6 +31,10 @@ class virtual ['a,'data] native_library (name : string) (file_path : string) = o
 
 	method get_name = name
 	method get_file_path = file_path
+end
+
+class virtual ['a,'data] native_library (name : string) (file_path : string) = object(self)
+	inherit native_library_base name file_path
 
 	method virtual build : path -> pos -> Ast.package option
 	method virtual close : unit

Certains fichiers n'ont pas été affichés car il y a eu trop de fichiers modifiés dans ce diff