Procházet zdrojové kódy

Merge branch 'development' into avoid-object-anons

# Conflicts:
#	src/generators/genhl.ml
Simon Krajewski před 5 měsíci
rodič
revize
f7a9624564
100 změnil soubory, kde provedl 2705 přidání a 3728 odebrání
  1. 0 24
      .devcontainer/devcontainer.json
  2. 0 28
      .devcontainer/docker-compose.yml
  3. 0 454
      .devcontainer/library-scripts/common-debian.sh
  4. 0 309
      .devcontainer/library-scripts/docker-debian.sh
  5. 0 6
      .earthlyignore
  6. 0 14
      .github/workflows/cancel.yml
  7. 195 112
      .github/workflows/main.yml
  8. 1 0
      .gitignore
  9. 5 1
      .vscode/settings.json
  10. 4 4
      .vscode/tasks.json
  11. 0 412
      Earthfile
  12. 3 15
      Makefile
  13. 0 4
      Makefile.win
  14. 96 0
      WinSetup.ps1
  15. 2 2
      dune
  16. 0 4
      dune-project
  17. 0 2
      dune-workspace.dev
  18. 2 5
      extra/github-actions/build-mac.yml
  19. 9 25
      extra/github-actions/build-windows.yml
  20. 15 5
      extra/github-actions/install-ocaml-libs-windows.yml
  21. 7 31
      extra/github-actions/install-ocaml-windows.yml
  22. 0 20
      extra/github-actions/install-ocaml-windows64.yml
  23. 123 66
      extra/github-actions/workflows/main.yml
  24. 5 4
      haxe.opam
  25. 0 18
      libs/.gitignore
  26. 0 23
      libs/Makefile
  27. 0 30
      libs/extc/Makefile
  28. 1 7
      libs/extc/process_stubs.c
  29. 0 35
      libs/extlib-leftovers/Makefile
  30. 0 23
      libs/neko/Makefile
  31. 0 29
      libs/objsize/Makefile
  32. 0 28
      libs/pcre2/Makefile
  33. 0 81
      libs/swflib/Makefile
  34. 0 22
      libs/ziplib/Makefile
  35. 0 7
      libs/ziplib/test/Makefile
  36. 52 10
      src-json/define.json
  37. 7 1
      src-json/warning.json
  38. 0 14
      src-prebuild/dune
  39. 0 345
      src/codegen/codegen.ml
  40. 223 0
      src/codegen/dump.ml
  41. 132 0
      src/codegen/fixOverrides.ml
  42. 16 13
      src/codegen/genxml.ml
  43. 26 25
      src/codegen/javaModern.ml
  44. 8 6
      src/codegen/swfLoader.ml
  45. 10 7
      src/compiler/args.ml
  46. 32 19
      src/compiler/compilationCache.ml
  47. 3 1
      src/compiler/compilationContext.ml
  48. 88 56
      src/compiler/compiler.ml
  49. 7 8
      src/compiler/displayOutput.ml
  50. 8 20
      src/compiler/displayProcessing.ml
  51. 5 0
      src/compiler/dune
  52. 36 40
      src/compiler/generate.ml
  53. 2 3
      src/compiler/haxe.ml
  54. 0 2
      src/compiler/helper.ml
  55. 21 29
      src/compiler/hxb/hxbLib.ml
  56. 187 118
      src/compiler/hxb/hxbReader.ml
  57. 2 0
      src/compiler/hxb/hxbReaderApi.ml
  58. 46 81
      src/compiler/hxb/hxbWriter.ml
  59. 0 2
      src/compiler/hxb/hxbWriterConfig.ml
  60. 14 14
      src/compiler/messageReporting.ml
  61. 53 60
      src/compiler/server.ml
  62. 2 10
      src/compiler/serverCompilationContext.ml
  63. 2 2
      src/compiler/tasks.ml
  64. 23 20
      src/context/abstractCast.ml
  65. 57 142
      src/context/common.ml
  66. 21 14
      src/context/commonCache.ml
  67. 1 1
      src/context/display/diagnostics.ml
  68. 2 2
      src/context/display/display.ml
  69. 3 1
      src/context/display/displayFields.ml
  70. 25 23
      src/context/display/displayJson.ml
  71. 256 0
      src/context/display/displayMemory.ml
  72. 7 7
      src/context/display/displayTexpr.ml
  73. 211 211
      src/context/display/displayToplevel.ml
  74. 4 4
      src/context/display/exprPreprocessing.ml
  75. 25 29
      src/context/display/findReferences.ml
  76. 3 2
      src/context/display/importHandling.ml
  77. 4 8
      src/context/display/statistics.ml
  78. 18 18
      src/context/display/syntaxExplorer.ml
  79. 3 3
      src/context/formatString.ml
  80. 0 50
      src/context/lookup.ml
  81. 0 252
      src/context/memory.ml
  82. 28 0
      src/context/parallel.ml
  83. 123 0
      src/context/platformConfig.ml
  84. 166 0
      src/context/safeCom.ml
  85. 48 62
      src/context/typecore.ml
  86. 14 0
      src/context/typerPass.ml
  87. 10 9
      src/core/abstract.ml
  88. 9 1
      src/core/define.ml
  89. 5 8
      src/core/display/completionItem.ml
  90. 29 0
      src/core/ds/nowOrLater.ml
  91. 1 10
      src/core/ds/stringPool.ml
  92. 18 0
      src/core/ds/threadSafeHashtbl.ml
  93. 3 3
      src/core/dune
  94. 1 3
      src/core/error.ml
  95. 37 6
      src/core/globals.ml
  96. 1 1
      src/core/inheritDoc.ml
  97. 10 16
      src/core/json/genjson.ml
  98. 0 85
      src/core/naming.ml
  99. 86 0
      src/core/native.ml
  100. 3 1
      src/core/path.ml

+ 0 - 24
.devcontainer/devcontainer.json

@@ -1,24 +0,0 @@
-// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
-// https://github.com/microsoft/vscode-dev-containers/tree/v0.202.5/containers/docker-from-docker-compose
-{
-	"name": "haxe",
-	"dockerComposeFile": "docker-compose.yml",
-	"service": "workspace",
-	"workspaceFolder": "/workspace",
-
-	// Use this environment variable if you need to bind mount your local source code into a new container.
-	"remoteEnv": {
-		"LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}"
-	},
-	
-	// Set *default* container specific settings.json values on container create.
-	"settings": {},
-
-	"extensions": [
-		"nadako.vshaxe",
-		"ms-azuretools.vscode-docker",
-		"earthly.earthfile-syntax-highlighting",
-	],
-
-	"remoteUser": "vscode"
-}

+ 0 - 28
.devcontainer/docker-compose.yml

@@ -1,28 +0,0 @@
-version: '3'
-services:
-  workspace:
-    image: ghcr.io/haxefoundation/haxe_devcontainer:development
-    init: true
-    volumes:
-      - /var/run/docker.sock:/var/run/docker-host.sock
-      - ..:/workspace:cached
-    environment:
-      - EARTHLY_BUILDKIT_HOST=tcp://earthly:8372
-      - EARTHLY_USE_INLINE_CACHE=true
-      - EARTHLY_SAVE_INLINE_CACHE=true
-    user: vscode
-    entrypoint: /usr/local/share/docker-init.sh
-    command: sleep infinity
-  earthly:
-    image: earthly/buildkitd:v0.6.13
-    privileged: true
-    environment:
-      - BUILDKIT_TCP_TRANSPORT_ENABLED=true
-    expose:
-      - 8372
-    volumes:
-      # https://docs.earthly.dev/docs/guides/using-the-earthly-docker-images/buildkit-standalone#earthly_tmp_dir
-      - earthly-tmp:/tmp/earthly:rw
-
-volumes:
-  earthly-tmp:

+ 0 - 454
.devcontainer/library-scripts/common-debian.sh

@@ -1,454 +0,0 @@
-#!/usr/bin/env bash
-#-------------------------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
-#-------------------------------------------------------------------------------------------------------------
-#
-# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
-# Maintainer: The VS Code and Codespaces Teams
-#
-# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
-
-set -e
-
-INSTALL_ZSH=${1:-"true"}
-USERNAME=${2:-"automatic"}
-USER_UID=${3:-"automatic"}
-USER_GID=${4:-"automatic"}
-UPGRADE_PACKAGES=${5:-"true"}
-INSTALL_OH_MYS=${6:-"true"}
-ADD_NON_FREE_PACKAGES=${7:-"false"}
-SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
-MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
-
-if [ "$(id -u)" -ne 0 ]; then
-    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
-    exit 1
-fi
-
-# Ensure that login shells get the correct path if the user updated the PATH using ENV.
-rm -f /etc/profile.d/00-restore-env.sh
-echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
-chmod +x /etc/profile.d/00-restore-env.sh
-
-# If in automatic mode, determine if a user already exists, if not use vscode
-if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
-    USERNAME=""
-    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
-    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
-        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
-            USERNAME=${CURRENT_USER}
-            break
-        fi
-    done
-    if [ "${USERNAME}" = "" ]; then
-        USERNAME=vscode
-    fi
-elif [ "${USERNAME}" = "none" ]; then
-    USERNAME=root
-    USER_UID=0
-    USER_GID=0
-fi
-
-# Load markers to see which steps have already run
-if [ -f "${MARKER_FILE}" ]; then
-    echo "Marker file found:"
-    cat "${MARKER_FILE}"
-    source "${MARKER_FILE}"
-fi
-
-# Ensure apt is in non-interactive to avoid prompts
-export DEBIAN_FRONTEND=noninteractive
-
-# Function to call apt-get if needed
-apt_get_update_if_needed()
-{
-    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
-        echo "Running apt-get update..."
-        apt-get update
-    else
-        echo "Skipping apt-get update."
-    fi
-}
-
-# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
-if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
-
-    package_list="apt-utils \
-        openssh-client \
-        gnupg2 \
-        dirmngr \
-        iproute2 \
-        procps \
-        lsof \
-        htop \
-        net-tools \
-        psmisc \
-        curl \
-        wget \
-        rsync \
-        ca-certificates \
-        unzip \
-        zip \
-        nano \
-        vim-tiny \
-        less \
-        jq \
-        lsb-release \
-        apt-transport-https \
-        dialog \
-        libc6 \
-        libgcc1 \
-        libkrb5-3 \
-        libgssapi-krb5-2 \
-        libicu[0-9][0-9] \
-        liblttng-ust0 \
-        libstdc++6 \
-        zlib1g \
-        locales \
-        sudo \
-        ncdu \
-        man-db \
-        strace \
-        manpages \
-        manpages-dev \
-        init-system-helpers"
-        
-    # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
-    if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
-        # Bring in variables from /etc/os-release like VERSION_CODENAME
-        . /etc/os-release
-        sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
-        sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
-        sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
-        sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
-        sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
-        sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
-        sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list 
-        sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
-        # Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
-        sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
-        sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
-        echo "Running apt-get update..."
-        apt-get update
-        package_list="${package_list} manpages-posix manpages-posix-dev"
-    else
-        apt_get_update_if_needed
-    fi
-
-    # Install libssl1.1 if available
-    if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
-        package_list="${package_list}       libssl1.1"
-    fi
-    
-    # Install appropriate version of libssl1.0.x if available
-    libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
-    if [ "$(echo "$LIlibssl_packageBSSL" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
-        if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
-            # Debian 9
-            package_list="${package_list}       libssl1.0.2"
-        elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
-            # Ubuntu 18.04, 16.04, earlier
-            package_list="${package_list}       libssl1.0.0"
-        fi
-    fi
-
-    echo "Packages to verify are installed: ${package_list}"
-    apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )
-        
-    # Install git if not already installed (may be more recent than distro version)
-    if ! type git > /dev/null 2>&1; then
-        apt-get -y install --no-install-recommends git
-    fi
-
-    PACKAGES_ALREADY_INSTALLED="true"
-fi
-
-# Get to latest versions of all packages
-if [ "${UPGRADE_PACKAGES}" = "true" ]; then
-    apt_get_update_if_needed
-    apt-get -y upgrade --no-install-recommends
-    apt-get autoremove -y
-fi
-
-# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
-# Common need for both applications and things like the agnoster ZSH theme.
-if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
-    echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen 
-    locale-gen
-    LOCALE_ALREADY_SET="true"
-fi
-
-# Create or update a non-root user to match UID/GID.
-group_name="${USERNAME}"
-if id -u ${USERNAME} > /dev/null 2>&1; then
-    # User exists, update if needed
-    if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then 
-        group_name="$(id -gn $USERNAME)"
-        groupmod --gid $USER_GID ${group_name}
-        usermod --gid $USER_GID $USERNAME
-    fi
-    if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then 
-        usermod --uid $USER_UID $USERNAME
-    fi
-else
-    # Create user
-    if [ "${USER_GID}" = "automatic" ]; then
-        groupadd $USERNAME
-    else
-        groupadd --gid $USER_GID $USERNAME
-    fi
-    if [ "${USER_UID}" = "automatic" ]; then 
-        useradd -s /bin/bash --gid $USERNAME -m $USERNAME
-    else
-        useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
-    fi
-fi
-
-# Add add sudo support for non-root user
-if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
-    echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
-    chmod 0440 /etc/sudoers.d/$USERNAME
-    EXISTING_NON_ROOT_USER="${USERNAME}"
-fi
-
-# ** Shell customization section **
-if [ "${USERNAME}" = "root" ]; then 
-    user_rc_path="/root"
-else
-    user_rc_path="/home/${USERNAME}"
-fi
-
-# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
-if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
-    cp  /etc/skel/.bashrc "${user_rc_path}/.bashrc"
-fi
-
-# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
-if  [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
-    cp  /etc/skel/.profile "${user_rc_path}/.profile"
-fi
-
-# .bashrc/.zshrc snippet
-rc_snippet="$(cat << 'EOF'
-
-if [ -z "${USER}" ]; then export USER=$(whoami); fi
-if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
-
-# Display optional first run image specific notice if configured and terminal is interactive
-if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
-    if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
-        cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
-    elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
-        cat "/workspaces/.codespaces/shared/first-run-notice.txt"
-    fi
-    mkdir -p "$HOME/.config/vscode-dev-containers"
-    # Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
-    ((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
-fi
-
-# Set the default git editor if not already set
-if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
-    if  [ "${TERM_PROGRAM}" = "vscode" ]; then
-        if [[ -n $(command -v code-insiders) &&  -z $(command -v code) ]]; then 
-            export GIT_EDITOR="code-insiders --wait"
-        else 
-            export GIT_EDITOR="code --wait"
-        fi
-    fi
-fi
-
-EOF
-)"
-
-# code shim, it fallbacks to code-insiders if code is not available
-cat << 'EOF' > /usr/local/bin/code
-#!/bin/sh
-
-get_in_path_except_current() {
-    which -a "$1" | grep -A1 "$0" | grep -v "$0"
-}
-
-code="$(get_in_path_except_current code)"
-
-if [ -n "$code" ]; then
-    exec "$code" "$@"
-elif [ "$(command -v code-insiders)" ]; then
-    exec code-insiders "$@"
-else
-    echo "code or code-insiders is not installed" >&2
-    exit 127
-fi
-EOF
-chmod +x /usr/local/bin/code
-
-# systemctl shim - tells people to use 'service' if systemd is not running
-cat << 'EOF' > /usr/local/bin/systemctl
-#!/bin/sh
-set -e
-if [ -d "/run/systemd/system" ]; then
-    exec /bin/systemctl/systemctl "$@"
-else
-    echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services intead. e.g.: \n\nservice --status-all'
-fi
-EOF
-chmod +x /usr/local/bin/systemctl
-
-# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
-codespaces_bash="$(cat \
-<<'EOF'
-
-# Codespaces bash prompt theme
-__bash_prompt() {
-    local userpart='`export XIT=$? \
-        && [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
-        && [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
-    local gitbranch='`\
-        if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
-            export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
-            if [ "${BRANCH}" != "" ]; then \
-                echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
-                && if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
-                        echo -n " \[\033[1;33m\]✗"; \
-                fi \
-                && echo -n "\[\033[0;36m\]) "; \
-            fi; \
-        fi`'
-    local lightblue='\[\033[1;34m\]'
-    local removecolor='\[\033[0m\]'
-    PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
-    unset -f __bash_prompt
-}
-__bash_prompt
-
-EOF
-)"
-
-codespaces_zsh="$(cat \
-<<'EOF'
-# Codespaces zsh prompt theme
-__zsh_prompt() {
-    local prompt_username
-    if [ ! -z "${GITHUB_USER}" ]; then 
-        prompt_username="@${GITHUB_USER}"
-    else
-        prompt_username="%n"
-    fi
-    PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
-    PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
-    PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
-    PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
-    unset -f __zsh_prompt
-}
-ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
-ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
-ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
-ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
-__zsh_prompt
-
-EOF
-)"
-
-# Add RC snippet and custom bash prompt
-if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
-    echo "${rc_snippet}" >> /etc/bash.bashrc
-    echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
-    echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
-    if [ "${USERNAME}" != "root" ]; then
-        echo "${codespaces_bash}" >> "/root/.bashrc"
-        echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
-    fi
-    chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
-    RC_SNIPPET_ALREADY_ADDED="true"
-fi
-
-# Optionally install and configure zsh and Oh My Zsh!
-if [ "${INSTALL_ZSH}" = "true" ]; then
-    if ! type zsh > /dev/null 2>&1; then
-        apt_get_update_if_needed
-        apt-get install -y zsh
-    fi
-    if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
-        echo "${rc_snippet}" >> /etc/zsh/zshrc
-        ZSH_ALREADY_INSTALLED="true"
-    fi
-
-    # Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme.
-    # See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
-    oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
-    if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
-        template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
-        user_rc_file="${user_rc_path}/.zshrc"
-        umask g-w,o-w
-        mkdir -p ${oh_my_install_dir}
-        git clone --depth=1 \
-            -c core.eol=lf \
-            -c core.autocrlf=false \
-            -c fsck.zeroPaddedFilemode=ignore \
-            -c fetch.fsck.zeroPaddedFilemode=ignore \
-            -c receive.fsck.zeroPaddedFilemode=ignore \
-            "https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
-        echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
-        sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
-
-        mkdir -p ${oh_my_install_dir}/custom/themes
-        echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
-        # Shrink git while still enabling updates
-        cd "${oh_my_install_dir}"
-        git repack -a -d -f --depth=1 --window=1
-        # Copy to non-root user if one is specified
-        if [ "${USERNAME}" != "root" ]; then
-            cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
-            chown -R ${USERNAME}:${group_name} "${user_rc_path}"
-        fi
-    fi
-fi
-
-# Persist image metadata info, script if meta.env found in same directory
-meta_info_script="$(cat << 'EOF'
-#!/bin/sh
-. /usr/local/etc/vscode-dev-containers/meta.env
-
-# Minimal output
-if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
-    echo "${VERSION}"
-    exit 0
-elif [ "$1" = "release" ]; then
-    echo "${GIT_REPOSITORY_RELEASE}"
-    exit 0
-elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
-    echo "${CONTENTS_URL}"
-    exit 0
-fi
-
-#Full output
-echo
-echo "Development container image information"
-echo
-if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
-if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
-if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
-if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
-if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
-if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
-if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
-echo
-EOF
-)"
-if [ -f "${SCRIPT_DIR}/meta.env" ]; then
-    mkdir -p /usr/local/etc/vscode-dev-containers/
-    cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
-    echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
-    chmod +x /usr/local/bin/devcontainer-info
-fi
-
-# Write marker file
-mkdir -p "$(dirname "${MARKER_FILE}")"
-echo -e "\
-    PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
-    LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
-    EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
-    RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
-    ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
-
-echo "Done!"

+ 0 - 309
.devcontainer/library-scripts/docker-debian.sh

@@ -1,309 +0,0 @@
-#!/usr/bin/env bash
-#-------------------------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
-#-------------------------------------------------------------------------------------------------------------
-#
-# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker.md
-# Maintainer: The VS Code and Codespaces Teams
-#
-# Syntax: ./docker-debian.sh [enable non-root docker socket access flag] [source socket] [target socket] [non-root user] [use moby] [CLI version]
-
-ENABLE_NONROOT_DOCKER=${1:-"true"}
-SOURCE_SOCKET=${2:-"/var/run/docker-host.sock"}
-TARGET_SOCKET=${3:-"/var/run/docker.sock"}
-USERNAME=${4:-"automatic"}
-USE_MOBY=${5:-"true"}
-DOCKER_VERSION=${6:-"latest"}
-MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
-DOCKER_DASH_COMPOSE_VERSION="1"
-
-set -e
-
-if [ "$(id -u)" -ne 0 ]; then
-    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
-    exit 1
-fi
-
-# Determine the appropriate non-root user
-if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
-    USERNAME=""
-    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
-    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
-        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
-            USERNAME=${CURRENT_USER}
-            break
-        fi
-    done
-    if [ "${USERNAME}" = "" ]; then
-        USERNAME=root
-    fi
-elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
-    USERNAME=root
-fi
-
-# Get central common setting
-get_common_setting() {
-    if [ "${common_settings_file_loaded}" != "true" ]; then
-        curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
-        common_settings_file_loaded=true
-    fi
-    if [ -f "/tmp/vsdc-settings.env" ]; then
-        local multi_line=""
-        if [ "$2" = "true" ]; then multi_line="-z"; fi
-        local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
-        if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
-    fi
-    echo "$1=${!1}"
-}
-
-# Function to run apt-get if needed
-apt_get_update_if_needed()
-{
-    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
-        echo "Running apt-get update..."
-        apt-get update
-    else
-        echo "Skipping apt-get update."
-    fi
-}
-
-# Checks if packages are installed and installs them if not
-check_packages() {
-    if ! dpkg -s "$@" > /dev/null 2>&1; then
-        apt_get_update_if_needed
-        apt-get -y install --no-install-recommends "$@"
-    fi
-}
-
-# Figure out correct version of a three part version number is not passed
-find_version_from_git_tags() {
-    local variable_name=$1
-    local requested_version=${!variable_name}
-    if [ "${requested_version}" = "none" ]; then return; fi
-    local repository=$2
-    local prefix=${3:-"tags/v"}
-    local separator=${4:-"."}
-    local last_part_optional=${5:-"false"}    
-    if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
-        local escaped_separator=${separator//./\\.}
-        local last_part
-        if [ "${last_part_optional}" = "true" ]; then
-            last_part="(${escaped_separator}[0-9]+)?"
-        else
-            last_part="${escaped_separator}[0-9]+"
-        fi
-        local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
-        local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
-        if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
-            declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
-        else
-            set +e
-            declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
-            set -e
-        fi
-    fi
-    if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then
-        echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
-        exit 1
-    fi
-    echo "${variable_name}=${!variable_name}"
-}
-
-# Ensure apt is in non-interactive to avoid prompts
-export DEBIAN_FRONTEND=noninteractive
-
-# Install dependencies
-check_packages apt-transport-https curl ca-certificates gnupg2 dirmngr
-if ! type git > /dev/null 2>&1; then
-    apt_get_update_if_needed
-    apt-get -y install git
-fi
-
-# Source /etc/os-release to get OS info
-. /etc/os-release
-# Fetch host/container arch.
-architecture="$(dpkg --print-architecture)"
-
-# Set up the necessary apt repos (either Microsoft's or Docker's)
-if [ "${USE_MOBY}" = "true" ]; then
-
-    cli_package_name="moby-cli"
-
-    # Import key safely and import Microsoft apt repo
-    get_common_setting MICROSOFT_GPG_KEYS_URI
-    curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
-    echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
-else
-    # Name of proprietary engine package
-    cli_package_name="docker-ce-cli"
-
-    # Import key safely and import Docker apt repo
-    curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg
-    echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list
-fi
-
-# Refresh apt lists
-apt-get update
-
-# Soft version matching for CLI
-if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then
-    # Empty, meaning grab whatever "latest" is in apt repo
-    cli_version_suffix=""
-else    
-    # Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...)
-    docker_version_dot_escaped="${DOCKER_VERSION//./\\.}"
-    docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}"
-    # Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/
-    docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)"
-    set +e # Don't exit if finding version fails - will handle gracefully
-    cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")"
-    set -e
-    if [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ]; then
-        echo "(!) No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:"
-        apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+'
-        exit 1
-    fi
-    echo "cli_version_suffix ${cli_version_suffix}"
-fi
-
-# Install Docker / Moby CLI if not already installed
-if type docker > /dev/null 2>&1; then
-    echo "Docker / Moby CLI already installed."
-else
-    if [ "${USE_MOBY}" = "true" ]; then
-        apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx
-        apt-get -y install --no-install-recommends moby-compose || echo "(*) Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping."
-    else
-        apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix}
-    fi
-fi
-
-# Install Docker Compose if not already installed  and is on a supported architecture
-if type docker-compose > /dev/null 2>&1; then
-    echo "Docker Compose already installed."
-else
-    TARGET_COMPOSE_ARCH="$(uname -m)"
-    if [ "${TARGET_COMPOSE_ARCH}" = "amd64" ]; then
-        TARGET_COMPOSE_ARCH="x86_64"
-    fi
-    if [ "${TARGET_COMPOSE_ARCH}" != "x86_64" ]; then
-        # Use pip to get a version that runns on this architecture
-        if ! dpkg -s python3-minimal python3-pip libffi-dev python3-venv > /dev/null 2>&1; then
-            apt_get_update_if_needed
-            apt-get -y install python3-minimal python3-pip libffi-dev python3-venv
-        fi
-        export PIPX_HOME=/usr/local/pipx
-        mkdir -p ${PIPX_HOME}
-        export PIPX_BIN_DIR=/usr/local/bin
-        export PYTHONUSERBASE=/tmp/pip-tmp
-        export PIP_CACHE_DIR=/tmp/pip-tmp/cache
-        pipx_bin=pipx
-        if ! type pipx > /dev/null 2>&1; then
-            pip3 install --disable-pip-version-check --no-cache-dir --user pipx
-            pipx_bin=/tmp/pip-tmp/bin/pipx
-        fi
-        ${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' docker-compose
-        rm -rf /tmp/pip-tmp
-    else 
-        find_version_from_git_tags DOCKER_DASH_COMPOSE_VERSION "https://github.com/docker/compose" "tags/"
-        echo "(*) Installing docker-compose ${DOCKER_DASH_COMPOSE_VERSION}..."
-        curl -fsSL "https://github.com/docker/compose/releases/download/${DOCKER_DASH_COMPOSE_VERSION}/docker-compose-Linux-x86_64" -o /usr/local/bin/docker-compose
-        chmod +x /usr/local/bin/docker-compose
-    fi
-fi
-
-# If init file already exists, exit
-if [ -f "/usr/local/share/docker-init.sh" ]; then
-    exit 0
-fi
-echo "docker-init doesnt exist, adding..."
-
-# By default, make the source and target sockets the same
-if [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ]; then
-    touch "${SOURCE_SOCKET}"
-    ln -s "${SOURCE_SOCKET}" "${TARGET_SOCKET}"
-fi
-
-# Add a stub if not adding non-root user access, user is root
-if [ "${ENABLE_NONROOT_DOCKER}" = "false" ] || [ "${USERNAME}" = "root" ]; then
-    echo '/usr/bin/env bash -c "\$@"' > /usr/local/share/docker-init.sh
-    chmod +x /usr/local/share/docker-init.sh
-    exit 0
-fi
-
-# If enabling non-root access and specified user is found, setup socat and add script
-chown -h "${USERNAME}":root "${TARGET_SOCKET}"        
-if ! dpkg -s socat > /dev/null 2>&1; then
-    apt_get_update_if_needed
-    apt-get -y install socat
-fi
-tee /usr/local/share/docker-init.sh > /dev/null \
-<< EOF 
-#!/usr/bin/env bash
-#-------------------------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
-#-------------------------------------------------------------------------------------------------------------
-
-set -e
-
-SOCAT_PATH_BASE=/tmp/vscr-docker-from-docker
-SOCAT_LOG=\${SOCAT_PATH_BASE}.log
-SOCAT_PID=\${SOCAT_PATH_BASE}.pid
-
-# Wrapper function to only use sudo if not already root
-sudoIf()
-{
-    if [ "\$(id -u)" -ne 0 ]; then
-        sudo "\$@"
-    else
-        "\$@"
-    fi
-}
-
-# Log messages
-log()
-{
-    echo -e "[\$(date)] \$@" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
-}
-
-echo -e "\n** \$(date) **" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
-log "Ensuring ${USERNAME} has access to ${SOURCE_SOCKET} via ${TARGET_SOCKET}"
-
-# If enabled, try to add a docker group with the right GID. If the group is root, 
-# fall back on using socat to forward the docker socket to another unix socket so 
-# that we can set permissions on it without affecting the host.
-if [ "${ENABLE_NONROOT_DOCKER}" = "true" ] && [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ] && [ "${USERNAME}" != "root" ] && [ "${USERNAME}" != "0" ]; then
-    SOCKET_GID=\$(stat -c '%g' ${SOURCE_SOCKET})
-    if [ "\${SOCKET_GID}" != "0" ]; then
-        log "Adding user to group with GID \${SOCKET_GID}."
-        if [ "\$(cat /etc/group | grep :\${SOCKET_GID}:)" = "" ]; then
-            sudoIf groupadd --gid \${SOCKET_GID} docker-host
-        fi
-        # Add user to group if not already in it
-        if [ "\$(id ${USERNAME} | grep -E "groups.*(=|,)\${SOCKET_GID}\(")" = "" ]; then
-            sudoIf usermod -aG \${SOCKET_GID} ${USERNAME}
-        fi
-    else
-        # Enable proxy if not already running
-        if [ ! -f "\${SOCAT_PID}" ] || ! ps -p \$(cat \${SOCAT_PID}) > /dev/null; then
-            log "Enabling socket proxy."
-            log "Proxying ${SOURCE_SOCKET} to ${TARGET_SOCKET} for vscode"
-            sudoIf rm -rf ${TARGET_SOCKET}
-            (sudoIf socat UNIX-LISTEN:${TARGET_SOCKET},fork,mode=660,user=${USERNAME} UNIX-CONNECT:${SOURCE_SOCKET} 2>&1 | sudoIf tee -a \${SOCAT_LOG} > /dev/null & echo "\$!" | sudoIf tee \${SOCAT_PID} > /dev/null)
-        else
-            log "Socket proxy already running."
-        fi
-    fi
-    log "Success"
-fi
-
-# Execute whatever commands were passed in (if any). This allows us 
-# to set this script to ENTRYPOINT while still executing the default CMD.
-set +e
-exec "\$@"
-EOF
-chmod +x /usr/local/share/docker-init.sh
-chown ${USERNAME}:root /usr/local/share/docker-init.sh
-echo "Done!"

+ 0 - 6
.earthlyignore

@@ -1,6 +0,0 @@
-.github
-.vscode
-Earthfile
-extra/doc
-bin
-out

+ 0 - 14
.github/workflows/cancel.yml

@@ -1,14 +0,0 @@
-name: Cancel previous jobs
-on:
-  workflow_run:
-    workflows: ["CI"]
-    types:
-      - requested
-jobs:
-  cancel:
-    runs-on: ubuntu-latest
-    steps:
-    - name: Cancel previous runs
-      uses: styfle/[email protected]
-      with:
-        workflow_id: ${{ github.event.workflow.id }}

+ 195 - 112
.github/workflows/main.yml

@@ -4,6 +4,13 @@
 name: CI
 on: [push, pull_request]
 
+env:
+  OCAML_VERSION: 5.3.0
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   windows64-build:
     runs-on: windows-latest
@@ -18,11 +25,6 @@ jobs:
         with:
           submodules: recursive
 
-      - name: Use GNU Tar from msys
-        run: |
-          echo "C:\msys64\usr\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
-          rm C:\msys64\usr\bin\bash.exe
-
       - name: choco install nsis
         uses: nick-invision/retry@v3
         with:
@@ -51,43 +53,37 @@ jobs:
         run: neko -version 2>&1
 
       - name: Setup ocaml
-        uses: ocaml/setup-ocaml@v2
+        uses: ocaml/setup-ocaml@v3
         with:
-          ocaml-compiler: 4.08.1
-          opam-repositories: |
-            opam-repository-mingw: https://github.com/ocaml-opam/opam-repository-mingw.git#sunset
-            default: https://github.com/ocaml/opam-repository.git
+          ocaml-compiler: ${{ env.OCAML_VERSION }}
           opam-local-packages: |
             haxe.opam
 
       - name: Install dependencies
         shell: pwsh
+        env:
+          MBEDTLS_VERSION: 2.16.3
         run: |
-          Set-PSDebug -Trace 1
-          curl.exe -fsSL -o "libmbedtls.tar.xz" --retry 3 https://github.com/Simn/mingw64-mbedtls/releases/download/2.16.3/mingw64-$($env:MINGW_ARCH)-mbedtls-2.16.3-1.tar.xz
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'curl -L https://cpanmin.us | perl - App::cpanminus')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm IPC::System::Simple module')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm String::ShellQuote')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'echo "$OLDPWD"')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && tar -C / -xvf libmbedtls.tar.xz')
+          curl.exe -fsSL -o "libmbedtls.tar.xz" --retry 3 `
+            https://github.com/Simn/mingw64-mbedtls/releases/download/${{ env.MBEDTLS_VERSION }}/mingw64-${{ env.MINGW_ARCH }}-mbedtls-${{ env.MBEDTLS_VERSION }}-1.tar.xz
+          ${{ env.CYG_ROOT }}\bin\tar.exe -C ${{ env.CYG_ROOT }} -xvf libmbedtls.tar.xz
 
       - name: Install OCaml libraries
-        shell: pwsh
-        run: |
-          Set-PSDebug -Trace 1
-          opam install haxe --deps-only
-          opam list
-
-      - name: Expose mingw dll files
-        shell: pwsh
-        run: Write-Host "::add-path::${env:CYG_ROOT}/usr/$($env:MINGW_ARCH)-w64-mingw32/sys-root/mingw/bin"
-
-      # required to be able to retrieve the revision
-      - name: Mark directory as safe
-        shell: pwsh
-        run: |
-          Set-PSDebug -Trace 1
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'git config --global --add safe.directory "$OLDPWD"')
+        uses: nick-fields/retry@v3
+        with:
+          timeout_minutes: 10
+          max_attempts: 10
+          retry_on: timeout
+          shell: pwsh
+          command: |
+            Set-PSDebug -Trace 1
+            # stop after any command returns an error
+            $PSNativeCommandUseErrorActionPreference = $true
+            $ErrorActionPreference = 'Stop'
+            # see: https://github.com/aantron/luv/issues/162
+            $env:PATH="${env:CYG_ROOT}\bin;${env:CYG_ROOT}\usr\x86_64-w64-mingw32\bin;${env:PATH}"
+            opam install haxe --deps-only
+            opam list
 
       - name: Set ADD_REVISION=1 for non-release
         if: ${{ !startsWith(github.ref, 'refs/tags/') }}
@@ -98,20 +94,15 @@ jobs:
         shell: pwsh
         run: |
           Set-PSDebug -Trace 1
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -s -f Makefile.win -j`nproc` haxe 2>&1')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -s -f Makefile.win haxelib 2>&1')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -f Makefile.win echo_package_files package_bin package_installer_win package_choco 2>&1')
-          dir out
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && cygcheck ./haxe.exe')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && cygcheck ./haxelib.exe')
-          & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && ls ./out')
-
-      - name: Check artifact
-        shell: bash
-        run: |
-          ls out
-          # Output should contain binaries zip, installer zip and nupkg
-          [ $(ls -1 out | wc -l) -eq "3" ]
+          # stop after any command returns an error
+          $PSNativeCommandUseErrorActionPreference = $true
+          $ErrorActionPreference = 'Stop'
+          opam exec -- make -s -f Makefile.win -j"$env:NUMBER_OF_PROCESSORS" haxe
+          opam exec -- make -s -f Makefile.win haxelib
+          opam exec -- make -f Makefile.win echo_package_files package_bin package_installer_win package_choco
+          cygcheck ./haxe.exe
+          cygcheck ./haxelib.exe
+          ls ./out
 
       - name: Upload artifact
         uses: actions/upload-artifact@v4
@@ -121,14 +112,10 @@ jobs:
 
 
   linux-build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       OPAMYES: 1
-    strategy:
-      fail-fast: false
-      matrix:
-        ocaml: ["4.08.1", "5.0.0"]
     steps:
       - uses: actions/checkout@main
         with:
@@ -139,7 +126,7 @@ jobs:
         uses: actions/cache@v4
         with:
           path: ~/.opam/
-          key: ${{ runner.os }}-${{ matrix.ocaml }}-${{ hashFiles('./haxe.opam', './libs/') }}
+          key: ${{ runner.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
       - name: Install Neko from S3
         run: |
@@ -164,18 +151,17 @@ jobs:
       - name: Install dependencies
         run: |
           set -ex
-          sudo add-apt-repository ppa:avsm/ppa -y # provides OPAM 2
-          sudo add-apt-repository ppa:haxe/ocaml -y # provides newer version of mbedtls
           sudo apt-get update -qqy
-          sudo apt-get install -qqy ocaml-nox camlp5 opam libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build libstring-shellquote-perl libipc-system-simple-perl
+          sudo apt-get install -qqy darcs bubblewrap ocaml-nox libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build
+          curl -sSL https://github.com/ocaml/opam/releases/download/2.3.0/opam-2.3.0-x86_64-linux -o $RUNNER_TEMP/opam
+          sudo install $RUNNER_TEMP/opam /usr/local/bin/opam
 
       - name: Install OCaml libraries
         if: steps.cache-opam.outputs.cache-hit != 'true'
         run: |
           set -ex
-          opam init # --disable-sandboxing
+          opam init -c ${{ env.OCAML_VERSION }}
           opam update
-          opam switch create ${{ matrix.ocaml }}
           opam pin add haxe . --no-action
           opam install haxe --deps-only --assume-depexts
           opam list
@@ -203,7 +189,6 @@ jobs:
         run: echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
 
       - name: Build xmldoc
-        if: matrix.ocaml == '4.08.1'
         run: |
           set -ex
           make -s xmldoc
@@ -217,19 +202,18 @@ jobs:
       - name: Upload artifact
         uses: actions/upload-artifact@v4
         with:
-          name: linuxBinaries${{ (matrix.ocaml == '5.0.0' && '_ocaml5') || '' }}
+          name: linuxBinaries
           path: out
 
       - name: Upload xmldoc artifact
         uses: actions/upload-artifact@v4
-        if: matrix.ocaml == '4.08.1'
         with:
           name: xmldoc
           path: extra/doc
 
   linux-test:
     needs: linux-build
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       TEST: ${{matrix.target}}
@@ -238,7 +222,6 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        ocaml: ["4.08.1", "5.0.0"]
         target: [macro, js, hl, cpp, jvm, php, python, lua, flash, neko]
         include:
           - target: hl
@@ -255,7 +238,7 @@ jobs:
           submodules: recursive
       - uses: actions/download-artifact@v4
         with:
-          name: linuxBinaries${{ (matrix.ocaml == '5.0.0' && '_ocaml5') || '' }}
+          name: linuxBinaries
           path: linuxBinaries
 
       - name: Install Neko from S3
@@ -313,10 +296,11 @@ jobs:
       - name: Test
         run: haxe RunCi.hxml
         working-directory: ${{github.workspace}}/tests
+        timeout-minutes: 20
 
   test-docgen:
     needs: linux-build
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       HXCPP_COMPILE_CACHE: ~/hxcache
@@ -394,61 +378,162 @@ jobs:
           cpp/Dox -i ../../xmldoc -ex microsoft -ex javax -theme $(haxelib libpath dox)/themes/default
         working-directory: ${{github.workspace}}/tests/docgen
 
-  linux-arm64:
-    runs-on: ubuntu-20.04
-    permissions:
-      packages: write
+  linux-arm64-build:
+    runs-on: ubuntu-22.04-arm
     env:
-      FORCE_COLOR: 1
+      PLATFORM: linux-arm64
+      OPAMYES: 1
     steps:
-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
+      - uses: actions/checkout@main
         with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Install Earthly
-        run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/download/v0.6.13/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete'
+          submodules: recursive
 
-      - name: Set up QEMU
-        id: qemu
-        uses: docker/setup-qemu-action@v3
+      - name: Cache opam
+        id: cache-opam
+        uses: actions/cache@v4
         with:
-            image: tonistiigi/binfmt:latest
-            platforms: all
+          path: ~/.opam/
+          key: arm-${{ runner.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
-      - uses: actions/checkout@main
-        with:
-          submodules: recursive
+      - name: Install Neko from S3
+        run: |
+          set -ex
 
-      - name: Set CONTAINER_ vars
+          curl -sSL https://build.haxe.org/builds/neko/$PLATFORM/neko_latest.tar.gz -o $RUNNER_TEMP/neko_latest.tar.gz
+          tar -xf $RUNNER_TEMP/neko_latest.tar.gz -C $RUNNER_TEMP
+          NEKOPATH=`echo $RUNNER_TEMP/neko-*-*`
+          sudo mkdir -p /usr/local/bin
+          sudo mkdir -p /usr/local/include
+          sudo mkdir -p /usr/local/lib/neko
+          sudo ln -s $NEKOPATH/{neko,nekoc,nekoml,nekotools}  /usr/local/bin/
+          sudo ln -s $NEKOPATH/libneko.*                      /usr/local/lib/
+          sudo ln -s $NEKOPATH/include/*                      /usr/local/include/
+          sudo ln -s $NEKOPATH/*.ndll                         /usr/local/lib/neko/
+          echo "NEKOPATH=$NEKOPATH" >> $GITHUB_ENV
+
+      - name: Print Neko version
+        run: neko -version 2>&1
+
+
+      - name: Install dependencies
         run: |
-          echo "CONTAINER_REG=$(echo ${{ github.repository }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV;
-          echo "CONTAINER_TAG=$(echo ${{ github.ref_name }} | sed -e 's/[^A-Za-z0-9\.]/-/g')" >> $GITHUB_ENV;
+          set -ex
+          sudo apt-get update -qqy
+          sudo apt-get install -qqy opam libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build
 
-      - name: Build devcontainer
-        run: earthly --platform=linux/arm64 +devcontainer --IMAGE_NAME="ghcr.io/${CONTAINER_REG}_devcontainer" --IMAGE_TAG="${CONTAINER_TAG}-arm64" --IMAGE_CACHE="ghcr.io/haxefoundation/haxe_devcontainer:development-arm64"
-        env:
-          EARTHLY_PUSH: "${{ github.event_name == 'push' }}"
-          EARTHLY_USE_INLINE_CACHE: true
-          EARTHLY_SAVE_INLINE_CACHE: true
+      - name: Install OCaml libraries
+        if: steps.cache-opam.outputs.cache-hit != 'true'
+        run: |
+          set -ex
+          opam init -c ${{ env.OCAML_VERSION }}
+          opam pin add haxe . --no-action
+          opam install haxe --deps-only --assume-depexts
+          opam list
+          ocamlopt -v
 
       - name: Set ADD_REVISION=1 for non-release
         if: ${{ !startsWith(github.ref, 'refs/tags/') }}
         run: echo "ADD_REVISION=1" >> $GITHUB_ENV
 
-      - name: Build
-        run: earthly --platform=linux/arm64 +build --ADD_REVISION="$ADD_REVISION" --SET_SAFE_DIRECTORY="true"
-        env:
-          EARTHLY_PUSH: "${{ github.event_name == 'push' }}"
-          EARTHLY_REMOTE_CACHE: "ghcr.io/${{env.CONTAINER_REG}}_cache:build-${{env.CONTAINER_TAG}}-arm64"
+      - name: Build Haxe
+        run: |
+          set -ex
+          eval $(opam env)
+          opam config exec -- make -s -j`nproc` STATICLINK=1 haxe
+          opam config exec -- make -s haxelib
+          make -s package_unix
+          ls -l out
+          ldd -v ./haxe
+          ldd -v ./haxelib
+
+      # https://stackoverflow.com/questions/58033366/how-to-get-current-branch-within-github-actions
+      - name: Extract branch name
+        id: extract_branch
+        shell: bash
+        run: echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
 
       - name: Upload artifact
         uses: actions/upload-artifact@v4
         with:
           name: linuxArm64Binaries
-          path: out/linux/arm64
+          path: out
+
+  linux-arm64-test:
+    needs: linux-arm64-build
+    runs-on: ubuntu-22.04-arm
+    env:
+      PLATFORM: linux-arm64
+      TEST: ${{matrix.target}}
+      HXCPP_COMPILE_CACHE: ~/hxcache
+      HAXE_STD_PATH: /usr/local/share/haxe/std
+    strategy:
+      fail-fast: false
+      matrix:
+        target: [macro, js, cpp, jvm, php, python, lua, neko]
+        include:
+          - target: lua
+            APT_PACKAGES: ncurses-dev
+    steps:
+      - uses: actions/checkout@main
+        with:
+          submodules: recursive
+      - uses: actions/download-artifact@v4
+        with:
+          name: linuxArm64Binaries
+          path: linuxBinaries
+
+      - name: Install Neko from S3
+        run: |
+          set -ex
+
+          curl -sSL https://build.haxe.org/builds/neko/$PLATFORM/neko_latest.tar.gz -o $RUNNER_TEMP/neko_latest.tar.gz
+          tar -xf $RUNNER_TEMP/neko_latest.tar.gz -C $RUNNER_TEMP
+          NEKOPATH=`echo $RUNNER_TEMP/neko-*-*`
+          sudo mkdir -p /usr/local/bin
+          sudo mkdir -p /usr/local/include
+          sudo mkdir -p /usr/local/lib/neko
+          sudo ln -s $NEKOPATH/{neko,nekoc,nekoml,nekotools}  /usr/local/bin/
+          sudo ln -s $NEKOPATH/libneko.*                      /usr/local/lib/
+          sudo ln -s $NEKOPATH/include/*                      /usr/local/include/
+          sudo ln -s $NEKOPATH/*.ndll                         /usr/local/lib/neko/
+          echo "NEKOPATH=$NEKOPATH" >> $GITHUB_ENV
+
+      - name: Print Neko version
+        run: neko -version 2>&1
+
+
+      - name: Setup Haxe
+        run: |
+          sudo apt install -qqy libmbedtls-dev
+
+          set -ex
+          tar -xf linuxBinaries/*_bin.tar.gz -C linuxBinaries --strip-components=1
+          sudo mkdir -p /usr/local/bin/
+          sudo mkdir -p /usr/local/share/haxe/
+          sudo ln -s `pwd`/linuxBinaries/haxe /usr/local/bin/haxe
+          sudo ln -s `pwd`/linuxBinaries/haxelib /usr/local/bin/haxelib
+          sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
+
+      - name: Print Haxe version
+        run: haxe -version
+
+      - name: Setup haxelib
+        run: |
+          set -ex
+          mkdir ~/haxelib
+          haxelib setup ~/haxelib
+
+      - name: Install apt packages
+        if: matrix.APT_PACKAGES
+        run: |
+          set -ex
+          sudo apt update -qqy
+          sudo apt install -qqy ${{matrix.APT_PACKAGES}}
+
+      - name: Test
+        run: haxe RunCi.hxml
+        working-directory: ${{github.workspace}}/tests
+        timeout-minutes: 20
 
   mac-build:
     strategy:
@@ -460,7 +545,6 @@ jobs:
       PLATFORM: mac${{ matrix.os == 'macos-14' && '-arm64' || '' }}
       OPAMYES: 1
       MACOSX_DEPLOYMENT_TARGET: 10.13
-      OCAML_VERSION: 5.1.1
     steps:
       - uses: actions/checkout@main
         with:
@@ -471,7 +555,7 @@ jobs:
         uses: actions/cache@v4
         with:
           path: ~/.opam/
-          key: ${{ matrix.os }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
+          key: ${{ matrix.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
       - name: Install Neko from S3
         run: |
@@ -502,8 +586,6 @@ jobs:
           set -ex
           brew update
           brew bundle --file=tests/Brewfile --no-upgrade
-          cpanm IPC::System::Simple
-          cpanm String::ShellQuote
           curl -L https://github.com/madler/zlib/releases/download/v$ZLIB_VERSION/zlib-$ZLIB_VERSION.tar.gz | tar xz
           cd zlib-$ZLIB_VERSION
           ./configure
@@ -523,9 +605,8 @@ jobs:
         if: steps.cache-opam.outputs.cache-hit != 'true'
         run: |
           set -ex
-          opam init # --disable-sandboxing
+          opam init -c ${{ env.OCAML_VERSION }}
           opam update
-          opam switch create ${{env.OCAML_VERSION}}
           eval $(opam env)
           opam env
           opam pin add haxe . --no-action
@@ -541,7 +622,7 @@ jobs:
         run: |
           set -ex
           eval $(opam env)
-          opam config exec -- make -s -j`sysctl -n hw.ncpu` STATICLINK=1 "LIB_PARAMS=/usr/local/lib/libz.a /usr/local/lib/libpcre2-8.a /usr/local/lib/libmbedtls.a /usr/local/lib/libmbedcrypto.a /usr/local/lib/libmbedx509.a -cclib '-framework Security -framework CoreFoundation'" haxe
+          opam config exec -- make -s STATICLINK=1 "LIB_PARAMS=\"/usr/local/lib/libz.a\" \"/usr/local/lib/libpcre2-8.a\" \"/usr/local/lib/libmbedtls.a\" \"/usr/local/lib/libmbedcrypto.a\" \"/usr/local/lib/libmbedx509.a\"" haxe
           opam config exec -- make -s haxelib
           make -s package_unix package_installer_mac
           ls -l out
@@ -657,6 +738,7 @@ jobs:
         shell: pwsh
         run: haxe RunCi.hxml
         working-directory: ${{github.workspace}}/tests
+        timeout-minutes: 20
 
 
   mac-build-universal:
@@ -767,12 +849,13 @@ jobs:
           echo "" > sys/compile-fs.hxml
           haxe RunCi.hxml
         working-directory: ${{github.workspace}}/tests
+        timeout-minutes: 60
 
 
   deploy:
     if: success() && github.repository_owner == 'HaxeFoundation' && github.event_name != 'pull_request'
-    needs: [linux-test, linux-arm64, mac-test, windows64-test]
-    runs-on: ubuntu-20.04
+    needs: [linux-test, linux-arm64-test, mac-test, windows64-test]
+    runs-on: ubuntu-22.04
     steps:
       # this is only needed for to get `COMMIT_DATE`...
       # maybe https://github.community/t/expose-commit-timestamp-in-the-github-context-data/16460/3
@@ -841,8 +924,8 @@ jobs:
 
   deploy_apidoc:
     if: success() && github.repository_owner == 'HaxeFoundation' && github.event_name != 'pull_request'
-    needs: [linux-test, linux-arm64, mac-test, windows64-test]
-    runs-on: ubuntu-20.04
+    needs: [linux-test, linux-arm64-test, mac-test, windows64-test]
+    runs-on: ubuntu-22.04
     steps:
       - name: Install dependencies
         run: |

+ 1 - 0
.gitignore

@@ -9,6 +9,7 @@
 *.exe
 .*.swp
 .haxelib
+/opam
 /out
 /installer
 

+ 5 - 1
.vscode/settings.json

@@ -28,5 +28,9 @@
 			],
 			"url": "./.vscode/schemas/meta.schema.json"
 		}
-	]
+	],
+	"ocaml.sandbox": {
+		"kind": "opam",
+		"switch": "default"
+	}
 }

+ 4 - 4
.vscode/tasks.json

@@ -9,7 +9,7 @@
 				"command": "eval $(opam env) && make ADD_REVISION=1 -s -j haxe",
 			},
 			"windows": {
-				"command": "make ADD_REVISION=1 -f Makefile.win -s -j haxe"
+				"command": "opam exec -- make ADD_REVISION=1 -f Makefile.win -s -j haxe"
 			},
 			"problemMatcher": [],
 			"group": {
@@ -22,7 +22,7 @@
 			"type": "shell",
 			"command": "make -s -j libs",
 			"windows": {
-				"command": "make -f Makefile.win -s -j libs"
+				"command": "opam exec -- make -f Makefile.win -s -j libs"
 			},
 			"problemMatcher": []
 		},
@@ -31,7 +31,7 @@
 			"type": "shell",
 			"command": "make -s haxelib",
 			"windows": {
-				"command": "make -f Makefile.win -s haxelib"
+				"command": "opam exec -- make -f Makefile.win -s haxelib"
 			},
 			"problemMatcher": ["$haxe", "$haxe-absolute"]
 		},
@@ -40,7 +40,7 @@
 			"type": "shell",
 			"command": "make s -j libs && make ADD_REVISION=1 -s -j haxe && make -s haxelib",
 			"windows": {
-				"command": "make -f Makefile.win -s -j libs && make ADD_REVISION=1 -f Makefile.win -s -j haxe && make -f Makefile.win -s haxelib"
+				"command": "opam exec -- make -f Makefile.win -s -j libs && make ADD_REVISION=1 -f Makefile.win -s -j haxe && make -f Makefile.win -s haxelib"
 			},
 			"problemMatcher": ["$haxe", "$haxe-absolute"]
 		},

+ 0 - 412
Earthfile

@@ -1,412 +0,0 @@
-VERSION 0.6
-FROM mcr.microsoft.com/vscode/devcontainers/base:0-bionic
-ARG DEVCONTAINER_IMAGE_NAME_DEFAULT=ghcr.io/haxefoundation/haxe_devcontainer
-
-ARG USERNAME=vscode
-ARG USER_UID=1000
-ARG USER_GID=$USER_UID
-
-ARG WORKDIR=/workspace
-RUN mkdir -m 777 "$WORKDIR"
-WORKDIR "$WORKDIR"
-
-ARG --required TARGETARCH
-
-devcontainer-library-scripts:
-    RUN curl -fsSLO https://raw.githubusercontent.com/microsoft/vscode-dev-containers/main/script-library/common-debian.sh
-    RUN curl -fsSLO https://raw.githubusercontent.com/microsoft/vscode-dev-containers/main/script-library/docker-debian.sh
-    SAVE ARTIFACT --keep-ts *.sh AS LOCAL .devcontainer/library-scripts/
-
-devcontainer:
-    # Avoid warnings by switching to noninteractive
-    ENV DEBIAN_FRONTEND=noninteractive
-
-    ARG INSTALL_ZSH="false"
-    ARG UPGRADE_PACKAGES="true"
-    ARG ENABLE_NONROOT_DOCKER="true"
-    ARG USE_MOBY="false"
-    COPY .devcontainer/library-scripts/common-debian.sh .devcontainer/library-scripts/docker-debian.sh /tmp/library-scripts/
-    RUN apt-get update \
-        && /bin/bash /tmp/library-scripts/common-debian.sh "${INSTALL_ZSH}" "${USERNAME}" "${USER_UID}" "${USER_GID}" "${UPGRADE_PACKAGES}" "true" "true" \
-        && /bin/bash /tmp/library-scripts/docker-debian.sh "${ENABLE_NONROOT_DOCKER}" "/var/run/docker-host.sock" "/var/run/docker.sock" "${USERNAME}" "${USE_MOBY}" \
-        # Clean up
-        && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts/
-
-    # Setting the ENTRYPOINT to docker-init.sh will configure non-root access
-    # to the Docker socket. The script will also execute CMD as needed.
-    ENTRYPOINT [ "/usr/local/share/docker-init.sh" ]
-    CMD [ "sleep", "infinity" ]
-
-    # Configure apt and install packages
-    RUN apt-get update \
-        && apt-get install -qqy --no-install-recommends apt-utils dialog 2>&1 \
-        && apt-get install -qqy --no-install-recommends \
-            iproute2 \
-            procps \
-            sudo \
-            bash-completion \
-            build-essential \
-            curl \
-            wget \
-            software-properties-common \
-            direnv \
-            tzdata \
-            # install docker engine for using `WITH DOCKER`
-            docker-ce \
-        # install node
-        && curl -sL https://deb.nodesource.com/setup_16.x | bash - \
-        && apt-get install -qqy --no-install-recommends nodejs=16.* \
-        # install ocaml and other haxe compiler deps
-        && add-apt-repository ppa:avsm/ppa \
-        && add-apt-repository ppa:haxe/ocaml \
-        && apt-get install -qqy --no-install-recommends \
-            ocaml-nox \
-            camlp5 \
-            opam \
-            libpcre2-dev \
-            zlib1g-dev \
-            libgtk2.0-dev \
-            libmbedtls-dev \
-            ninja-build \
-            libstring-shellquote-perl \
-            libipc-system-simple-perl \
-        #
-        # Clean up
-        && apt-get autoremove -y \
-        && apt-get clean -y \
-        && rm -rf /var/lib/apt/lists/*
-
-    # Switch back to dialog for any ad-hoc use of apt-get
-    ENV DEBIAN_FRONTEND=
-
-    DO +INSTALL_NEKO
-
-    COPY +earthly/earthly /usr/local/bin/
-    RUN earthly bootstrap --no-buildkit --with-autocomplete
-
-    USER $USERNAME
-
-    # Do not show git branch in bash prompt because it's slow
-    # https://github.com/microsoft/vscode-dev-containers/issues/1196#issuecomment-988388658
-    RUN git config --global codespaces-theme.hide-status 1
-
-    # Install OCaml libraries
-    COPY haxe.opam .
-    RUN opam init --disable-sandboxing
-    RUN opam switch create 4.08.1
-    RUN eval $(opam env)
-    RUN opam env
-    RUN opam install . --yes --deps-only --no-depexts
-    RUN opam list
-    RUN ocamlopt -v
-
-    USER root
-
-    ARG IMAGE_NAME="$DEVCONTAINER_IMAGE_NAME_DEFAULT"
-    ARG IMAGE_TAG="development"
-    ARG IMAGE_CACHE="$IMAGE_NAME:$IMAGE_TAG"
-    SAVE IMAGE --cache-from="$IMAGE_CACHE" --push "$IMAGE_NAME:$IMAGE_TAG"
-
-devcontainer-multiarch-amd64:
-    ARG IMAGE_NAME="$DEVCONTAINER_IMAGE_NAME_DEFAULT"
-    ARG IMAGE_TAG="development"
-    FROM --platform=linux/amd64 +devcontainer --IMAGE_NAME="$IMAGE_NAME" --IMAGE_TAG="$IMAGE_TAG-amd64"
-    SAVE IMAGE --push "$IMAGE_NAME:$IMAGE_TAG"
-
-devcontainer-multiarch-arm64:
-    ARG IMAGE_NAME="$DEVCONTAINER_IMAGE_NAME_DEFAULT"
-    ARG IMAGE_TAG="development"
-    FROM --platform=linux/arm64 +devcontainer --IMAGE_NAME="$IMAGE_NAME" --IMAGE_TAG="$IMAGE_TAG-arm64"
-    SAVE IMAGE --push "$IMAGE_NAME:$IMAGE_TAG"
-
-devcontainer-multiarch:
-    BUILD +devcontainer-multiarch-amd64
-    BUILD +devcontainer-multiarch-arm64
-
-# Usage:
-# COPY +earthly/earthly /usr/local/bin/
-# RUN earthly bootstrap --no-buildkit --with-autocomplete
-earthly:
-    ARG --required TARGETARCH
-    RUN curl -fsSL https://github.com/earthly/earthly/releases/download/v0.6.13/earthly-linux-${TARGETARCH} -o /usr/local/bin/earthly \
-        && chmod +x /usr/local/bin/earthly
-    SAVE ARTIFACT /usr/local/bin/earthly
-
-INSTALL_PACKAGES:
-    COMMAND
-    ARG PACKAGES
-    RUN apt-get update -qqy && \
-        apt-get install -qqy --no-install-recommends $PACKAGES && \
-        apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*
-
-INSTALL_NEKO:
-    COMMAND
-    ARG NEKOPATH=/neko
-    COPY +neko/* "$NEKOPATH/"
-    ARG PREFIX=/usr/local
-    RUN bash -c "ln -s \"$NEKOPATH\"/{neko,nekoc,nekoml,nekotools} \"$PREFIX/bin/\""
-    RUN bash -c "ln -s \"$NEKOPATH\"/libneko.* \"$PREFIX/lib/\""
-    RUN bash -c "ln -s \"$NEKOPATH\"/*.h \"$PREFIX/include/\""
-    RUN mkdir -p "$PREFIX/lib/neko/"
-    RUN bash -c "ln -s \"$NEKOPATH\"/*.ndll \"$PREFIX/lib/neko/\""
-    RUN ldconfig
-
-INSTALL_HAXE:
-    COMMAND
-    ARG PREFIX=/usr/local
-    COPY +build/haxe +build/haxelib "$PREFIX/bin/"
-    COPY std "$PREFIX/share/haxe/std"
-
-try-neko:
-    DO +INSTALL_NEKO
-    RUN neko -version
-    RUN nekotools
-
-try-haxe:
-    DO +INSTALL_NEKO
-    DO +INSTALL_HAXE
-    RUN haxe --version
-    RUN haxelib version
-
-neko:
-    RUN set -ex && \
-        case "$TARGETARCH" in \
-            amd64) PLATFORM=linux64;; \
-            arm64) PLATFORM=linux-arm64;; \
-            *) exit 1;; \
-        esac && \
-        curl -fsSL https://build.haxe.org/builds/neko/$PLATFORM/neko_latest.tar.gz -o neko_latest.tar.gz && \
-        tar -xf neko_latest.tar.gz && \
-        mv `echo neko-*-*` /tmp/neko-unpacked
-    SAVE ARTIFACT /tmp/neko-unpacked/*
-    SAVE IMAGE --cache-hint
-
-build:
-    FROM +devcontainer
-
-    USER $USERNAME
-
-    # Build Haxe
-    COPY --dir extra libs plugins src* std dune* Makefile* .
-
-    # the Makefile calls git to get commit sha
-    COPY .git .git
-    ARG SET_SAFE_DIRECTORY="false"
-    IF [ "$SET_SAFE_DIRECTORY" = "true" ]
-        RUN git config --global --add safe.directory "$WORKDIR"
-    END
-
-    ARG ADD_REVISION
-    ENV ADD_REVISION=$ADD_REVISION
-    RUN opam config exec -- make -s -j`nproc` STATICLINK=1 haxe && ldd -v ./haxe
-    RUN opam config exec -- make -s haxelib && ldd -v ./haxelib
-    RUN make -s package_unix && ls -l out
-
-    ARG TARGETPLATFORM
-    SAVE ARTIFACT --keep-ts ./out/* AS LOCAL out/$TARGETPLATFORM/
-    SAVE ARTIFACT --keep-ts ./haxe AS LOCAL out/$TARGETPLATFORM/
-    SAVE ARTIFACT --keep-ts ./haxelib AS LOCAL out/$TARGETPLATFORM/
-    SAVE IMAGE --cache-hint
-
-build-multiarch:
-    ARG ADD_REVISION
-    BUILD --platform=linux/amd64 --platform=linux/arm64 +build --ADD_REVISION=$ADD_REVISION
-
-xmldoc:
-    DO +INSTALL_NEKO
-    DO +INSTALL_HAXE
-
-    COPY --dir extra .
-
-    WORKDIR extra
-    RUN haxelib newrepo
-    RUN haxelib git hxcpp  https://github.com/HaxeFoundation/hxcpp
-    RUN haxelib git hxjava https://github.com/HaxeFoundation/hxjava
-    RUN haxe doc.hxml
-
-    ARG COMMIT
-    ARG BRANCH
-    RUN echo "{\"commit\":\"$COMMIT\",\"branch\":\"$BRANCH\"}" > doc/info.json
-
-    SAVE ARTIFACT --keep-ts ./doc AS LOCAL extra/doc
-
-test-environment:
-    # we use a sightly newer ubuntu for easier installation of the target runtimes (e.g. php)
-    FROM ubuntu:focal
-    DO +INSTALL_NEKO
-    DO +INSTALL_HAXE
-
-    ENV DEBIAN_FRONTEND=noninteractive
-    DO +INSTALL_PACKAGES --PACKAGES="ca-certificates curl wget git build-essential locales sqlite3"
-
-    # Node.js is required as there are tests that use it (search "-cmd node")
-    RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash - && \
-        apt-get install -qqy nodejs && \
-        apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*
-
-    # set locale
-    RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
-    ENV LANG=en_US.UTF-8
-    ENV LANGUAGE=en_US:en
-    ENV LC_ALL=en_US.UTF-8
-
-    SAVE IMAGE --cache-hint
-
-test-environment-java:
-    FROM +test-environment
-    DO +INSTALL_PACKAGES --PACKAGES="default-jdk"
-    SAVE IMAGE --cache-hint
-
-test-environment-js:
-    # somehow js tests require hxjava which in turns require javac
-    FROM +test-environment-java
-
-test-environment-python:
-    FROM +test-environment
-    DO +INSTALL_PACKAGES --PACKAGES="python3"
-    SAVE IMAGE --cache-hint
-
-test-environment-php:
-    FROM +test-environment
-    DO +INSTALL_PACKAGES --PACKAGES="php-cli php-mbstring php-sqlite3"
-    SAVE IMAGE --cache-hint
-
-test-environment-hl:
-    FROM +test-environment
-    DO +INSTALL_PACKAGES --PACKAGES="cmake ninja-build libturbojpeg-dev libpng-dev zlib1g-dev libvorbis-dev libsqlite3-dev"
-    SAVE IMAGE --cache-hint
-
-test-environment-lua:
-    # hererocks uses pip
-    FROM +test-environment-python
-    DO +INSTALL_PACKAGES --PACKAGES="libssl-dev libreadline-dev python3-pip unzip libpcre2-dev cmake"
-    RUN ln -s /root/.local/bin/hererocks /bin/
-    SAVE IMAGE --cache-hint
-
-test-environment-cpp:
-    FROM +test-environment
-
-    ARG TARGETPLATFORM
-
-    IF [ "$TARGETPLATFORM" = "linux/amd64" ]
-        DO +INSTALL_PACKAGES --PACKAGES="g++-multilib"
-    ELSE IF [ "$TARGETPLATFORM" = "linux/arm64" ]
-        DO +INSTALL_PACKAGES --PACKAGES="g++-multilib-arm-linux-gnueabi"
-    ELSE
-        RUN echo "Unsupported platform $TARGETPLATFORM" && exit 1
-    END
-
-    SAVE IMAGE --cache-hint
-
-test-environment-flash:
-    # apache flex requires java
-    FROM +test-environment-java
-    # requirements for running flash player
-    DO +INSTALL_PACKAGES --PACKAGES="libglib2.0-0 libfreetype6 xvfb libxcursor1 libnss3 libgtk2.0-0"
-    SAVE IMAGE --cache-hint
-
-RUN_CI:
-    COMMAND
-    COPY tests tests
-    RUN mkdir /haxelib && haxelib setup /haxelib
-    WORKDIR tests
-    ARG --required TEST
-    ENV TEST="$TEST"
-    RUN haxe RunCi.hxml
-
-test-macro:
-    FROM +test-environment
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=macro
-
-test-neko:
-    FROM +test-environment
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=neko
-
-test-js:
-    FROM +test-environment-js
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=js
-
-test-hl:
-    FROM +test-environment-hl
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=hl
-
-test-cpp:
-    FROM +test-environment-cpp
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=cpp
-
-test-java:
-    FROM +test-environment-java
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=java
-
-test-jvm:
-    FROM +test-environment-java
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=jvm
-
-test-php:
-    FROM +test-environment-php
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=php
-
-test-python:
-    FROM +test-environment-python
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=python
-
-test-lua:
-    FROM +test-environment-lua
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=lua
-
-test-flash:
-    FROM +test-environment-flash
-    ARG GITHUB_ACTIONS
-    ENV GITHUB_ACTIONS=$GITHUB_ACTIONS
-    DO +RUN_CI --TEST=flash
-
-test-all:
-    ARG TARGETPLATFORM
-
-    BUILD +test-macro
-    BUILD +test-neko
-    BUILD +test-php
-    BUILD +test-python
-    BUILD +test-java
-    BUILD +test-jvm
-    BUILD +test-cpp
-    BUILD +test-lua
-    BUILD +test-js
-    BUILD +test-flash
-
-    IF [ "$TARGETPLATFORM" = "linux/amd64" ]
-        BUILD +test-hl # FIXME: hl can't compile on arm64 (JIT issue?)
-    END
-
-github-actions:
-    DO +INSTALL_NEKO
-    DO +INSTALL_HAXE
-    RUN mkdir -p "$WORKDIR"/.github/workflows
-    COPY extra/github-actions extra/github-actions
-    WORKDIR extra/github-actions
-    RUN haxe build.hxml
-    SAVE ARTIFACT --keep-ts "$WORKDIR"/.github/workflows AS LOCAL .github/workflows
-
-ghcr-login:
-    LOCALLY
-    RUN echo "$GITHUB_CR_PAT" | docker login ghcr.io -u "$GITHUB_USERNAME" --password-stdin

+ 3 - 15
Makefile

@@ -63,26 +63,14 @@ NEKO_VERSION=2.4.0-rc.1
 NEKO_MAJOR_VERSION=$(shell echo "$(NEKO_VERSION)" | grep -oE "^[0-9]+")
 NEKO_VERSION_TAG=v$(shell echo "$(NEKO_VERSION)" | sed "s/\./-/g")
 
-ifneq ($(STATICLINK),0)
-	LIB_PARAMS= -cclib '-Wl,-Bstatic -lpcre2-8 -lz -lmbedtls -lmbedx509 -lmbedcrypto -Wl,-Bdynamic '
-else
-	LIB_PARAMS?= -cclib -lpcre2-8 -cclib -lz -cclib -lmbedtls -cclib -lmbedx509 -cclib -lmbedcrypto
-endif
-ifeq ($(SYSTEM_NAME),Mac)
-	LIB_PARAMS+= -cclib '-framework Security -framework CoreFoundation'
-endif
-
 all: haxe tools
 
 haxe:
-	$(DUNE_COMMAND) build --workspace dune-workspace.dev src-prebuild/prebuild.exe
-	_build/default/src-prebuild/prebuild.exe libparams $(LIB_PARAMS) > lib.sexp
-	_build/default/src-prebuild/prebuild.exe version "$(ADD_REVISION)" "$(BRANCH)" "$(COMMIT_SHA)" > src/compiler/version.ml
-	$(DUNE_COMMAND) build --workspace dune-workspace.dev src/haxe.exe
+	dune build --profile release src/haxe.exe
 	cp -f _build/default/src/haxe.exe ./"$(HAXE_OUTPUT)"
 
 plugin: haxe
-	$(DUNE_COMMAND) build --workspace dune-workspace.dev plugins/$(PLUGIN)/$(PLUGIN).cmxs
+	$(DUNE_COMMAND) build --profile release plugins/$(PLUGIN)/$(PLUGIN).cmxs
 	mkdir -p plugins/$(PLUGIN)/cmxs/$(SYSTEM_NAME)
 	cp -f _build/default/plugins/$(PLUGIN)/$(PLUGIN).cmxs plugins/$(PLUGIN)/cmxs/$(SYSTEM_NAME)/plugin.cmxs
 
@@ -142,7 +130,7 @@ uninstall:
 	rm -rf $(DESTDIR)$(INSTALL_STD_DIR)
 
 opam_install:
-	opam install camlp5 ocamlfind dune --yes
+	opam install ocamlfind dune --yes
 
 haxe_deps:
 	opam pin add haxe . --no-action

+ 0 - 4
Makefile.win

@@ -42,10 +42,6 @@ ifdef FILTER
 CC_CMD=($(COMPILER) $(ALL_CFLAGS) -c $< 2>tmp.cmi && $(FILTER)) || ($(FILTER) && exit 1)
 endif
 
-ifeq ($(STATICLINK),0)
-	LIB_PARAMS = -cclib -lpcre2-8 -cclib -lz -cclib -lcrypt32 -cclib -lmbedtls -cclib -lmbedcrypto -cclib -lmbedx509
-endif
-
 PACKAGE_FILES=$(HAXE_OUTPUT) $(HAXELIB_OUTPUT) std \
 	"$$(cygcheck $(CURDIR)/$(HAXE_OUTPUT) | grep zlib1.dll | sed -e 's/^\s*//')" \
 	"$$(cygcheck $(CURDIR)/$(HAXE_OUTPUT) | grep libpcre2-8-0.dll | sed -e 's/^\s*//')" \

+ 96 - 0
WinSetup.ps1

@@ -0,0 +1,96 @@
+# Usage:
+# - install Git
+# - install Neko
+# - checkout haxe git
+# - run from command "powershell -noexit -ExecutionPolicy Bypass -File .\WinSetup.ps1"
+
+function Cmd-Path($file) {
+	try { Split-Path -Parent (Get-Command "$file.exe" -ErrorAction Stop).Source } catch { "" }
+}
+
+# resolve Opam binary and repo
+# you can choose when opam is installed by setting OPAM_INSTALL_DIR (and OPAMROOT - optional)
+
+$Opam = Cmd-Path "opam"
+$OpamRepo = $env:OPAMROOT
+$Git = Cmd-Path "git"
+
+if( !$Opam ) { $Opam = $env:OPAM_INSTALL_DIR }
+if( !$Opam ) { $Opam = (Get-Item .).FullName + "\opam" }
+if( !$OpamRepo ) { $OpamRepo = "$Opam\repo" }
+
+$CygRoot = "$OpamRepo\.cygwin\root"
+$WinSysPath = "$env:SystemRoot\System32"
+$Neko = Cmd-Path "neko"
+$RegPath = "HKCU:\Environment"
+$MbedVer = "2.16.3"
+$MbedTLS = "https://github.com/Simn/mingw64-mbedtls/releases/download/$MbedVer/mingw64-x86_64-mbedtls-$MbedVer-1.tar.xz"
+
+function Install-Init {
+
+	if( !$Git ) {
+		echo "**ERROR** git.exe could not be found in PATH"
+		Exit
+	}
+
+	if( !$Neko ) {
+		echo "**ERROR** Neko.exe could not be found in PATH"
+		Exit
+	}
+
+	# reset PATH to prevent conflicting cygwin or existing install
+	Set-Item -Path env:PATH -Value "$CygRoot\usr\x86_64-w64-mingw32\bin;$CygRoot\bin;$Opam;$Neko;$Git;$WinSysPath"
+
+	# set OPAM root dir
+	Set-Item -Path env:OPAMROOT -Value "$OpamRepo"
+}
+
+function Install-Opam {
+	# download opam binary
+	Invoke-Expression "& { $(Invoke-RestMethod https://opam.ocaml.org/install.ps1)} -OpamBinDir $Opam"
+
+	# init opam, assuming Windows Git is installed
+	Invoke-Expression "opam init --cygwin-internal-install --no-git-location --shell=powershell --shell-setup"
+}
+
+function Install-Haxe-Deps {
+	Invoke-Expression "opam install . --deps-only --confirm-level=yes"
+
+	# install mbedtls mingw package
+	$tmpFile = "./mbed.tgz"
+	Invoke-Expression "curl $MbedTLS -o $tmpFile"
+	Invoke-Expression "tar -C / -xvf $tmpFile"
+	Remove-Item "$tmpFile"
+
+	# install lsp server
+	Invoke-Expression "opam install ocaml-lsp-server --confirm-level=yes"
+}
+
+function Add-Path($NewPath) {
+	$CurrentPath = (Get-ItemProperty -Path $RegPath -Name Path).Path
+	if ($CurrentPath -notlike "*$NewPath*") {
+		$CurrentPath = "$NewPath;$CurrentPath"
+		Set-ItemProperty -Path $RegPath -Name Path -Value $CurrentPath
+	}
+}
+
+function Setup-Paths {
+	Add-Path "$OpamRepo\default\bin"
+	Add-Path "$CygRoot\bin"
+	Add-Path "$CygRoot\usr\x86_64-w64-mingw32\bin"
+	Add-Path "$CygRoot\usr\x86_64-w64-mingw32\sys-root\mingw\bin"
+	Set-ItemProperty -Path $RegPath -Name OPAMROOT -Value $OpamRepo
+
+	# refresh for all processes (no need to restart)
+	$signature = @"
+[DllImport("user32.dll", CharSet = CharSet.Auto)]
+public static extern int SendMessageTimeout(IntPtr hWnd, int Msg, IntPtr wParam, string lParam, int fuFlags, int uTimeout, out IntPtr lpdwResult);
+"@
+	$SendMessageTimeout = Add-Type -MemberDefinition $signature -Name "Win32SendMessageTimeout" -Namespace Win32Functions -PassThru
+	$SendMessageTimeout::SendMessageTimeout([IntPtr]0xFFFF, 0x1A, [IntPtr]::Zero, "Environment", 2, 5000, [ref][IntPtr]::Zero)
+}
+
+Install-Init
+Install-Opam
+Install-Haxe-Deps
+Setup-Paths

+ 2 - 2
dune

@@ -1,2 +1,2 @@
-(dirs :standard \ tests std extra)
-(data_only_dirs lib)
+(dirs src libs)
+(data_only_dirs src-json)

+ 0 - 4
dune-project

@@ -4,7 +4,3 @@
 (package
 	(name haxe)
 )
-
-(package
-	(name haxe_prebuild)
-)

+ 0 - 2
dune-workspace.dev

@@ -1,2 +0,0 @@
-(lang dune 1.11)
-(profile release)

+ 2 - 5
extra/github-actions/build-mac.yml

@@ -8,8 +8,6 @@
     set -ex
     brew update
     brew bundle --file=tests/Brewfile --no-upgrade
-    cpanm IPC::System::Simple
-    cpanm String::ShellQuote
     curl -L https://github.com/madler/zlib/releases/download/v$ZLIB_VERSION/zlib-$ZLIB_VERSION.tar.gz | tar xz
     cd zlib-$ZLIB_VERSION
     ./configure
@@ -29,9 +27,8 @@
   if: steps.cache-opam.outputs.cache-hit != 'true'
   run: |
     set -ex
-    opam init # --disable-sandboxing
+    opam init -c ${{ env.OCAML_VERSION }}
     opam update
-    opam switch create ${{env.OCAML_VERSION}}
     eval $(opam env)
     opam env
     opam pin add haxe . --no-action
@@ -47,7 +44,7 @@
   run: |
     set -ex
     eval $(opam env)
-    opam config exec -- make -s -j`sysctl -n hw.ncpu` STATICLINK=1 "LIB_PARAMS=/usr/local/lib/libz.a /usr/local/lib/libpcre2-8.a /usr/local/lib/libmbedtls.a /usr/local/lib/libmbedcrypto.a /usr/local/lib/libmbedx509.a -cclib '-framework Security -framework CoreFoundation'" haxe
+    opam config exec -- make -s STATICLINK=1 "LIB_PARAMS=\"/usr/local/lib/libz.a\" \"/usr/local/lib/libpcre2-8.a\" \"/usr/local/lib/libmbedtls.a\" \"/usr/local/lib/libmbedcrypto.a\" \"/usr/local/lib/libmbedx509.a\"" haxe
     opam config exec -- make -s haxelib
     make -s package_unix package_installer_mac
     ls -l out

+ 9 - 25
extra/github-actions/build-windows.yml

@@ -1,14 +1,3 @@
-- name: Expose mingw dll files
-  shell: pwsh
-  run: Write-Host "::add-path::${env:CYG_ROOT}/usr/$($env:MINGW_ARCH)-w64-mingw32/sys-root/mingw/bin"
-
-# required to be able to retrieve the revision
-- name: Mark directory as safe
-  shell: pwsh
-  run: |
-    Set-PSDebug -Trace 1
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'git config --global --add safe.directory "$OLDPWD"')
-
 - name: Set ADD_REVISION=1 for non-release
   if: ${{ !startsWith(github.ref, 'refs/tags/') }}
   shell: pwsh
@@ -18,20 +7,15 @@
   shell: pwsh
   run: |
     Set-PSDebug -Trace 1
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -s -f Makefile.win -j`nproc` haxe 2>&1')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -s -f Makefile.win haxelib 2>&1')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && opam config exec -- make -f Makefile.win echo_package_files package_bin package_installer_win package_choco 2>&1')
-    dir out
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && cygcheck ./haxe.exe')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && cygcheck ./haxelib.exe')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && ls ./out')
-
-- name: Check artifact
-  shell: bash
-  run: |
-    ls out
-    # Output should contain binaries zip, installer zip and nupkg
-    [ $(ls -1 out | wc -l) -eq "3" ]
+    # stop after any command returns an error
+    $PSNativeCommandUseErrorActionPreference = $true
+    $ErrorActionPreference = 'Stop'
+    opam exec -- make -s -f Makefile.win -j"$env:NUMBER_OF_PROCESSORS" haxe
+    opam exec -- make -s -f Makefile.win haxelib
+    opam exec -- make -f Makefile.win echo_package_files package_bin package_installer_win package_choco
+    cygcheck ./haxe.exe
+    cygcheck ./haxelib.exe
+    ls ./out
 
 - name: Upload artifact
   uses: actions/upload-artifact@v4

+ 15 - 5
extra/github-actions/install-ocaml-libs-windows.yml

@@ -1,6 +1,16 @@
 - name: Install OCaml libraries
-  shell: pwsh
-  run: |
-    Set-PSDebug -Trace 1
-    opam install haxe --deps-only
-    opam list
+  uses: nick-fields/retry@v3
+  with:
+    timeout_minutes: 10
+    max_attempts: 10
+    retry_on: timeout
+    shell: pwsh
+    command: |
+      Set-PSDebug -Trace 1
+      # stop after any command returns an error
+      $PSNativeCommandUseErrorActionPreference = $true
+      $ErrorActionPreference = 'Stop'
+      # see: https://github.com/aantron/luv/issues/162
+      $env:PATH="${env:CYG_ROOT}\bin;${env:CYG_ROOT}\usr\x86_64-w64-mingw32\bin;${env:PATH}"
+      opam install haxe --deps-only
+      opam list

+ 7 - 31
extra/github-actions/install-ocaml-windows.yml

@@ -1,39 +1,15 @@
 - name: Setup ocaml
-  id: ocaml
-  continue-on-error: true
-  uses: kLabz/setup-ocaml@win32
+  uses: ocaml/setup-ocaml@v3
   with:
-    ocaml-compiler: 4.08.1
-    opam-depext: false
-    opam-repositories: |
-      opam-repository-mingw: https://github.com/ocaml-opam/opam-repository-mingw.git#sunset
-      default: https://github.com/ocaml/opam-repository.git
+    ocaml-compiler: ${{ env.OCAML_VERSION }}
     opam-local-packages: |
       haxe.opam
-    cache-prefix: w32-v1
-
-# TODO make it work on first try
-# (when cygwin cache doesn't exist, ocaml install fails with a curl error)
-- name: Setup ocaml (second chance)
-  if: steps.ocaml.outcome == 'failure'
-  uses: kLabz/setup-ocaml@win32
-  with:
-    ocaml-compiler: 4.08.1
-    opam-depext: false
-    opam-repositories: |
-      opam-repository-mingw: https://github.com/ocaml-opam/opam-repository-mingw.git#sunset
-      default: https://github.com/ocaml/opam-repository.git
-    opam-local-packages: |
-      haxe.opam
-    cache-prefix: w32-v1
 
 - name: Install dependencies
   shell: pwsh
+  env:
+    MBEDTLS_VERSION: 2.16.3
   run: |
-    Set-PSDebug -Trace 1
-    curl.exe -fsSL -o "libmbedtls.tar.xz" --retry 3 https://github.com/Simn/mingw64-mbedtls/releases/download/2.16.3/mingw64-$($env:MINGW_ARCH)-mbedtls-2.16.3-1.tar.xz
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'curl -L https://cpanmin.us | perl - App::cpanminus')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm IPC::System::Simple module')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm String::ShellQuote')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'echo "$OLDPWD"')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && tar -C / -xvf libmbedtls.tar.xz')
+    curl.exe -fsSL -o "libmbedtls.tar.xz" --retry 3 `
+      https://github.com/Simn/mingw64-mbedtls/releases/download/${{ env.MBEDTLS_VERSION }}/mingw64-${{ env.MINGW_ARCH }}-mbedtls-${{ env.MBEDTLS_VERSION }}-1.tar.xz
+    ${{ env.CYG_ROOT }}\bin\tar.exe -C ${{ env.CYG_ROOT }} -xvf libmbedtls.tar.xz

+ 0 - 20
extra/github-actions/install-ocaml-windows64.yml

@@ -1,20 +0,0 @@
-- name: Setup ocaml
-  uses: ocaml/setup-ocaml@v2
-  with:
-    ocaml-compiler: 4.08.1
-    opam-repositories: |
-      opam-repository-mingw: https://github.com/ocaml-opam/opam-repository-mingw.git#sunset
-      default: https://github.com/ocaml/opam-repository.git
-    opam-local-packages: |
-      haxe.opam
-
-- name: Install dependencies
-  shell: pwsh
-  run: |
-    Set-PSDebug -Trace 1
-    curl.exe -fsSL -o "libmbedtls.tar.xz" --retry 3 https://github.com/Simn/mingw64-mbedtls/releases/download/2.16.3/mingw64-$($env:MINGW_ARCH)-mbedtls-2.16.3-1.tar.xz
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'curl -L https://cpanmin.us | perl - App::cpanminus')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm IPC::System::Simple module')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cpanm String::ShellQuote')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'echo "$OLDPWD"')
-    & "$($env:CYG_ROOT)/bin/bash.exe" @('-lc', 'cd "$OLDPWD" && tar -C / -xvf libmbedtls.tar.xz')

+ 123 - 66
extra/github-actions/workflows/main.yml

@@ -3,6 +3,13 @@
 name: CI
 on: [push, pull_request]
 
+env:
+  OCAML_VERSION: 5.3.0
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   windows64-build:
     runs-on: windows-latest
@@ -17,26 +24,17 @@ jobs:
         with:
           submodules: recursive
 
-      - name: Use GNU Tar from msys
-        run: |
-          echo "C:\msys64\usr\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
-          rm C:\msys64\usr\bin\bash.exe
-
       @import install-nsis.yml
       @import install-neko-windows.yml
-      @import install-ocaml-windows64.yml
+      @import install-ocaml-windows.yml
       @import install-ocaml-libs-windows.yml
       @import build-windows.yml
 
   linux-build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       OPAMYES: 1
-    strategy:
-      fail-fast: false
-      matrix:
-        ocaml: ["4.08.1", "5.0.0"]
     steps:
       - uses: actions/checkout@main
         with:
@@ -47,25 +45,24 @@ jobs:
         uses: actions/cache@v4
         with:
           path: ~/.opam/
-          key: ${{ runner.os }}-${{ matrix.ocaml }}-${{ hashFiles('./haxe.opam', './libs/') }}
+          key: ${{ runner.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
       @import install-neko-unix.yml
 
       - name: Install dependencies
         run: |
           set -ex
-          sudo add-apt-repository ppa:avsm/ppa -y # provides OPAM 2
-          sudo add-apt-repository ppa:haxe/ocaml -y # provides newer version of mbedtls
           sudo apt-get update -qqy
-          sudo apt-get install -qqy ocaml-nox camlp5 opam libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build libstring-shellquote-perl libipc-system-simple-perl
+          sudo apt-get install -qqy darcs bubblewrap ocaml-nox libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build
+          curl -sSL https://github.com/ocaml/opam/releases/download/2.3.0/opam-2.3.0-x86_64-linux -o $RUNNER_TEMP/opam
+          sudo install $RUNNER_TEMP/opam /usr/local/bin/opam
 
       - name: Install OCaml libraries
         if: steps.cache-opam.outputs.cache-hit != 'true'
         run: |
           set -ex
-          opam init # --disable-sandboxing
+          opam init -c ${{ env.OCAML_VERSION }}
           opam update
-          opam switch create ${{ matrix.ocaml }}
           opam pin add haxe . --no-action
           opam install haxe --deps-only --assume-depexts
           opam list
@@ -93,7 +90,6 @@ jobs:
         run: echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
 
       - name: Build xmldoc
-        if: matrix.ocaml == '4.08.1'
         run: |
           set -ex
           make -s xmldoc
@@ -107,19 +103,18 @@ jobs:
       - name: Upload artifact
         uses: actions/upload-artifact@v4
         with:
-          name: linuxBinaries${{ (matrix.ocaml == '5.0.0' && '_ocaml5') || '' }}
+          name: linuxBinaries
           path: out
 
       - name: Upload xmldoc artifact
         uses: actions/upload-artifact@v4
-        if: matrix.ocaml == '4.08.1'
         with:
           name: xmldoc
           path: extra/doc
 
   linux-test:
     needs: linux-build
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       TEST: ${{matrix.target}}
@@ -128,7 +123,6 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        ocaml: ["4.08.1", "5.0.0"]
         target: [macro, js, hl, cpp, jvm, php, python, lua, flash, neko]
         include:
           - target: hl
@@ -145,7 +139,7 @@ jobs:
           submodules: recursive
       - uses: actions/download-artifact@v4
         with:
-          name: linuxBinaries${{ (matrix.ocaml == '5.0.0' && '_ocaml5') || '' }}
+          name: linuxBinaries
           path: linuxBinaries
 
       @import install-neko-unix.yml
@@ -188,7 +182,7 @@ jobs:
 
   test-docgen:
     needs: linux-build
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     env:
       PLATFORM: linux64
       HXCPP_COMPILE_CACHE: ~/hxcache
@@ -248,61 +242,125 @@ jobs:
           cpp/Dox -i ../../xmldoc -ex microsoft -ex javax -theme $(haxelib libpath dox)/themes/default
         working-directory: ${{github.workspace}}/tests/docgen
 
-  linux-arm64:
-    runs-on: ubuntu-20.04
-    permissions:
-      packages: write
+  linux-arm64-build:
+    runs-on: ubuntu-22.04-arm
     env:
-      FORCE_COLOR: 1
+      PLATFORM: linux-arm64
+      OPAMYES: 1
     steps:
-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
+      - uses: actions/checkout@main
         with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Install Earthly
-        run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/download/v0.6.13/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete'
+          submodules: recursive
 
-      - name: Set up QEMU
-        id: qemu
-        uses: docker/setup-qemu-action@v3
+      - name: Cache opam
+        id: cache-opam
+        uses: actions/cache@v4
         with:
-            image: tonistiigi/binfmt:latest
-            platforms: all
+          path: ~/.opam/
+          key: arm-${{ runner.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
-      - uses: actions/checkout@main
-        with:
-          submodules: recursive
+      @import install-neko-unix.yml
 
-      - name: Set CONTAINER_ vars
+      - name: Install dependencies
         run: |
-          echo "CONTAINER_REG=$(echo ${{ github.repository }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV;
-          echo "CONTAINER_TAG=$(echo ${{ github.ref_name }} | sed -e 's/[^A-Za-z0-9\.]/-/g')" >> $GITHUB_ENV;
+          set -ex
+          sudo apt-get update -qqy
+          sudo apt-get install -qqy opam libpcre2-dev zlib1g-dev libgtk2.0-dev libmbedtls-dev ninja-build
 
-      - name: Build devcontainer
-        run: earthly --platform=linux/arm64 +devcontainer --IMAGE_NAME="ghcr.io/${CONTAINER_REG}_devcontainer" --IMAGE_TAG="${CONTAINER_TAG}-arm64" --IMAGE_CACHE="ghcr.io/haxefoundation/haxe_devcontainer:development-arm64"
-        env:
-          EARTHLY_PUSH: "${{ github.event_name == 'push' }}"
-          EARTHLY_USE_INLINE_CACHE: true
-          EARTHLY_SAVE_INLINE_CACHE: true
+      - name: Install OCaml libraries
+        if: steps.cache-opam.outputs.cache-hit != 'true'
+        run: |
+          set -ex
+          opam init -c ${{ env.OCAML_VERSION }}
+          opam pin add haxe . --no-action
+          opam install haxe --deps-only --assume-depexts
+          opam list
+          ocamlopt -v
 
       - name: Set ADD_REVISION=1 for non-release
         if: ${{ !startsWith(github.ref, 'refs/tags/') }}
         run: echo "ADD_REVISION=1" >> $GITHUB_ENV
 
-      - name: Build
-        run: earthly --platform=linux/arm64 +build --ADD_REVISION="$ADD_REVISION" --SET_SAFE_DIRECTORY="true"
-        env:
-          EARTHLY_PUSH: "${{ github.event_name == 'push' }}"
-          EARTHLY_REMOTE_CACHE: "ghcr.io/${{env.CONTAINER_REG}}_cache:build-${{env.CONTAINER_TAG}}-arm64"
+      - name: Build Haxe
+        run: |
+          set -ex
+          eval $(opam env)
+          opam config exec -- make -s -j`nproc` STATICLINK=1 haxe
+          opam config exec -- make -s haxelib
+          make -s package_unix
+          ls -l out
+          ldd -v ./haxe
+          ldd -v ./haxelib
+
+      # https://stackoverflow.com/questions/58033366/how-to-get-current-branch-within-github-actions
+      - name: Extract branch name
+        id: extract_branch
+        shell: bash
+        run: echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
 
       - name: Upload artifact
         uses: actions/upload-artifact@v4
         with:
           name: linuxArm64Binaries
-          path: out/linux/arm64
+          path: out
+
+  linux-arm64-test:
+    needs: linux-arm64-build
+    runs-on: ubuntu-22.04-arm
+    env:
+      PLATFORM: linux-arm64
+      TEST: ${{matrix.target}}
+      HXCPP_COMPILE_CACHE: ~/hxcache
+      HAXE_STD_PATH: /usr/local/share/haxe/std
+    strategy:
+      fail-fast: false
+      matrix:
+        target: [macro, js, cpp, jvm, php, python, lua, neko]
+        include:
+          - target: lua
+            APT_PACKAGES: ncurses-dev
+    steps:
+      - uses: actions/checkout@main
+        with:
+          submodules: recursive
+      - uses: actions/download-artifact@v4
+        with:
+          name: linuxArm64Binaries
+          path: linuxBinaries
+
+      @import install-neko-unix.yml
+
+      - name: Setup Haxe
+        run: |
+          sudo apt install -qqy libmbedtls-dev
+
+          set -ex
+          tar -xf linuxBinaries/*_bin.tar.gz -C linuxBinaries --strip-components=1
+          sudo mkdir -p /usr/local/bin/
+          sudo mkdir -p /usr/local/share/haxe/
+          sudo ln -s `pwd`/linuxBinaries/haxe /usr/local/bin/haxe
+          sudo ln -s `pwd`/linuxBinaries/haxelib /usr/local/bin/haxelib
+          sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
+
+      - name: Print Haxe version
+        run: haxe -version
+
+      - name: Setup haxelib
+        run: |
+          set -ex
+          mkdir ~/haxelib
+          haxelib setup ~/haxelib
+
+      - name: Install apt packages
+        if: matrix.APT_PACKAGES
+        run: |
+          set -ex
+          sudo apt update -qqy
+          sudo apt install -qqy ${{matrix.APT_PACKAGES}}
+
+      - name: Test
+        run: haxe RunCi.hxml
+        working-directory: ${{github.workspace}}/tests
 
   mac-build:
     strategy:
@@ -314,7 +372,6 @@ jobs:
       PLATFORM: mac${{ matrix.os == 'macos-14' && '-arm64' || '' }}
       OPAMYES: 1
       MACOSX_DEPLOYMENT_TARGET: 10.13
-      OCAML_VERSION: 5.1.1
     steps:
       - uses: actions/checkout@main
         with:
@@ -325,7 +382,7 @@ jobs:
         uses: actions/cache@v4
         with:
           path: ~/.opam/
-          key: ${{ matrix.os }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
+          key: ${{ matrix.os }}-${{ env.OCAML_VERSION }}-${{ hashFiles('./haxe.opam', './libs/') }}-1
 
       @import install-neko-unix.yml
       @import build-mac.yml
@@ -418,8 +475,8 @@ jobs:
 
   deploy:
     if: success() && github.repository_owner == 'HaxeFoundation' && github.event_name != 'pull_request'
-    needs: [linux-test, linux-arm64, mac-test, windows64-test]
-    runs-on: ubuntu-20.04
+    needs: [linux-test, linux-arm64-test, mac-test, windows64-test]
+    runs-on: ubuntu-22.04
     steps:
       # this is only needed for to get `COMMIT_DATE`...
       # maybe https://github.community/t/expose-commit-timestamp-in-the-github-context-data/16460/3
@@ -488,8 +545,8 @@ jobs:
 
   deploy_apidoc:
     if: success() && github.repository_owner == 'HaxeFoundation' && github.event_name != 'pull_request'
-    needs: [linux-test, linux-arm64, mac-test, windows64-test]
-    runs-on: ubuntu-20.04
+    needs: [linux-test, linux-arm64-test, mac-test, windows64-test]
+    runs-on: ubuntu-22.04
     steps:
       - name: Install dependencies
         run: |

+ 5 - 4
haxe.opam

@@ -11,7 +11,7 @@ maintainer: ["Haxe Foundation <[email protected]>" "Andy Li <[email protected]>
 authors: "Haxe Foundation <[email protected]>"
 homepage: "https://haxe.org/"
 bug-reports: "https://github.com/HaxeFoundation/haxe/issues"
-license: ["GPL2+" "MIT"]
+license: ["GPL-2.0-or-later" "MIT"]
 dev-repo: "git+https://github.com/HaxeFoundation/haxe.git"
 build: [
   [make]
@@ -19,19 +19,20 @@ build: [
 install: [make "install" "INSTALL_DIR=%{prefix}%"]
 remove: [make "uninstall" "INSTALL_DIR=%{prefix}%"]
 depends: [
-  ("ocaml" {>= "5.0"} & ("camlp5" {build}))
-    | ("ocaml" {>= "4.08" & < "5.0"} & ("camlp5" {build & = "8.00.03"}))
+  "ocaml"
   "ocamlfind" {build}
-  "dune" {>= "1.11" & < "3.16"}
+  "dune" {>= "3.17"}
   "sedlex" {>= "2.0"}
   "xml-light"
   "extlib" {>= "1.7.8"}
   "sha"
   "camlp-streams"
+  "ppx_parser" {>= "0.2.0"}
   "conf-libpcre2-8"
   "conf-zlib"
   "conf-neko"
   "luv" {>= "0.5.13"}
   "ipaddr"
   "terminal_size"
+  "domainslib"
 ]

+ 0 - 18
libs/.gitignore

@@ -1,18 +0,0 @@
-*.obj
-*.o
-*.cmx
-*.cmi
-*.cmxa
-*.a
-*.exe
-.*.swp
-*.lib
-
-/xml-light/doc
-/xml-light/xml_lexer.ml
-/xml-light/xml_parser.ml
-/xml-light/xml_parser.mli
-
-/ilib/dump
-*.cmo
-*.cma

+ 0 - 23
libs/Makefile

@@ -1,23 +0,0 @@
-OCAMLOPT = ocamlopt
-OCAMLC = ocamlc
-TARGET_FLAG = all
-LIBS=extlib-leftovers extc neko javalib ilib swflib objsize pcre2 ziplib
-
-all: $(LIBS)
-$(LIBS):
-	$(MAKE) -C $@ OCAMLOPT=$(OCAMLOPT) OCAMLC=$(OCAMLC) $(TARGET_FLAG)
-
-clean:
-	$(MAKE) -C extlib-leftovers clean
-	$(MAKE) -C extc clean
-	$(MAKE) -C neko clean
-	$(MAKE) -C javalib clean
-	$(MAKE) -C ilib clean
-	$(MAKE) -C swflib clean
-	$(MAKE) -C objsize clean
-	$(MAKE) -C pcre2 clean
-	$(MAKE) -C ziplib clean
-
-.PHONY: all clean $(LIBS)
-
-Makefile: ;

+ 0 - 30
libs/extc/Makefile

@@ -1,30 +0,0 @@
-ALL_CFLAGS = $(CFLAGS)
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-SRC = extc.ml process.ml extc_stubs.c process_stubs.c
-
-all: bytecode native
-
-bytecode: extc.cma
-
-native: extc.cmxa
-
-extc.cma: extc_stubs.o process_stubs.o extc.ml process.ml
-	ocamlfind $(OCAMLC) -safe-string -a -o extc.cma -package extlib extc.ml process.ml
-
-extc.cmxa: extc.ml process.ml extc_stubs.o process_stubs.o
-	ocamlfind $(OCAMLOPT) -safe-string -a -o extc.cmxa -package extlib extc.ml process.ml
-
-extc_stubs.o: extc_stubs.c
-	ocamlfind $(OCAMLC) -safe-string $(ALL_CFLAGS) extc_stubs.c
-
-process_stubs.o: process_stubs.c
-	ocamlfind $(OCAMLC) -safe-string $(ALL_CFLAGS) process_stubs.c
-
-clean:
-	rm -f extc.cma extc.cmi extc.cmx extc.cmxa extc.o extc.obj extc.lib extc_stubs.obj extc_stubs.o process.cmx process.obj process.cmi process.o process_stubs.obj process_stubs.o
-	rm -f extc.a libextc.a libextc.lib extc.cmo process.cmo
-
-.PHONY: all bytecode native clean
-Makefile: ;
-$(SRC): ;

+ 1 - 7
libs/extc/process_stubs.c

@@ -37,13 +37,7 @@
 #	include <unistd.h>
 #	include <errno.h>
 #	include <string.h>
-#	ifndef __APPLE__
-#		if defined(__FreeBSD__) || defined(__DragonFly__)
-#			include <sys/wait.h>
-#		else
-#			include <wait.h>
-#		endif
-#	endif
+#	include <sys/wait.h>
 #endif
 
 #ifdef _WIN32

+ 0 - 35
libs/extlib-leftovers/Makefile

@@ -1,35 +0,0 @@
-# Makefile contributed by Alain Frisch
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-
-MODULES = \
- multiArray rbuffer uCharExt uTF8
-
-# the list is topologically sorted
-
-MLI = $(MODULES:=.mli)
-SRC = $(MLI) $(MODULES:=.ml)
-
-all: bytecode native
-
-opt: native
-
-bytecode: extlib-leftovers.cma
-
-native: extlib-leftovers.cmxa
-
-extlib-leftovers.cma: $(SRC)
-	$(OCAMLC) -safe-string -a -o extlib-leftovers.cma $(SRC)
-
-extlib-leftovers.cmxa: $(SRC)
-	$(OCAMLOPT) -safe-string -g -a -o extlib-leftovers.cmxa $(SRC)
-
-clean:
-	rm -f $(wildcard *.cmo) $(wildcard *.cmx) $(wildcard *.o) $(wildcard *.cmi) $(wildcard *.cma) $(wildcard *.cmxa) $(wildcard *.a) $(wildcard *.lib) $(wildcard *.obj)
-	rm -Rf doc
-
-.PHONY: all opt bytecode native doc copy install uninstall clean
-
-Makefile: ;
-
-$(SRC): ;

+ 0 - 23
libs/neko/Makefile

@@ -1,23 +0,0 @@
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-SRC=nast.ml nxml.ml binast.ml nbytecode.ml ncompile.ml
-
-all: bytecode native
-
-native: neko.cmxa
-
-bytecode: neko.cma
-
-neko.cmxa: $(SRC)
-	ocamlfind $(OCAMLOPT) -package extlib -safe-string -a -o neko.cmxa $(SRC)
-
-neko.cma: $(SRC)
-	ocamlfind $(OCAMLC) -package extlib -safe-string -a -o neko.cma $(SRC)
-
-clean:
-	rm -rf neko.cmxa neko.cma neko.lib neko.a $(wildcard *.cmx) $(wildcard *.obj) $(wildcard *.o) $(wildcard *.cmi) $(wildcard *.cmo)
-
-.PHONY: all bytecode native clean
-
-Makefile: ;
-$(SRC): ;

+ 0 - 29
libs/objsize/Makefile

@@ -1,29 +0,0 @@
-ALL_CFLAGS = $(CFLAGS) -I .
-LIBS =
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-SRC=objsize.mli objsize.ml
-
-all: bytecode native
-
-bytecode: objsize.cma
-
-native: objsize.cmxa
-
-objsize.cma: c_objsize.o $(SRC)
-	$(OCAMLC) -safe-string -a -o objsize.cma $(LIBS) $(SRC)
-
-objsize.cmxa: c_objsize.o $(SRC)
-	$(OCAMLOPT) -safe-string -a -o objsize.cmxa $(LIBS) $(SRC)
-
-c_objsize.o: c_objsize.c
-	$(OCAMLC) -safe-string $(ALL_CFLAGS) c_objsize.c
-
-clean:
-	rm -rf $(wildcard *.cma) $(wildcard *.cmxa) $(wildcard *.cmx) $(wildcard *.cmi) $(wildcard *.cmo) $(wildcard *.obj) $(wildcard *.o) $(wildcard *.a)
-
-.PHONY: all bytecode native clean
-
-Makefile: ;
-$(SRC): ;
-c_objsize.c: ;

+ 0 - 28
libs/pcre2/Makefile

@@ -1,28 +0,0 @@
-ALL_CFLAGS = $(CFLAGS) -I pcre2
-LIBS =
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-SRC = pcre2.ml pcre2_stubs.c
-
-all: bytecode native
-
-bytecode: pcre2.cma
-
-native: pcre2.cmxa
-
-pcre2.cma: pcre2_stubs.o pcre2.ml
-	$(OCAMLC) -safe-string -a -o pcre2.cma $(LIBS) pcre2.ml
-
-pcre2.cmxa: pcre2.ml pcre2_stubs.o
-	$(OCAMLOPT) -safe-string -a -o pcre2.cmxa $(LIBS) pcre2.ml
-
-pcre2_stubs.o: pcre2_stubs.c
-	$(OCAMLC) -safe-string $(ALL_CFLAGS) pcre2_stubs.c
-
-clean:
-	rm -f pcre2.cma pcre2.cmi pcre2.cmx pcre2.cmxa pcre2.o pcre2.obj pcre2_stubs.obj pcre2_stubs.o
-	rm -f pcre2.a libpcre2-8.a libpcre2-8.lib pcre2.cmo
-
-.PHONY: all bytecode native clean
-Makefile: ;
-$(SRC): ;

+ 0 - 81
libs/swflib/Makefile

@@ -1,81 +0,0 @@
-# Makefile generated by OCamake
-# http://tech.motion-twin.com
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-.SUFFIXES : .ml .mli .cmo .cmi .cmx .mll .mly
-
-ALL_CFLAGS= $(CFLAGS) -safe-string -package extlib -I ../extlib-leftovers -I ../extc -g
-LIBS=
-
-SRC=actionScript.ml as3hl.mli as3.mli png.ml swflib.sln swf.ml swfPic.ml as3code.ml as3hlparse.ml as3parse.ml png.mli swfParser.ml
-
-MODULES=as3code.cmx png.cmx swf.cmx actionScript.cmx as3parse.cmx swfPic.cmx as3hlparse.cmx swfParser.cmx
-
-all: native bytecode
-
-native: swflib.cmxa
-
-bytecode: swflib.cma
-
-swflib.cmxa: $(MODULES)
-	ocamlfind $(OCAMLOPT) -safe-string -o swflib.cmxa -a $(LIBS) $(MODULES)
-
-swflib.cma: $(MODULES:.cmx=.cmo)
-	ocamlfind $(OCAMLC) -safe-string -o swflib.cma -a $(LFLAGS) $(LIBS) $(MODULES:.cmx=.cmo)
-
-actionScript.cmx: swf.cmx
-
-actionScript.cmo: swf.cmi
-
-as3code.cmo: as3.cmi
-
-as3code.cmx: as3.cmi
-
-as3hl.cmi: as3.cmi
-
-as3hlparse.cmo: as3parse.cmo as3hl.cmi as3code.cmo as3.cmi
-
-as3hlparse.cmx: as3parse.cmx as3hl.cmi as3code.cmx as3.cmi
-
-as3parse.cmo: as3code.cmo as3.cmi
-
-as3parse.cmx: as3code.cmx as3.cmi
-
-png.cmo: png.cmi
-
-png.cmx: png.cmi
-
-swf.cmo: as3.cmi
-
-swf.cmx: as3.cmi
-
-swfParser.cmo: swf.cmo as3parse.cmo actionScript.cmo
-
-swfParser.cmx: swf.cmx as3parse.cmx actionScript.cmx
-
-swfPic.cmx: swf.cmx png.cmi
-
-clean:
-	rm -f swflib.cmxa swflib.cma swflib.lib swflib.a as3.cmi as3hl.cmi
-	rm -f $(MODULES) $(MODULES:.cmx=.obj) $(MODULES:.cmx=.cmi) $(MODULES:.cmx=.o) $(MODULES:.cmx=.cmo)
-
-# SUFFIXES
-.ml.cmo:
-	ocamlfind $(OCAMLC) $(ALL_CFLAGS) -c $<
-
-.ml.cmx:
-	ocamlfind $(OCAMLOPT) $(ALL_CFLAGS) -c $<
-
-.mli.cmi:
-	ocamlfind $(OCAMLC) $(ALL_CFLAGS) $<
-
-.mll.ml:
-	ocamlfind ocamllex $<
-
-.mly.ml:
-	ocamlfind ocamlyacc $<
-
-.PHONY: all bytecode native clean
-
-Makefile: ;
-$(SRC): ;

+ 0 - 22
libs/ziplib/Makefile

@@ -1,22 +0,0 @@
-OCAMLOPT=ocamlopt
-OCAMLC=ocamlc
-SRC=zlib.mli zlib.ml zip.mli zip.ml
-
-all: native bytecode
-
-native: ziplib.cmxa
-ziplib.cmxa: $(SRC)
-	ocamlfind $(OCAMLOPT) -safe-string -g -I ../extlib -I ../extc -a -o ziplib.cmxa $(SRC)
-
-bytecode: ziplib.cma
-ziplib.cma: $(SRC)
-	ocamlfind $(OCAMLC) -safe-string -g -I ../extlib -I ../extc -a -o ziplib.cma $(SRC)
-
-clean:
-	rm -rf ziplib.cmxa ziplib.cma ziplib.lib ziplib.a $(wildcard *.cmx) $(wildcard *.obj) $(wildcard *.o) $(wildcard *.cmi) $(wildcard *.cmo)
-
-.PHONY: all native bytecode clean
-
-Makefile: ;
-
-$(SRC): ;

+ 0 - 7
libs/ziplib/test/Makefile

@@ -1,7 +0,0 @@
-OCAMLOPT=ocamlopt
-
-all: ../zip.cmxa minizip.ml
-	$(OCAMLOPT) -g -g -I .. -I ../../extc -o minizip -cclib ../../extc/extc_stubs.o -cclib -lz unix.cmxa ../zip.cmxa minizip.ml
-
-clean:
-	rm -rf minizip $(wildcard *.cmx) $(wildcard *.obj) $(wildcard *.o) $(wildcard *.cmi)

+ 52 - 10
src-json/define.json

@@ -18,7 +18,8 @@
 	},
 	{
 		"name": "AnalyzerTimes",
-		"define": "analyzer-times",
+		"define": "times.analyzer",
+		"deprecatedDefine": "analyzer-times",
 		"doc": "Record detailed timers for the analyzer",
 		"params": ["level: 0 | 1 | 2"]
 	},
@@ -53,6 +54,7 @@
 		"define": "dce",
 		"doc": "Set the dead code elimination mode. (default: std)",
 		"params": ["mode: std | full | no"],
+		"default": "std",
 		"links": ["https://haxe.org/manual/cr-dce.html"]
 	},
 	{
@@ -71,6 +73,11 @@
 		"define": "disable-hxb-cache",
 		"doc": "Use in-memory cache instead of hxb powered cache."
 	},
+	{
+		"name": "DisableHxbOptimizations",
+		"define": "disable-hxb-optimizations",
+		"doc": "Disable shortcuts used by hxb cache to speed up display requests."
+	},
 	{
 		"name": "DisableUnicodeStrings",
 		"define": "disable-unicode-strings",
@@ -109,6 +116,7 @@
 		"name": "DumpPath",
 		"define": "dump-path",
 		"doc": "Path to generate dumps to (default: \"dump\").",
+		"default": "dump",
 		"params": ["path"]
 	},
 	{
@@ -119,7 +127,8 @@
 	{
 		"name": "DumpIgnoreVarIds",
 		"define": "dump-ignore-var-ids",
-		"doc": "Remove variable IDs from non-pretty dumps (helps with diff)."
+		"doc": "Remove variable IDs from non-pretty dumps (helps with diff).",
+		"default": "1"
 	},
 	{
 		"name": "DynamicInterfaceClosures",
@@ -132,6 +141,7 @@
 		"define": "eval-call-stack-depth",
 		"doc": "Set maximum call stack depth for eval. (default: 1000)",
 		"platforms": ["eval"],
+		"default": "1000",
 		"params": ["depth"]
 	},
 	{
@@ -145,6 +155,7 @@
 		"define": "eval-print-depth",
 		"doc": "Set maximum print depth (before replacing with '<...>') for eval. (default: 5)",
 		"platforms": ["eval"],
+		"default": "5",
 		"params": ["depth"]
 	},
 	{
@@ -161,7 +172,8 @@
 	},
 	{
 		"name": "EvalTimes",
-		"define": "eval-times",
+		"define": "times.eval",
+		"deprecatedDefine": "eval-times",
 		"doc": "Record per-method execution times in macro/interp mode. Implies eval-stack.",
 		"platforms": ["eval"]
 	},
@@ -172,7 +184,8 @@
 	},
 	{
 		"name": "FilterTimes",
-		"define": "filter-times",
+		"define": "times.filter",
+		"deprecatedDefine": "filter-times",
 		"doc": "Record per-filter execution times upon --times."
 	},
 	{
@@ -233,6 +246,27 @@
 		"doc": "The current Haxe version value in SemVer format.",
 		"reserved": true
 	},
+	{
+		"name": "Haxe3",
+		"define": "haxe3",
+		"doc": "The current Haxe major version is >= 3.",
+		"default": "1",
+		"reserved": true
+	},
+	{
+		"name": "Haxe4",
+		"define": "haxe4",
+		"doc": "The current Haxe major version is >= 4.",
+		"default": "1",
+		"reserved": true
+	},
+	{
+		"name": "Haxe5",
+		"define": "haxe5",
+		"doc": "The current Haxe major version is >= 5.",
+		"default": "1",
+		"reserved": true
+	},
 	{
 		"name": "HaxeNext",
 		"define": "haxe-next",
@@ -262,13 +296,14 @@
 	{
 		"name": "HlVer",
 		"define": "hl-ver",
-		"doc": "The HashLink version to target. (default: 1.10.0)",
+		"doc": "The HashLink version to target. (default: 1.15.0)",
 		"platforms": ["hl"],
 		"params": ["version"]
 	},
 	{
 		"name": "HxbTimes",
-		"define": "hxb-times",
+		"define": "times.hxb",
+		"deprecatedDefine": "hxb-times",
 		"doc": "Display hxb timing when used with `--times`."
 	},
 	{
@@ -475,6 +510,7 @@
 		"name": "LoopUnrollMaxCost",
 		"define": "loop-unroll-max-cost",
 		"doc": "Maximum cost (number of expressions * iterations) before loop unrolling is canceled. (default: 250)",
+		"default": "250",
 		"params": ["cost"]
 	},
 	{
@@ -505,7 +541,8 @@
 	},
 	{
 		"name": "MacroTimes",
-		"define": "macro-times",
+		"define": "times.macro",
+		"deprecatedDefine": "macro-times",
 		"doc": "Display per-macro timing when used with `--times`."
 	},
 	{
@@ -549,7 +586,8 @@
 	{
 		"name": "NoDeprecationWarnings",
 		"define": "no-deprecation-warnings",
-		"doc": "Do not warn if fields annotated with `@:deprecated` are used."
+		"doc": "Do not warn if fields annotated with `@:deprecated` are used.",
+		"deprecated": "Use -w to configure warnings. See https://haxe.org/manual/cr-warnings.html for more information."
 	},
 	{
 		"name": "NoFlashOverride",
@@ -601,7 +639,8 @@
 	{
 		"name": "OldErrorFormat",
 		"define": "old-error-format",
-		"doc": "Use Haxe 3.x zero-based column error messages instead of new one-based format."
+		"doc": "Use Haxe 3.x zero-based column error messages instead of new one-based format.",
+		"deprecated": "OldErrorFormat has been removed in Haxe 5"
 	},
 	{
 		"name": "PhpPrefix",
@@ -786,7 +825,8 @@
 	{
 		"name": "WarnVarShadowing",
 		"define": "warn-var-shadowing",
-		"doc": "Warn about shadowing variable declarations."
+		"doc": "Warn about shadowing variable declarations.",
+		"deprecated": "Use -w to configure warnings. See https://haxe.org/manual/cr-warnings.html for more information."
 	},
 	{
 		"name": "NoTre",
@@ -797,6 +837,7 @@
 		"name": "MessageReporting",
 		"define": "message.reporting",
 		"doc": "Select message reporting mode for compiler output. (default: pretty)",
+		"default": "pretty",
 		"params": ["mode: classic | pretty | indent"]
 	},
 	{
@@ -818,6 +859,7 @@
 		"name": "MessageLogFormat",
 		"define": "message.log-format",
 		"doc": "Select message reporting mode for message log file. (default: indent)",
+		"default": "indent",
 		"params": ["format: classic | pretty | indent"]
 	}
 ]

+ 7 - 1
src-json/warning.json

@@ -76,7 +76,8 @@
 	{
 		"name": "WVarShadow",
 		"doc": "A local variable hides another by using the same name",
-		"parent": "WTyper"
+		"parent": "WTyper",
+		"enabled": false
 	},
 	{
 		"name": "WExternWithExpr",
@@ -132,5 +133,10 @@
 		"name": "WUnboundTypeParameter",
 		"doc": "Hxb (either --hxb output or haxe compiler cache) failed to link a type parameter to an actual type",
 		"parent": "WHxb"
+	},
+	{
+		"name": "WUnclosedMonomorph",
+		"doc": "Hxb writer failed to close a monomorph (that monomorph should have been closed in the first place)",
+		"parent": "WHxb"
 	}
 ]

+ 0 - 14
src-prebuild/dune

@@ -1,14 +0,0 @@
-(include_subdirs no)
-
-(env
-	(_
-		(flags (-w -9 -w -32))
-	)
-)
-
-(executable
-	(name prebuild)
-	(public_name haxe_prebuild)
-	(package haxe_prebuild)
-	(libraries extlib json)
-)

+ 0 - 345
src/codegen/codegen.ml

@@ -19,7 +19,6 @@
 
 open Ast
 open Type
-open Common
 open Globals
 open Extlib_leftovers
 
@@ -48,152 +47,6 @@ let get_properties fields =
 			| _ -> acc
 	) [] fields
 
-let add_property_field com c =
-	let p = c.cl_pos in
-	let props = get_properties (c.cl_ordered_statics @ c.cl_ordered_fields) in
-	match props with
-	| [] -> ()
-	| _ ->
-		let fields,values = List.fold_left (fun (fields,values) (n,v) ->
-			let cf = mk_field n com.basic.tstring p null_pos in
-			PMap.add n cf fields,((n,null_pos,NoQuotes),Texpr.Builder.make_string com.basic v p) :: values
-		) (PMap.empty,[]) props in
-		let t = mk_anon ~fields (ref Closed) in
-		let e = mk (TObjectDecl values) t p in
-		let cf = mk_field ~static:true "__properties__" t p null_pos in
-		cf.cf_expr <- Some e;
-		c.cl_statics <- PMap.add cf.cf_name cf c.cl_statics;
-		c.cl_ordered_statics <- cf :: c.cl_ordered_statics
-
-(* -------------------------------------------------------------------------- *)
-(* FIX OVERRIDES *)
-
-(*
-	on some platforms which doesn't support type parameters, we must have the
-	exact same type for overridden/implemented function as the original one
-*)
-
-let rec find_field com c f =
-	try
-		(match c.cl_super with
-		| None ->
-			raise Not_found
-		| Some ( {cl_path = (["cpp"],"FastIterator")}, _ ) ->
-			raise Not_found (* This is a strongly typed 'extern' and the usual rules don't apply *)
-		| Some (c,_) ->
-			find_field com c f)
-	with Not_found -> try
-		if com.platform = Cpp || com.platform = Hl then (* uses delegation for interfaces *)
-			raise Not_found;
-		let rec loop = function
-			| [] ->
-				raise Not_found
-			| (c,_) :: l ->
-				try
-					find_field com c f
-				with
-					Not_found -> loop l
-		in
-		loop c.cl_implements
-	with Not_found ->
-		let f = PMap.find f.cf_name c.cl_fields in
-		(match f.cf_kind with Var { v_read = AccRequire _ } -> raise Not_found | _ -> ());
-		f
-
-let fix_override com c f fd =
-	let f2 = (try Some (find_field com c f) with Not_found -> None) in
-	match f2,fd with
-		| Some (f2), Some(fd) ->
-			let targs, tret = (match follow f2.cf_type with TFun (args,ret) -> args, ret | _ -> die "" __LOC__) in
-			let changed_args = ref [] in
-			let prefix = "_tmp_" in
-			let nargs = List.map2 (fun ((v,ct) as cur) (_,_,t2) ->
-				try
-					type_eq EqStrict (monomorphs c.cl_params (monomorphs f.cf_params v.v_type)) t2;
-					(* Flash generates type parameters with a single constraint as that constraint type, so we
-					   have to detect this case and change the variable (issue #2712). *)
-					begin match follow v.v_type with
-						| TInst({cl_kind = KTypeParameter ttp} as cp,_) when com.platform = Flash ->
-							begin match get_constraints ttp with
-							| [tc] ->
-								if List.exists (fun tp -> tp.ttp_name = (snd cp.cl_path)) c.cl_params then raise (Unify_error [])
-							| _ ->
-								()
-							end
-						| _ ->
-							()
-					end;
-					cur
-				with Unify_error _ ->
-					let v2 = alloc_var VGenerated (prefix ^ v.v_name) t2 v.v_pos in
-					changed_args := (v,v2) :: !changed_args;
-					v2,ct
-			) fd.tf_args targs in
-			let fd2 = {
-				tf_args = nargs;
-				tf_type = tret;
-				tf_expr = (match List.rev !changed_args with
-					| [] -> fd.tf_expr
-					| args ->
-						let e = fd.tf_expr in
-						let el = (match e.eexpr with TBlock el -> el | _ -> [e]) in
-						let p = (match el with [] -> e.epos | e :: _ -> e.epos) in
-						let el_v = List.map (fun (v,v2) ->
-							mk (TVar (v,Some (mk (TCast (mk (TLocal v2) v2.v_type p,None)) v.v_type p))) com.basic.tvoid p
-						) args in
-						{ e with eexpr = TBlock (el_v @ el) }
-				);
-			} in
-			let targs = List.map (fun(v,c) -> (v.v_name, Option.is_some c, v.v_type)) nargs in
-			let fde = (match f.cf_expr with None -> die "" __LOC__ | Some e -> e) in
-			f.cf_expr <- Some { fde with eexpr = TFunction fd2 };
-			f.cf_type <- TFun(targs,tret);
-		| Some(f2), None when (has_class_flag c CInterface) ->
-			let targs, tret = (match follow f2.cf_type with TFun (args,ret) -> args, ret | _ -> die "" __LOC__) in
-			f.cf_type <- TFun(targs,tret)
-		| _ ->
-			()
-
-let fix_overrides com t =
-	match t with
-	| TClassDecl c ->
-		(* overrides can be removed from interfaces *)
-		if (has_class_flag c CInterface) then
-			c.cl_ordered_fields <- List.filter (fun f ->
-				try
-					if find_field com c f == f then raise Not_found;
-					c.cl_fields <- PMap.remove f.cf_name c.cl_fields;
-					false;
-				with Not_found ->
-					true
-			) c.cl_ordered_fields;
-		List.iter (fun f ->
-			match f.cf_expr, f.cf_kind with
-			| Some { eexpr = TFunction fd }, Method (MethNormal | MethInline) ->
-				fix_override com c f (Some fd)
-			| None, Method (MethNormal | MethInline) when (has_class_flag c CInterface) ->
-				fix_override com c f None
-			| _ ->
-				()
-		) c.cl_ordered_fields
-	| _ ->
-		()
-
-(*
-	PHP does not allow abstract classes extending other abstract classes to override any fields, so these duplicates
-	must be removed from the child interface
-*)
-let fix_abstract_inheritance com t =
-	match t with
-	| TClassDecl c when (has_class_flag c CInterface) ->
-		c.cl_ordered_fields <- List.filter (fun f ->
-			let b = try (find_field com c f) == f
-			with Not_found -> false in
-			if not b then c.cl_fields <- PMap.remove f.cf_name c.cl_fields;
-			b;
-		) c.cl_ordered_fields
-	| _ -> ()
-
 (* -------------------------------------------------------------------------- *)
 (* MISC FEATURES *)
 
@@ -216,204 +69,6 @@ let bytes_serialize data =
 	let tbl = Array.init (String.length b64) (fun i -> String.get b64 i) in
 	Bytes.unsafe_to_string (Base64.str_encode ~tbl data)
 
-module Dump = struct
-	(*
-		Make a dump of the full typed AST of all types
-	*)
-	let create_dumpfile acc l =
-		let ch = Path.create_file false ".dump" acc l in
-		let buf = Buffer.create 0 in
-		buf, (fun () ->
-			output_string ch (Buffer.contents buf);
-			close_out ch)
-
-	let create_dumpfile_from_path com path =
-		let buf,close = create_dumpfile [] ((dump_path com) :: (platform_name_macro com) :: fst path @ [snd path]) in
-		buf,close
-
-	let dump_types com pretty =
-		let s_type = s_type (Type.print_context()) in
-		let s_expr,s_type_param = if not pretty then
-			(Type.s_expr_ast (not (Common.defined com Define.DumpIgnoreVarIds)) "\t"),(Printer.s_type_param "")
-		else
-			(Type.s_expr_pretty false "\t" true),(s_type_param s_type)
-		in
-		let params tl = match tl with
-			| [] -> ""
-			| l -> Printf.sprintf "<%s>" (String.concat ", " (List.map s_type_param l))
-		in
-		List.iter (fun mt ->
-			let path = Type.t_path mt in
-			let buf,close = create_dumpfile_from_path com path in
-			let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
-			let s_metas ml tabs =
-				let args el =
-					match el with
-					| [] -> ""
-					| el -> Printf.sprintf "(%s)" (String.concat ", " (List.map (fun e -> Ast.Printer.s_expr e) el)) in
-				match ml with
-				| [] -> ""
-				| ml -> String.concat " " (List.map (fun me -> match me with (m,el,_) -> "@" ^ Meta.to_string m ^ args el) ml) ^ "\n" ^ tabs in
-			(match mt with
-			| Type.TClassDecl c ->
-				let s_cf_expr f =
-					match f.cf_expr with
-					| None -> ""
-					| Some e -> Printf.sprintf "%s" (s_expr s_type e) in
-				let is_inline_var v : bool = v = Var { v_read = AccInline; v_write = AccNever } in
-				let rec print_field stat f =
-					print "\n\t%s%s%s%s%s %s%s"
-						(s_metas f.cf_meta "\t")
-						(if (has_class_field_flag f CfPublic && not ((has_class_flag c CExtern) || (has_class_flag c CInterface))) then "public " else "")
-						(if stat then "static " else "")
-						(match f.cf_kind with
-							| Var v when (is_inline_var f.cf_kind) -> "inline "
-							| Var v -> ""
-							| Method m ->
-								match m with
-								| MethNormal -> ""
-								| MethDynamic -> "dynamic "
-								| MethInline -> "inline "
-								| MethMacro -> "macro ")
-						(match f.cf_kind with Var v -> "var" | Method m -> "function")
-						(f.cf_name ^ match f.cf_kind with
-							| Var { v_read = AccNormal; v_write = AccNormal } -> ""
-							| Var v when (is_inline_var f.cf_kind) -> ""
-							| Var v -> "(" ^ s_access true v.v_read ^ "," ^ s_access false v.v_write ^ ")"
-							| _ -> "")
-						(params f.cf_params);
-					(match f.cf_kind with
-						| Var v -> print ":%s%s;" (s_type f.cf_type)
-							(match f.cf_expr with
-							| None -> ""
-							| Some e -> " = " ^ (s_cf_expr f));
-						| Method m -> if ((has_class_flag c CExtern) || (has_class_flag c CInterface)) then (
-							match f.cf_type with
-							| TFun(al,t) -> print "(%s):%s;" (String.concat ", " (
-								List.map (fun (n,o,t) -> n ^ ":" ^ (s_type t)) al))
-								(s_type t)
-							| _ -> ()
-						) else print "%s" (s_cf_expr f));
-					print "\n";
-					List.iter (fun f -> print_field stat f) f.cf_overloads
-				in
-				print "%s%s%s%s %s%s" (s_metas c.cl_meta "") (if c.cl_private then "private " else "") (if (has_class_flag c CExtern) then "extern " else "") (if (has_class_flag c CInterface) then "interface" else "class") (s_type_path path) (params c.cl_params);
-				(match c.cl_super with None -> () | Some (c,pl) -> print " extends %s" (s_type (TInst (c,pl))));
-				List.iter (fun (c,pl) -> print " implements %s" (s_type (TInst (c,pl)))) c.cl_implements;
-				(match c.cl_array_access with None -> () | Some t -> print " implements ArrayAccess<%s>" (s_type t));
-				print " {\n";
-				(match c.cl_constructor with
-				| None -> ()
-				| Some f -> print_field false f);
-				List.iter (print_field false) c.cl_ordered_fields;
-				List.iter (print_field true) c.cl_ordered_statics;
-				(match TClass.get_cl_init c with
-				| None -> ()
-				| Some e ->
-					print "\n\tstatic function __init__() ";
-					print "%s" (s_expr s_type e);
-					print "\n");
-				print "}";
-			| Type.TEnumDecl e ->
-				print "%s%s%senum %s%s {\n" (s_metas e.e_meta "") (if e.e_private then "private " else "") (if has_enum_flag e EnExtern then "extern " else "") (s_type_path path) (params e.e_params);
-				List.iter (fun n ->
-					let f = PMap.find n e.e_constrs in
-					print "\t%s%s;\n" f.ef_name (
-						match f.ef_type with
-						| TFun (al,t) -> Printf.sprintf "(%s)" (String.concat ", "
-							(List.map (fun (n,o,t) -> (if o then "?" else "") ^ n ^ ":" ^ (s_type t)) al))
-						| _ -> "")
-				) e.e_names;
-				print "}"
-			| Type.TTypeDecl t ->
-				print "%s%stypedef %s%s = %s" (s_metas t.t_meta "") (if t.t_private then "private " else "") (s_type_path path) (params t.t_params) (s_type t.t_type);
-			| Type.TAbstractDecl a ->
-				print "%s%sabstract %s%s%s%s {}" (s_metas a.a_meta "") (if a.a_private then "private " else "") (s_type_path path) (params a.a_params)
-				(String.concat " " (List.map (fun t -> " from " ^ s_type t) a.a_from))
-				(String.concat " " (List.map (fun t -> " to " ^ s_type t) a.a_to));
-			);
-			close();
-		) com.types
-
-	let dump_record com =
-		List.iter (fun mt ->
-			let buf,close = create_dumpfile_from_path com (t_path mt) in
-			let s = match mt with
-				| TClassDecl c -> Printer.s_tclass "" c
-				| TEnumDecl en -> Printer.s_tenum "" en
-				| TTypeDecl t -> Printer.s_tdef "" t
-				| TAbstractDecl a -> Printer.s_tabstract "" a
-			in
-			Buffer.add_string buf s;
-			close();
-		) com.types
-
-	let dump_position com =
-		List.iter (fun mt ->
-			match mt with
-				| TClassDecl c ->
-					let buf,close = create_dumpfile_from_path com (t_path mt) in
-					Printf.bprintf buf "%s\n" (s_type_path c.cl_path);
-					let field cf =
-						Printf.bprintf buf "\t%s\n" cf.cf_name;
-						begin match cf.cf_expr with
-						| None -> ()
-						| Some e ->
-							Printf.bprintf buf "%s\n" (Texpr.dump_with_pos "\t" e);
-						end
-					in
-					Option.may field c.cl_constructor;
-					List.iter field c.cl_ordered_statics;
-					List.iter field c.cl_ordered_fields;
-					close();
-				| _ ->
-					()
-		) com.types
-
-	let dump_types com =
-		match Common.defined_value_safe com Define.Dump with
-			| "pretty" -> dump_types com true
-			| "record" -> dump_record com
-			| "position" -> dump_position com
-			| _ -> dump_types com false
-
-	let dump_dependencies ?(target_override=None) com =
-		let target_name = match target_override with
-			| None -> platform_name_macro com
-			| Some s -> s
-		in
-		let dump_dependencies_path = [dump_path com;target_name;"dependencies"] in
-		let buf,close = create_dumpfile [] dump_dependencies_path in
-		let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
-		let dep = Hashtbl.create 0 in
-		List.iter (fun m ->
-			print "%s:\n" (Path.UniqueKey.lazy_path m.m_extra.m_file);
-			PMap.iter (fun _ mdep ->
-				let (ctx,m2) = match mdep.md_kind with
-					| MMacro when not com.is_macro_context ->
-						("[macro] ", (Option.get (com.get_macros())).module_lut#find mdep.md_path)
-					| _ ->
-						("", com.module_lut#find mdep.md_path)
-				in
-				let file = Path.UniqueKey.lazy_path m2.m_extra.m_file in
-				print "\t%s%s\n" ctx file;
-				let l = try Hashtbl.find dep file with Not_found -> [] in
-				Hashtbl.replace dep file (m :: l)
-			) m.m_extra.m_deps;
-		) com.Common.modules;
-		close();
-		let dump_dependants_path = [dump_path com;target_name;"dependants"] in
-		let buf,close = create_dumpfile [] dump_dependants_path in
-		let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
-		Hashtbl.iter (fun n ml ->
-			print "%s:\n" n;
-			List.iter (fun m ->
-				print "\t%s\n" (Path.UniqueKey.lazy_path m.m_extra.m_file);
-			) ml;
-		) dep;
-		close()
-end
-
 (*
 	Build a default safe-cast expression :
 	{ var $t = <e>; if( Std.is($t,<t>) ) $t else throw "Class cast error"; }

+ 223 - 0
src/codegen/dump.ml

@@ -0,0 +1,223 @@
+open Globals
+open Common
+open Type
+
+let dump_path defines =
+	Define.defined_value_safe ~default:"dump" defines Define.DumpPath
+
+(*
+	Make a dump of the full typed AST of all types
+*)
+let create_dumpfile acc l =
+	let ch = Path.create_file false ".dump" acc l in
+	let buf = Buffer.create 0 in
+	buf, (fun () ->
+		output_string ch (Buffer.contents buf);
+		close_out ch)
+
+let create_dumpfile_from_path com path =
+	let buf,close = create_dumpfile [] ((dump_path com.defines) :: (platform_name_macro com) :: fst path @ [snd path]) in
+	buf,close
+
+let dump_types com pretty =
+	let print_ids = not (Common.defined com Define.DumpIgnoreVarIds) in
+	let restore =
+		if not pretty then
+			let old = !TPrinting.MonomorphPrinting.show_mono_ids in
+			TPrinting.MonomorphPrinting.show_mono_ids := print_ids;
+			fun () -> TPrinting.MonomorphPrinting.show_mono_ids := old
+		else fun () -> ()
+	in
+	let s_type = s_type (Type.print_context()) in
+	let s_expr,s_type_param = if not pretty then
+		(Type.s_expr_ast print_ids "\t"),(Printer.s_type_param "")
+	else
+		(Type.s_expr_pretty false "\t" true),(s_type_param s_type)
+	in
+	let params tl = match tl with
+		| [] -> ""
+		| l -> Printf.sprintf "<%s>" (String.concat ", " (List.map s_type_param l))
+	in
+	let f mt =
+		let path = Type.t_path mt in
+		let buf,close = create_dumpfile_from_path com path in
+		let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
+		let s_metas ml tabs =
+			let args el =
+				match el with
+				| [] -> ""
+				| el -> Printf.sprintf "(%s)" (String.concat ", " (List.map (fun e -> Ast.Printer.s_expr e) el)) in
+			match ml with
+			| [] -> ""
+			| ml -> String.concat " " (List.map (fun me -> match me with (m,el,_) -> "@" ^ Meta.to_string m ^ args el) ml) ^ "\n" ^ tabs in
+		(match mt with
+		| Type.TClassDecl c ->
+			let s_cf_expr f =
+				match f.cf_expr with
+				| None -> ""
+				| Some e -> Printf.sprintf "%s" (s_expr s_type e) in
+			let is_inline_var v : bool = v = Var { v_read = AccInline; v_write = AccNever } in
+			let rec print_field stat f =
+				print "\n\t%s%s%s%s%s %s%s"
+					(s_metas f.cf_meta "\t")
+					(if (has_class_field_flag f CfPublic && not ((has_class_flag c CExtern) || (has_class_flag c CInterface))) then "public " else "")
+					(if stat then "static " else "")
+					(match f.cf_kind with
+						| Var v when (is_inline_var f.cf_kind) -> "inline "
+						| Var v -> ""
+						| Method m ->
+							match m with
+							| MethNormal -> ""
+							| MethDynamic -> "dynamic "
+							| MethInline -> "inline "
+							| MethMacro -> "macro ")
+					(match f.cf_kind with Var v -> "var" | Method m -> "function")
+					(f.cf_name ^ match f.cf_kind with
+						| Var { v_read = AccNormal; v_write = AccNormal } -> ""
+						| Var v when (is_inline_var f.cf_kind) -> ""
+						| Var v -> "(" ^ s_access true v.v_read ^ "," ^ s_access false v.v_write ^ ")"
+						| _ -> "")
+					(params f.cf_params);
+				(match f.cf_kind with
+					| Var v -> print ":%s%s;" (s_type f.cf_type)
+						(match f.cf_expr with
+						| None -> ""
+						| Some e -> " = " ^ (s_cf_expr f));
+					| Method m -> if ((has_class_flag c CExtern) || (has_class_flag c CInterface)) then (
+						match f.cf_type with
+						| TFun(al,t) -> print "(%s):%s;" (String.concat ", " (
+							List.map (fun (n,o,t) -> n ^ ":" ^ (s_type t)) al))
+							(s_type t)
+						| _ -> ()
+					) else print "%s" (s_cf_expr f));
+				print "\n";
+				List.iter (fun f -> print_field stat f) f.cf_overloads
+			in
+			print "%s%s%s%s %s%s" (s_metas c.cl_meta "") (if c.cl_private then "private " else "") (if (has_class_flag c CExtern) then "extern " else "") (if (has_class_flag c CInterface) then "interface" else "class") (s_type_path path) (params c.cl_params);
+			(match c.cl_super with None -> () | Some (c,pl) -> print " extends %s" (s_type (TInst (c,pl))));
+			List.iter (fun (c,pl) -> print " implements %s" (s_type (TInst (c,pl)))) c.cl_implements;
+			(match c.cl_array_access with None -> () | Some t -> print " implements ArrayAccess<%s>" (s_type t));
+			print " {\n";
+			(match c.cl_constructor with
+			| None -> ()
+			| Some f -> print_field false f);
+			List.iter (print_field false) c.cl_ordered_fields;
+			List.iter (print_field true) c.cl_ordered_statics;
+			(match TClass.get_cl_init c with
+			| None -> ()
+			| Some e ->
+				print "\n\tstatic function __init__() ";
+				print "%s" (s_expr s_type e);
+				print "\n");
+			print "}";
+		| Type.TEnumDecl e ->
+			print "%s%s%senum %s%s {\n" (s_metas e.e_meta "") (if e.e_private then "private " else "") (if has_enum_flag e EnExtern then "extern " else "") (s_type_path path) (params e.e_params);
+			List.iter (fun n ->
+				let f = PMap.find n e.e_constrs in
+				print "\t%s%s;\n" f.ef_name (
+					match f.ef_type with
+					| TFun (al,t) -> Printf.sprintf "(%s)" (String.concat ", "
+						(List.map (fun (n,o,t) -> (if o then "?" else "") ^ n ^ ":" ^ (s_type t)) al))
+					| _ -> "")
+			) e.e_names;
+			print "}"
+		| Type.TTypeDecl t ->
+			print "%s%stypedef %s%s = %s" (s_metas t.t_meta "") (if t.t_private then "private " else "") (s_type_path path) (params t.t_params) (s_type t.t_type);
+		| Type.TAbstractDecl a ->
+			print "%s%sabstract %s%s%s%s {}" (s_metas a.a_meta "") (if a.a_private then "private " else "") (s_type_path path) (params a.a_params)
+			(String.concat " " (List.map (fun t -> " from " ^ s_type t) a.a_from))
+			(String.concat " " (List.map (fun t -> " to " ^ s_type t) a.a_to));
+		);
+		close()
+	in
+	Parallel.run_in_new_pool com.timer_ctx (fun pool ->
+		Parallel.ParallelArray.iter pool f (Array.of_list com.types)
+	);
+	restore()
+
+let dump_record com =
+	let f mt =
+		let buf,close = create_dumpfile_from_path com (t_path mt) in
+		let s = match mt with
+			| TClassDecl c -> Printer.s_tclass "" c
+			| TEnumDecl en -> Printer.s_tenum "" en
+			| TTypeDecl t -> Printer.s_tdef "" t
+			| TAbstractDecl a -> Printer.s_tabstract "" a
+		in
+		Buffer.add_string buf s;
+		close()
+	in
+	Parallel.run_in_new_pool com.timer_ctx (fun pool ->
+		Parallel.ParallelArray.iter pool f (Array.of_list com.types)
+	)
+
+let dump_position com =
+	let f mt =
+		match mt with
+			| TClassDecl c ->
+				let buf,close = create_dumpfile_from_path com (t_path mt) in
+				Printf.bprintf buf "%s\n" (s_type_path c.cl_path);
+				let field cf =
+					Printf.bprintf buf "\t%s\n" cf.cf_name;
+					begin match cf.cf_expr with
+					| None -> ()
+					| Some e ->
+						Printf.bprintf buf "%s\n" (Texpr.dump_with_pos "\t" e);
+					end
+				in
+				Option.may field c.cl_constructor;
+				List.iter field c.cl_ordered_statics;
+				List.iter field c.cl_ordered_fields;
+				close();
+			| _ ->
+				()
+	in
+	Parallel.run_in_new_pool com.timer_ctx (fun pool ->
+		Parallel.ParallelArray.iter pool f (Array.of_list com.types)
+	)
+
+let dump_types com =
+	match Common.defined_value_safe com Define.Dump with
+		| "pretty" -> dump_types com true
+		| "record" -> dump_record com
+		| "position" -> dump_position com
+		| _ -> dump_types com false
+
+let dump_dependencies ?(target_override=None) com =
+	let target_name = match target_override with
+		| None -> platform_name_macro com
+		| Some s -> s
+	in
+	let dump_dependencies_path = [dump_path com.defines;target_name;"dependencies"] in
+	let buf,close = create_dumpfile [] dump_dependencies_path in
+	let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
+	let dep = Hashtbl.create 0 in
+	List.iter (fun m ->
+		print "%s:\n" (Path.UniqueKey.lazy_path m.m_extra.m_file);
+		PMap.iter (fun _ mdep ->
+			let com,ctx = match mdep.md_kind with
+				| MMacro when not com.is_macro_context ->
+					Option.get (com.get_macros()), "[macro] "
+				| _ -> com, ""
+			in
+			let mdep_extra =
+				try (com.module_lut#find mdep.md_path).m_extra
+				with Not_found -> (com.cs#get_context mdep.md_sign)#find_module_extra mdep.md_path
+			in
+			let file = Path.UniqueKey.lazy_path mdep_extra.m_file in
+			print "\t%s%s\n" ctx file;
+			let l = try Hashtbl.find dep file with Not_found -> [] in
+			Hashtbl.replace dep file (m :: l)
+		) m.m_extra.m_deps;
+	) com.Common.modules;
+	close();
+	let dump_dependants_path = [dump_path com.defines;target_name;"dependants"] in
+	let buf,close = create_dumpfile [] dump_dependants_path in
+	let print fmt = Printf.kprintf (fun s -> Buffer.add_string buf s) fmt in
+	Hashtbl.iter (fun n ml ->
+		print "%s:\n" n;
+		List.iter (fun m ->
+			print "\t%s\n" (Path.UniqueKey.lazy_path m.m_extra.m_file);
+		) ml;
+	) dep;
+	close()

+ 132 - 0
src/codegen/fixOverrides.ml

@@ -0,0 +1,132 @@
+open Globals
+open Common
+open Type
+
+(* -------------------------------------------------------------------------- *)
+(* FIX OVERRIDES *)
+
+(*
+	on some platforms which doesn't support type parameters, we must have the
+	exact same type for overridden/implemented function as the original one
+*)
+
+let rec find_field com c f =
+	try
+		(match c.cl_super with
+		| None ->
+			raise Not_found
+		| Some ( {cl_path = (["cpp"],"FastIterator")}, _ ) ->
+			raise Not_found (* This is a strongly typed 'extern' and the usual rules don't apply *)
+		| Some (c,_) ->
+			find_field com c f)
+	with Not_found -> try
+		if com.platform = Cpp || com.platform = Hl then (* uses delegation for interfaces *)
+			raise Not_found;
+		let rec loop = function
+			| [] ->
+				raise Not_found
+			| (c,_) :: l ->
+				try
+					find_field com c f
+				with
+					Not_found -> loop l
+		in
+		loop c.cl_implements
+	with Not_found ->
+		let f = PMap.find f.cf_name c.cl_fields in
+		(match f.cf_kind with Var { v_read = AccRequire _ } -> raise Not_found | _ -> ());
+		f
+
+let fix_override com c f fd =
+	let f2 = (try Some (find_field com c f) with Not_found -> None) in
+	match f2,fd with
+		| Some (f2), Some(fd) ->
+			let targs, tret = (match follow f2.cf_type with TFun (args,ret) -> args, ret | _ -> die "" __LOC__) in
+			let changed_args = ref [] in
+			let prefix = "_tmp_" in
+			let nargs = List.map2 (fun ((v,ct) as cur) (_,_,t2) ->
+				try
+					type_eq EqStrict (monomorphs c.cl_params (monomorphs f.cf_params v.v_type)) t2;
+					(* Flash generates type parameters with a single constraint as that constraint type, so we
+					   have to detect this case and change the variable (issue #2712). *)
+					begin match follow v.v_type with
+						| TInst({cl_kind = KTypeParameter ttp} as cp,_) when com.platform = Flash ->
+							begin match get_constraints ttp with
+							| [tc] ->
+								if List.exists (fun tp -> tp.ttp_name = (snd cp.cl_path)) c.cl_params then raise (Unify_error [])
+							| _ ->
+								()
+							end
+						| _ ->
+							()
+					end;
+					cur
+				with Unify_error _ ->
+					let v2 = alloc_var VGenerated (prefix ^ v.v_name) t2 v.v_pos in
+					changed_args := (v,v2) :: !changed_args;
+					v2,ct
+			) fd.tf_args targs in
+			let fd2 = {
+				tf_args = nargs;
+				tf_type = tret;
+				tf_expr = (match List.rev !changed_args with
+					| [] -> fd.tf_expr
+					| args ->
+						let e = fd.tf_expr in
+						let el = (match e.eexpr with TBlock el -> el | _ -> [e]) in
+						let p = (match el with [] -> e.epos | e :: _ -> e.epos) in
+						let el_v = List.map (fun (v,v2) ->
+							mk (TVar (v,Some (mk (TCast (mk (TLocal v2) v2.v_type p,None)) v.v_type p))) com.basic.tvoid p
+						) args in
+						{ e with eexpr = TBlock (el_v @ el) }
+				);
+			} in
+			let targs = List.map (fun(v,c) -> (v.v_name, Option.is_some c, v.v_type)) nargs in
+			let fde = (match f.cf_expr with None -> die "" __LOC__ | Some e -> e) in
+			f.cf_expr <- Some { fde with eexpr = TFunction fd2 };
+			f.cf_type <- TFun(targs,tret);
+		| Some(f2), None when (has_class_flag c CInterface) ->
+			let targs, tret = (match follow f2.cf_type with TFun (args,ret) -> args, ret | _ -> die "" __LOC__) in
+			f.cf_type <- TFun(targs,tret)
+		| _ ->
+			()
+
+let fix_overrides com t =
+	match t with
+	| TClassDecl c ->
+		(* overrides can be removed from interfaces *)
+		if (has_class_flag c CInterface) then
+			c.cl_ordered_fields <- List.filter (fun f ->
+				try
+					if find_field com c f == f then raise Not_found;
+					c.cl_fields <- PMap.remove f.cf_name c.cl_fields;
+					false;
+				with Not_found ->
+					true
+			) c.cl_ordered_fields;
+		List.iter (fun f ->
+			match f.cf_expr, f.cf_kind with
+			| Some { eexpr = TFunction fd }, Method (MethNormal | MethInline) ->
+				fix_override com c f (Some fd)
+			| None, Method (MethNormal | MethInline) when (has_class_flag c CInterface) ->
+				fix_override com c f None
+			| _ ->
+				()
+		) c.cl_ordered_fields
+	| _ ->
+		()
+
+(*
+	PHP does not allow abstract classes extending other abstract classes to override any fields, so these duplicates
+	must be removed from the child interface
+*)
+let fix_abstract_inheritance com t =
+	match t with
+	| TClassDecl c when (has_class_flag c CInterface) ->
+		c.cl_ordered_fields <- List.filter (fun f ->
+			let b = try (find_field com c f) == f
+			with Not_found -> false in
+			if not b then c.cl_fields <- PMap.remove f.cf_name c.cl_fields;
+			b;
+		) c.cl_ordered_fields
+	| _ -> ()

+ 16 - 13
src/codegen/genxml.ml

@@ -212,7 +212,7 @@ let rec exists f c =
 			| None -> false
 			| Some (csup,_) -> exists f csup
 
-let rec gen_type_decl com pos t =
+let rec gen_type_decl pos t =
 	let m = (t_infos t).mt_module in
 	match t with
 	| TClassDecl c ->
@@ -257,7 +257,7 @@ let rec gen_type_decl com pos t =
 		let mk_field_cast (t,cf) = if Meta.has Meta.NoDoc cf.cf_meta then None else Some (node "icast" ["field",cf.cf_name] [gen_type t]) in
 		let sub = (match a.a_from,a.a_from_field with [],[] -> [] | l1,l2 -> [node "from" [] ((List.map mk_cast l1) @ (ExtList.List.filter_map mk_field_cast l2))]) in
 		let super = (match a.a_to,a.a_to_field with [],[] -> [] | l1,l2 -> [node "to" [] ((List.map mk_cast l1) @ (ExtList.List.filter_map mk_field_cast l2))]) in
-		let impl = (match a.a_impl with None -> [] | Some c -> [node "impl" [] [gen_type_decl com pos (TClassDecl c)]]) in
+		let impl = (match a.a_impl with None -> [] | Some c -> [node "impl" [] [gen_type_decl pos (TClassDecl c)]]) in
 		let this = [node "this" [] [gen_type a.a_this]] in
 		node "abstract" (gen_type_params pos a.a_private (tpath t) a.a_params a.a_pos m) (sub @ this @ super @ doc @ meta @ impl)
 
@@ -288,18 +288,21 @@ let rec write_xml ch tabs x =
 		IO.printf ch "<![CDATA[%s]]>" s
 
 let generate com file =
-	let t = Timer.timer ["generate";"xml"] in
-	let x = node "haxe" [] (List.map (gen_type_decl com true) (List.filter (fun t -> not (Meta.has Meta.NoDoc (t_infos t).mt_meta)) com.types)) in
-	t();
-	let t = Timer.timer ["write";"xml"] in
-	let ch = IO.output_channel (open_out_bin file) in
-	IO.printf ch "<!-- This file can be parsed by haxe.rtti.XmlParser -->\n";
-	write_xml ch "" x;
-	IO.close_out ch;
-	t()
+	let f () =
+		node "haxe" [] (List.map (gen_type_decl true) (List.filter (fun t -> not (Meta.has Meta.NoDoc (t_infos t).mt_meta)) com.types))
+	in
+	let x = Timer.time com.timer_ctx ["generate";"xml"] f () in
+
+	let f () =
+		let ch = IO.output_channel (open_out_bin file) in
+		IO.printf ch "<!-- This file can be parsed by haxe.rtti.XmlParser -->\n";
+		write_xml ch "" x;
+		IO.close_out ch;
+	in
+	Timer.time com.timer_ctx ["write";"xml"] f ()
 
-let gen_type_string ctx t =
-	let x = gen_type_decl ctx false t in
+let gen_type_string t =
+	let x = gen_type_decl false t in
 	let ch = IO.output_string() in
 	write_xml ch "" x;
 	IO.close_out ch

+ 26 - 25
src/codegen/javaModern.ml

@@ -1018,7 +1018,7 @@ module Converter = struct
 		(pack,types)
 end
 
-class java_library_modern com name file_path = object(self)
+class java_library_modern com  name file_path = object(self)
 	inherit [java_lib_type,unit] native_library name file_path as super
 
 
@@ -1028,35 +1028,36 @@ class java_library_modern com name file_path = object(self)
 	val mutable loaded = false
 	val mutable closed = false
 
+	method private do_load =
+		List.iter (function
+		| ({ Zip.is_directory = false; Zip.filename = filename } as entry) when String.ends_with filename ".class" ->
+			let pack = String.nsplit filename "/" in
+			begin match List.rev pack with
+				| [] -> ()
+				| name :: pack ->
+					let name = String.sub name 0 (String.length name - 6) in
+					let pack = List.rev pack in
+					let pack,(mname,tname) = PathConverter.jpath_to_hx (pack,name) in
+					let path = PathConverter.jpath_to_path (pack,(mname,tname)) in
+					let mname = match mname with
+						| None ->
+							cached_files <- path :: cached_files;
+							tname
+						| Some mname -> mname
+					in
+					Hashtbl.add modules (pack,mname) (filename,entry);
+				end
+		| _ -> ()
+	) (Zip.entries (Lazy.force zip));
+
 	method load =
 		if not loaded then begin
 			loaded <- true;
-			let close = Timer.timer ["jar";"load"] in
-			List.iter (function
-				| ({ Zip.is_directory = false; Zip.filename = filename } as entry) when String.ends_with filename ".class" ->
-					let pack = String.nsplit filename "/" in
-					begin match List.rev pack with
-						| [] -> ()
-						| name :: pack ->
-							let name = String.sub name 0 (String.length name - 6) in
-							let pack = List.rev pack in
-							let pack,(mname,tname) = PathConverter.jpath_to_hx (pack,name) in
-							let path = PathConverter.jpath_to_path (pack,(mname,tname)) in
-							let mname = match mname with
-								| None ->
-									cached_files <- path :: cached_files;
-									tname
-								| Some mname -> mname
-							in
-							Hashtbl.add modules (pack,mname) (filename,entry);
-						end
-				| _ -> ()
-			) (Zip.entries (Lazy.force zip));
-			close();
+			Timer.time com.Common.timer_ctx ["jar";"load"] (fun () -> self#do_load) ()
 		end
 
 	method private read zip (filename,entry) =
-		Std.finally (Timer.timer ["jar";"read"]) (fun () ->
+		Timer.time com.Common.timer_ctx ["jar";"read"] (fun () ->
 			let data = Zip.read_entry zip entry in
 			let jc = JReaderModern.parse_class (IO.input_string data) in
 			(jc,file_path,file_path ^ "@" ^ filename)
@@ -1084,7 +1085,7 @@ class java_library_modern com name file_path = object(self)
 					if entries = [] then raise Not_found;
 					let zip = Lazy.force zip in
 					let jcs = List.map (self#read zip) entries in
-					Std.finally (Timer.timer ["jar";"convert"]) (fun () ->
+					Timer.time com.Common.timer_ctx ["jar";"convert"] (fun () ->
 						Some (Converter.convert_module (fst path) jcs)
 					) ();
 				with Not_found ->

+ 8 - 6
src/codegen/swfLoader.ml

@@ -456,7 +456,6 @@ let build_class com c file =
 	(path.tpackage, [(EClass class_data,pos)])
 
 let extract_data (_,tags) =
-	let t = Timer.timer ["read";"swf"] in
 	let h = Hashtbl.create 0 in
 	let loop_field f =
 		match f.hlf_kind with
@@ -474,9 +473,11 @@ let extract_data (_,tags) =
 			List.iter (fun i -> Array.iter loop_field i.hls_fields) (As3hlparse.parse as3)
 		| _ -> ()
 	) tags;
-	t();
 	h
 
+let extract_data com arg =
+	Timer.time com.timer_ctx ["read";"swf"] extract_data arg
+
 let remove_debug_infos as3 =
 	let hl = As3hlparse.parse as3 in
 	let methods = Hashtbl.create 0 in
@@ -547,8 +548,7 @@ let remove_debug_infos as3 =
 	in
 	As3hlparse.flatten (List.map loop_static hl)
 
-let parse_swf com file =
-	let t = Timer.timer ["read";"swf"] in
+let parse_swf file =
 	let is_swc = Path.file_extension file = "swc" || Path.file_extension file = "ane" in
 	let ch = if is_swc then begin
 		let zip = Zip.open_in file in
@@ -577,9 +577,11 @@ let parse_swf com file =
 			t.tdata <- TActionScript3 (id,remove_debug_infos as3)
 		| _ -> ()
 	) tags;
-	t();
 	(h,tags)
 
+let parse_swf com file =
+	Timer.time com.timer_ctx ["read";"swf"] parse_swf file
+
 class swf_library com name file_path = object(self)
 	inherit [swf_lib_type,Swf.swf] native_library name file_path
 
@@ -600,7 +602,7 @@ class swf_library com name file_path = object(self)
 
 	method extract = match swf_classes with
 		| None ->
-			let d = extract_data self#get_swf in
+			let d = extract_data com self#get_swf in
 			swf_classes <- Some d;
 			d
 		| Some d ->

+ 10 - 7
src/compiler/args.ml

@@ -43,7 +43,7 @@ let process_args arg_spec =
 let parse_args com =
 	let usage = Printf.sprintf
 		"Haxe Compiler %s - (C)2005-2024 Haxe Foundation\nUsage: haxe%s <target> [options] [hxml files and dot paths...]\n"
-		s_version_full (if Sys.os_type = "Win32" then ".exe" else "")
+		(s_version_full com.version) (if Sys.os_type = "Win32" then ".exe" else "")
 	in
 	let actx = {
 		classes = [([],"Std")];
@@ -64,6 +64,7 @@ let parse_args com =
 		raise_usage = (fun () -> ());
 		display_arg = None;
 		deprecations = [];
+		measure_times = false;
 	} in
 	let add_deprecation s =
 		actx.deprecations <- s :: actx.deprecations
@@ -104,9 +105,9 @@ let parse_args com =
 		),"<name[=path]>","generate code for a custom target");
 		("Target",[],["-x"], Arg.String (fun cl ->
 			let cpath = Path.parse_type_path cl in
-			(match com.main.main_class with
+			(match com.main.main_path with
 				| Some c -> if cpath <> c then raise (Arg.Bad "Multiple --main classes specified")
-				| None -> com.main.main_class <- Some cpath);
+				| None -> com.main.main_path <- Some cpath);
 			actx.classes <- cpath :: actx.classes;
 			Common.define com Define.Interp;
 			set_platform com Eval "";
@@ -131,9 +132,9 @@ let parse_args com =
 			actx.hxb_libs <- lib :: actx.hxb_libs
 		),"<path>","add a hxb library");
 		("Compilation",["-m";"--main"],["-main"],Arg.String (fun cl ->
-			if com.main.main_class <> None then raise (Arg.Bad "Multiple --main classes specified");
+			if com.main.main_path <> None then raise (Arg.Bad "Multiple --main classes specified");
 			let cpath = Path.parse_type_path cl in
-			com.main.main_class <- Some cpath;
+			com.main.main_path <- Some cpath;
 			actx.classes <- cpath :: actx.classes
 		),"<class>","select startup class");
 		("Compilation",["-L";"--library"],["-lib"],Arg.String (fun _ -> ()),"<name[:ver]>","use a haxelib library");
@@ -154,7 +155,7 @@ let parse_args com =
 			com.debug <- true;
 		),"","add debug information to the compiled code");
 		("Miscellaneous",["--version"],["-version"],Arg.Unit (fun() ->
-			raise (Helper.HelpMessage s_version_full);
+			raise (Helper.HelpMessage (s_version_full com.version));
 		),"","print version and exit");
 		("Miscellaneous", ["-h";"--help"], ["-help"], Arg.Unit (fun () ->
 			raise (Arg.Help "")
@@ -261,7 +262,9 @@ let parse_args com =
 			actx.hxb_out <- Some file;
 		),"<file>", "generate haxe binary representation to target archive");
 		("Optimization",["--no-output"],[], Arg.Unit (fun() -> actx.no_output <- true),"","compiles but does not generate any file");
-		("Debug",["--times"],[], Arg.Unit (fun() -> Timer.measure_times := true),"","measure compilation times");
+		("Debug",["--times"],[], Arg.Unit (fun() ->
+			actx.measure_times <- true
+		),"","measure compilation times");
 		("Optimization",["--no-inline"],[],Arg.Unit (fun () ->
 			add_deprecation "--no-inline has been deprecated, use -D no-inline instead";
 			Common.define com Define.NoInline

+ 32 - 19
src/compiler/compilationCache.ml

@@ -36,7 +36,6 @@ class context_cache (index : int) (sign : Digest.t) = object(self)
 	val modules : (path,module_def) Hashtbl.t = Hashtbl.create 0
 	val binary_cache : (path,HxbData.module_cache) Hashtbl.t = Hashtbl.create 0
 	val tmp_binary_cache : (path,HxbData.module_cache) Hashtbl.t = Hashtbl.create 0
-	val string_pool  = StringPool.create ()
 	val removed_files = Hashtbl.create 0
 	val mutable json = JNull
 	val mutable initialized = false
@@ -81,20 +80,25 @@ class context_cache (index : int) (sign : Digest.t) = object(self)
 		try (Hashtbl.find modules path).m_extra
 		with Not_found -> (self#get_hxb_module path).mc_extra
 
-	method cache_hxb_module config warn anon_identification path m =
+	method add_binary_cache m chunks =
+		Hashtbl.replace binary_cache m.m_path {
+			mc_path = m.m_path;
+			mc_id = m.m_id;
+			mc_chunks = chunks;
+			mc_extra = { m.m_extra with m_cache_state = MSGood; m_display_deps = None }
+		}
+
+	method cache_hxb_module config warn anon_identification m =
 		match m.m_extra.m_kind with
 		| MImport ->
-			Hashtbl.add modules m.m_path m
+			Hashtbl.add modules m.m_path m;
+			None
 		| _ ->
-			let writer = HxbWriter.create config (Some string_pool) warn anon_identification in
-			HxbWriter.write_module writer m;
-			let chunks = HxbWriter.get_chunks writer in
-			Hashtbl.replace binary_cache path {
-				mc_path = path;
-				mc_id = m.m_id;
-				mc_chunks = chunks;
-				mc_extra = { m.m_extra with m_cache_state = MSGood }
-			}
+			Some (fun () ->
+				let writer = HxbWriter.create config warn anon_identification in
+				HxbWriter.write_module writer m;
+				HxbWriter.get_chunks writer
+			)
 
 	method cache_module_in_memory path m =
 		Hashtbl.replace modules path m
@@ -117,8 +121,6 @@ class context_cache (index : int) (sign : Digest.t) = object(self)
 	method get_modules = modules
 
 	method get_hxb = binary_cache
-	method get_string_pool = string_pool
-	method get_string_pool_arr = string_pool.items.arr
 
 	(* TODO handle hxb cache there too *)
 	method get_removed_files = removed_files
@@ -140,8 +142,7 @@ class virtual server_task (id : string list) (priority : int) = object(self)
 	method private virtual execute : unit
 
 	method run : unit =
-		let t = Timer.timer ("server" :: "task" :: id) in
-		Std.finally t (fun () -> self#execute) ()
+		self#execute
 
 	method get_priority = priority
 	method get_id = id
@@ -234,15 +235,27 @@ class cache = object(self)
 			) cc#get_modules acc
 		) contexts []
 
+	method taint_module m_path reason =
+		Hashtbl.iter (fun _ cc ->
+			Hashtbl.iter (fun _ m ->
+				if m.m_path = m_path then m.m_extra.m_cache_state <- MSBad (Tainted reason)
+			) cc#get_modules;
+			Hashtbl.iter (fun _ mc ->
+				if mc.HxbData.mc_path = m_path then
+					mc.HxbData.mc_extra.m_cache_state <- match reason, mc.mc_extra.m_cache_state with
+					| CheckDisplayFile, (MSBad _ as state) -> state
+					| _ -> MSBad (Tainted reason)
+			) cc#get_hxb
+		) contexts
+
 	method taint_modules file_key reason =
 		Hashtbl.iter (fun _ cc ->
 			Hashtbl.iter (fun _ m ->
 				if Path.UniqueKey.lazy_key m.m_extra.m_file = file_key then m.m_extra.m_cache_state <- MSBad (Tainted reason)
 			) cc#get_modules;
-			let open HxbData in
 			Hashtbl.iter (fun _ mc ->
-				if Path.UniqueKey.lazy_key mc.mc_extra.m_file = file_key then
-					mc.mc_extra.m_cache_state <- match reason, mc.mc_extra.m_cache_state with
+				if Path.UniqueKey.lazy_key mc.HxbData.mc_extra.m_file = file_key then
+					mc.HxbData.mc_extra.m_cache_state <- match reason, mc.HxbData.mc_extra.m_cache_state with
 					| CheckDisplayFile, (MSBad _ as state) -> state
 					| _ -> MSBad (Tainted reason)
 			) cc#get_hxb

+ 3 - 1
src/compiler/compilationContext.ml

@@ -37,13 +37,14 @@ type arg_context = {
 	mutable raise_usage : unit -> unit;
 	mutable display_arg : string option;
 	mutable deprecations : string list;
+	mutable measure_times : bool;
 }
 
 type communication = {
 	write_out : string -> unit;
 	write_err : string -> unit;
 	flush     : compilation_context -> unit;
-	exit      : int -> unit;
+	exit      : Timer.timer_context -> int -> unit;
 	is_server : bool;
 }
 
@@ -54,6 +55,7 @@ and compilation_context = {
 	mutable has_error : bool;
 	comm : communication;
 	mutable runtime_args : string list;
+	timer_ctx : Timer.timer_context;
 }
 
 type compilation_callbacks = {

+ 88 - 56
src/compiler/compiler.ml

@@ -2,16 +2,16 @@ open Globals
 open Common
 open CompilationContext
 
+let handle_diagnostics ctx msg p kind =
+	ctx.has_error <- true;
+	add_diagnostics_message ctx.com msg p kind Error;
+	match ctx.com.report_mode with
+	| RMLegacyDiagnostics _ -> DisplayOutput.emit_legacy_diagnostics ctx.com
+	| RMDiagnostics _ -> DisplayOutput.emit_diagnostics ctx.com
+	| _ -> die "" __LOC__
+
 let run_or_diagnose ctx f =
 	let com = ctx.com in
-	let handle_diagnostics msg p kind =
-		ctx.has_error <- true;
-		add_diagnostics_message com msg p kind Error;
-		match com.report_mode with
-		| RMLegacyDiagnostics _ -> DisplayOutput.emit_legacy_diagnostics ctx.com
-		| RMDiagnostics _ -> DisplayOutput.emit_diagnostics ctx.com
-		| _ -> die "" __LOC__
-	in
 	if is_diagnostics com then begin try
 			f ()
 		with
@@ -25,15 +25,14 @@ let run_or_diagnose ctx f =
 			| RMDiagnostics _ -> DisplayOutput.emit_diagnostics ctx.com
 			| _ -> die "" __LOC__)
 		| Parser.Error(msg,p) ->
-			handle_diagnostics (Parser.error_msg msg) p DKParserError
+			handle_diagnostics ctx (Parser.error_msg msg) p DKParserError
 		| Lexer.Error(msg,p) ->
-			handle_diagnostics (Lexer.error_msg msg) p DKParserError
+			handle_diagnostics ctx (Lexer.error_msg msg) p DKParserError
 		end
 	else
 		f ()
 
 let run_command ctx cmd =
-	let t = Timer.timer ["command";cmd] in
 	(* TODO: this is a hack *)
 	let cmd = if ctx.comm.is_server then begin
 		let h = Hashtbl.create 0 in
@@ -72,9 +71,11 @@ let run_command ctx cmd =
 			result
 		end
 	in
-	t();
 	result
 
+let run_command ctx cmd =
+	Timer.time ctx.timer_ctx ["command";cmd] (run_command ctx) cmd
+
 module Setup = struct
 	let initialize_target ctx com actx =
 		init_platform com;
@@ -230,13 +231,10 @@ module Setup = struct
 	let setup_common_context ctx =
 		let com = ctx.com in
 		ctx.com.print <- ctx.comm.write_out;
-		Common.define_value com Define.HaxeVer (Printf.sprintf "%.3f" (float_of_int Globals.version /. 1000.));
-		Common.raw_define com "haxe3";
-		Common.raw_define com "haxe4";
-		Common.raw_define com "haxe5";
+		Common.define_value com Define.HaxeVer (Printf.sprintf "%.3f" (float_of_int version /. 1000.));
 		Common.define_value com Define.Haxe s_version;
 		Common.raw_define com "true";
-		Common.define_value com Define.Dce "std";
+		List.iter (fun (k,v) -> Define.raw_define_value com.defines k v) DefineList.default_values;
 		com.info <- (fun ?(depth=0) ?(from_macro=false) msg p ->
 			message ctx (make_compiler_message ~from_macro msg p depth DKCompilerMessage Information)
 		);
@@ -273,11 +271,17 @@ end
 
 let check_defines com =
 	if is_next com then begin
-		PMap.iter (fun k _ ->
+		PMap.iter (fun k v ->
 			try
 				let reason = Hashtbl.find Define.deprecation_lut k in
 				let p = fake_pos ("-D " ^ k) in
-				com.warning WDeprecatedDefine [] reason p
+				begin match reason with
+				| DueTo reason ->
+					com.warning WDeprecatedDefine [] reason p
+				| InFavorOf d ->
+					Define.raw_define_value com.defines d v;
+					com.warning WDeprecatedDefine [] (Printf.sprintf "-D %s has been deprecated in favor of -D %s" k d) p
+				end;
 			with Not_found ->
 				()
 		) com.defines.values
@@ -286,7 +290,6 @@ let check_defines com =
 (** Creates the typer context and types [classes] into it. *)
 let do_type ctx mctx actx display_file_dot_path =
 	let com = ctx.com in
-	let t = Timer.timer ["typing"] in
 	let cs = com.cs in
 	CommonCache.maybe_add_context_sign cs com "before_init_macros";
 	enter_stage com CInitMacrosStart;
@@ -296,6 +299,7 @@ let do_type ctx mctx actx display_file_dot_path =
 		Some (MacroContext.call_init_macro ctx.com mctx path)
 	) mctx (List.rev actx.config_macros) in
 	enter_stage com CInitMacrosDone;
+	update_platform_config com; (* make sure to adapt all flags changes defined during init macros *)
 	ServerMessage.compiler_stage com;
 
 	let macros = match mctx with None -> None | Some mctx -> mctx.g.macros in
@@ -309,7 +313,10 @@ let do_type ctx mctx actx display_file_dot_path =
 		com.callbacks#run com.error_ext com.callbacks#get_after_init_macros;
 		run_or_diagnose ctx (fun () ->
 			if com.display.dms_kind <> DMNone then DisplayTexpr.check_display_file tctx cs;
-			List.iter (fun cpath -> ignore(tctx.Typecore.g.Typecore.do_load_module tctx cpath null_pos)) (List.rev actx.classes);
+			List.iter (fun cpath ->
+				ignore(tctx.Typecore.g.Typecore.do_load_module tctx cpath null_pos);
+				Typecore.flush_pass tctx.g PBuildClass "actx.classes"
+			) (List.rev actx.classes);
 			Finalization.finalize tctx;
 		);
 	end with TypeloadParse.DisplayInMacroBlock ->
@@ -323,26 +330,27 @@ let do_type ctx mctx actx display_file_dot_path =
 		| (DMUsage _ | DMImplementation) -> FindReferences.find_possible_references tctx cs;
 		| _ -> ()
 	end;
-	t();
 	(tctx, display_file_dot_path)
 
 let finalize_typing ctx tctx =
-	let t = Timer.timer ["finalize"] in
 	let com = ctx.com in
 	let main_module = Finalization.maybe_load_main tctx in
 	enter_stage com CFilteringStart;
 	ServerMessage.compiler_stage com;
-	let main, types, modules = run_or_diagnose ctx (fun () -> Finalization.generate tctx main_module) in
-	com.main.main_expr <- main;
+	let (main_expr,main_file),types,modules = run_or_diagnose ctx (fun () -> Finalization.generate tctx main_module) in
+	com.main.main_expr <- main_expr;
+	com.main.main_file <- main_file;
 	com.types <- types;
-	com.modules <- modules;
-	t()
+	com.modules <- modules
+
+let finalize_typing ctx tctx =
+	Timer.time ctx.timer_ctx ["finalize"] (finalize_typing ctx) tctx
 
 let filter ctx tctx ectx before_destruction =
-	let t = Timer.timer ["filters"] in
-	DeprecationCheck.run ctx.com;
-	run_or_diagnose ctx (fun () -> Filters.run tctx ectx ctx.com.main.main_expr before_destruction);
-	t()
+	Timer.time ctx.timer_ctx ["filters"] (fun () ->
+		DeprecationCheck.run ctx.com;
+		run_or_diagnose ctx (fun () -> Filters.run tctx ectx ctx.com.main.main_expr before_destruction)
+	) ()
 
 let compile ctx actx callbacks =
 	let com = ctx.com in
@@ -366,24 +374,24 @@ let compile ctx actx callbacks =
 	let ext = Setup.initialize_target ctx com actx in
 	update_platform_config com; (* make sure to adapt all flags changes defined after platform *)
 	callbacks.after_target_init ctx;
-	let t = Timer.timer ["init"] in
-	List.iter (fun f -> f()) (List.rev (actx.pre_compilation));
-	begin match actx.hxb_out with
-		| None ->
-			()
-		| Some file ->
-			com.hxb_writer_config <- HxbWriterConfig.process_argument file
-	end;
-	t();
+	Timer.time ctx.timer_ctx ["init"] (fun () ->
+		List.iter (fun f -> f()) (List.rev (actx.pre_compilation));
+		begin match actx.hxb_out with
+			| None ->
+				()
+			| Some file ->
+				com.hxb_writer_config <- HxbWriterConfig.process_argument file
+		end;
+	) ();
 	enter_stage com CInitialized;
 	ServerMessage.compiler_stage com;
 	if actx.classes = [([],"Std")] && not actx.force_typing then begin
 		if actx.cmds = [] && not actx.did_something then actx.raise_usage();
 	end else begin
 		(* Actual compilation starts here *)
-		let (tctx,display_file_dot_path) = do_type ctx mctx actx display_file_dot_path in
+		let (tctx,display_file_dot_path) = Timer.time ctx.timer_ctx ["typing"] (do_type ctx mctx actx) display_file_dot_path in
 		DisplayProcessing.handle_display_after_typing ctx tctx display_file_dot_path;
-		let ectx = Exceptions.create_exception_context tctx in
+		let ectx = ExceptionInit.create_exception_context tctx in
 		finalize_typing ctx tctx;
 		let is_compilation = is_compilation com in
 		com.callbacks#add_after_save (fun () ->
@@ -395,10 +403,10 @@ let compile ctx actx callbacks =
 					()
 		);
 		if is_diagnostics com then
-			filter ctx tctx ectx (fun () -> DisplayProcessing.handle_display_after_finalization ctx tctx display_file_dot_path)
+			filter ctx com ectx (fun () -> DisplayProcessing.handle_display_after_finalization ctx tctx display_file_dot_path)
 		else begin
 			DisplayProcessing.handle_display_after_finalization ctx tctx display_file_dot_path;
-			filter ctx tctx ectx (fun () -> ());
+			filter ctx com ectx (fun () -> ());
 		end;
 		if ctx.has_error then raise Abort;
 		if is_compilation then Generate.check_auxiliary_output com actx;
@@ -418,6 +426,10 @@ let compile ctx actx callbacks =
 		) (List.rev actx.cmds)
 	end
 
+let make_ice_message com msg backtrace =
+		let ver = (s_version_full com.version) in
+		let os_type = if Sys.unix then "unix" else "windows" in
+		Printf.sprintf "%s\nHaxe: %s; OS type: %s;\n%s" msg ver os_type backtrace
 let compile_safe ctx f =
 	let com = ctx.com in
 try
@@ -430,7 +442,7 @@ with
 	| Parser.Error (m,p) ->
 		error ctx (Parser.error_msg m) p
 	| Typecore.Forbid_package ((pack,m,p),pl,pf)  ->
-		if !Parser.display_mode <> DMNone && ctx.has_next then begin
+		if ctx.com.display.dms_kind <> DMNone && ctx.has_next then begin
 			ctx.has_error <- false;
 			ctx.messages <- [];
 		end else begin
@@ -441,8 +453,16 @@ with
 		error_ext ctx err
 	| Arg.Bad msg ->
 		error ctx ("Error: " ^ msg) null_pos
+	| Failure msg when is_diagnostics com ->
+		handle_diagnostics ctx msg null_pos DKCompilerMessage;
 	| Failure msg when not Helper.is_debug_run ->
 		error ctx ("Error: " ^ msg) null_pos
+	| Globals.Ice (msg,backtrace) when is_diagnostics com ->
+		let s = make_ice_message com msg backtrace in
+		handle_diagnostics ctx s null_pos DKCompilerMessage
+	| Globals.Ice (msg,backtrace) when not Helper.is_debug_run ->
+		let s = make_ice_message com msg backtrace in
+		error ctx ("Error: " ^ s) null_pos
 	| Helper.HelpMessage msg ->
 		print_endline msg
 	| Parser.TypePath (p,c,is_import,pos) ->
@@ -487,6 +507,8 @@ let catch_completion_and_exit ctx callbacks run =
 			i
 
 let process_actx ctx actx =
+	ctx.com.doinline <- ctx.com.display.dms_inline && not (Common.defined ctx.com Define.NoInline);
+	ctx.timer_ctx.measure_times <- (if actx.measure_times then Yes else No);
 	DisplayProcessing.process_display_arg ctx actx;
 	List.iter (fun s ->
 		ctx.com.warning WDeprecated [] s null_pos
@@ -513,23 +535,30 @@ let compile_ctx callbacks ctx =
 	end else
 		catch_completion_and_exit ctx callbacks run
 
-let create_context comm cs compilation_step params = {
-	com = Common.create compilation_step cs version params (DisplayTypes.DisplayMode.create !Parser.display_mode);
+let create_context comm cs timer_ctx compilation_step params = {
+	com = Common.create timer_ctx compilation_step cs {
+		version = version;
+		major = version_major;
+		minor = version_minor;
+		revision = version_revision;
+		pre = version_pre;
+		extra = Version.version_extra;
+	} params (DisplayTypes.DisplayMode.create DMNone);
 	messages = [];
 	has_next = false;
 	has_error = false;
 	comm = comm;
 	runtime_args = [];
+	timer_ctx = timer_ctx;
 }
 
 module HighLevel = struct
-	let add_libs libs args cs has_display =
+	let add_libs timer_ctx libs args cs has_display =
 		let global_repo = List.exists (fun a -> a = "--haxelib-global") args in
 		let fail msg =
 			raise (Arg.Bad msg)
 		in
 		let call_haxelib() =
-			let t = Timer.timer ["haxelib"] in
 			let cmd = "haxelib" ^ (if global_repo then " --global" else "") ^ " path " ^ String.concat " " libs in
 			let pin, pout, perr = Unix.open_process_full cmd (Unix.environment()) in
 			let lines = Std.input_list pin in
@@ -539,9 +568,11 @@ module HighLevel = struct
 				| [], [] -> "Failed to call haxelib (command not found ?)"
 				| [], [s] when ExtString.String.ends_with (ExtString.String.strip s) "Module not found: path" -> "The haxelib command has been strip'ed, please install it again"
 				| _ -> String.concat "\n" (lines@err));
-			t();
 			lines
 		in
+		let call_haxelib () =
+			Timer.time timer_ctx ["haxelib"] call_haxelib ()
+		in
 		match libs with
 		| [] ->
 			[]
@@ -575,7 +606,7 @@ module HighLevel = struct
 			lines
 
 	(* Returns a list of contexts, but doesn't do anything yet *)
-	let process_params server_api create each_args has_display is_server args =
+	let process_params server_api timer_ctx create each_args has_display is_server args =
 		(* We want the loop below to actually see all the --each params, so let's prepend them *)
 		let args = !each_args @ args in
 		let added_libs = Hashtbl.create 0 in
@@ -633,14 +664,14 @@ module HighLevel = struct
 				let libs,args = find_subsequent_libs [name] args in
 				let libs = List.filter (fun l -> not (Hashtbl.mem added_libs l)) libs in
 				List.iter (fun l -> Hashtbl.add added_libs l ()) libs;
-				let lines = add_libs libs args server_api.cache has_display in
+				let lines = add_libs timer_ctx libs args server_api.cache has_display in
 				loop acc (lines @ args)
 			| ("--jvm" | "-jvm" as arg) :: dir :: args ->
 				loop_lib arg dir "hxjava" acc args
 			| arg :: l ->
 				match List.rev (ExtString.String.nsplit arg ".") with
 				| "hxml" :: _ :: _ when (match acc with "-cmd" :: _ | "--cmd" :: _ -> false | _ -> true) ->
-					let full_path = Extc.get_full_path arg in
+					let full_path = try Extc.get_full_path arg with Failure(_) -> raise (Arg.Bad (Printf.sprintf "File not found: %s" arg)) in
 					if List.mem full_path !hxml_stack then
 						raise (Arg.Bad (Printf.sprintf "Duplicate hxml inclusion: %s" full_path))
 					else
@@ -678,7 +709,8 @@ module HighLevel = struct
 		end
 
 	let entry server_api comm args =
-		let create = create_context comm server_api.cache in
+		let timer_ctx = Timer.make_context (Timer.make ["other"]) in
+		let create = create_context comm server_api.cache timer_ctx in
 		let each_args = ref [] in
 		let curdir = Unix.getcwd () in
 		let has_display = ref false in
@@ -692,7 +724,7 @@ module HighLevel = struct
 		in
 		let rec loop args =
 			let args,server_mode,ctx = try
-				process_params server_api create each_args !has_display comm.is_server args
+				process_params server_api timer_ctx create each_args !has_display comm.is_server args
 			with Arg.Bad msg ->
 				let ctx = create 0 args in
 				error ctx ("Error: " ^ msg) null_pos;
@@ -715,5 +747,5 @@ module HighLevel = struct
 				code
 		in
 		let code = loop args in
-		comm.exit code
+		comm.exit timer_ctx code
 end

+ 7 - 8
src/compiler/displayOutput.ml

@@ -1,7 +1,6 @@
 open Globals
 open Ast
 open Common
-open Timer
 open DisplayTypes.DisplayMode
 open DisplayTypes.CompletionResultKind
 open CompletionItem
@@ -24,14 +23,15 @@ let htmlescape s =
 	let s = String.concat "&quot;" (ExtString.String.nsplit s "\"") in
 	s
 
-let get_timer_fields start_time =
+let get_timer_fields timer_ctx =
+	let open Timer in
 	let tot = ref 0. in
-	Hashtbl.iter (fun _ t -> tot := !tot +. t.total) Timer.htimers;
-	let fields = [("@TOTAL", Printf.sprintf "%.3fs" (get_time() -. start_time))] in
+	Hashtbl.iter (fun _ t -> tot := !tot +. t.total) timer_ctx.timer_lut;
+	let fields = [("@TOTAL", Printf.sprintf "%.3fs" (Extc.time() -. timer_ctx.start_time))] in
 	if !tot > 0. then
 		Hashtbl.fold (fun _ t acc ->
 			((String.concat "." t.id),(Printf.sprintf "%.3fs (%.0f%%)" t.total (t.total *. 100. /. !tot))) :: acc
-		) Timer.htimers fields
+		) timer_ctx.timer_lut fields
 	else
 		fields
 
@@ -272,11 +272,10 @@ let handle_display_exception_old ctx dex = match dex with
 		raise (Completion (String.concat "." pack))
 	| DisplayFields r ->
 		DisplayPosition.display_position#reset;
-		let fields = if !Timer.measure_times then begin
-			Timer.close_times();
+		let fields = if ctx.com.timer_ctx.measure_times = Yes then begin
 			(List.map (fun (name,value) ->
 				CompletionItem.make_ci_timer ("@TIME " ^ name) value
-			) (get_timer_fields !Helper.start_time)) @ r.fitems
+			) (get_timer_fields ctx.com.timer_ctx)) @ r.fitems
 		end else
 			r.fitems
 		in

+ 8 - 20
src/compiler/displayProcessing.ml

@@ -20,7 +20,7 @@ let handle_display_argument_old com file_pos actx =
 		raise (Completion (DisplayOutput.print_keywords ()))
 	| "memory" ->
 		actx.did_something <- true;
-		(try Memory.display_memory com with e -> prerr_endline (Printexc.get_backtrace ()));
+		(try DisplayMemory.display_memory com with e -> prerr_endline (Printexc.get_backtrace ()));
 	| "diagnostics" ->
 		com.report_mode <- RMLegacyDiagnostics []
 	| _ ->
@@ -29,7 +29,6 @@ let handle_display_argument_old com file_pos actx =
 		let file_unique = com.file_keys#get file in
 		let pos, smode = try ExtString.String.split pos "@" with _ -> pos,"" in
 		let create mode =
-			Parser.display_mode := mode;
 			DisplayTypes.DisplayMode.create mode
 		in
 		let dm = match smode with
@@ -81,7 +80,7 @@ let process_display_arg ctx actx =
 		if String.length input > 0 && (input.[0] = '[' || input.[0] = '{') then begin
 			actx.did_something <- true;
 			actx.force_typing <- true;
-			DisplayJson.parse_input ctx.com input Timer.measure_times
+			DisplayJson.parse_input ctx.com input
 		end else
 			handle_display_argument_old ctx.com input actx;
 	| None ->
@@ -103,17 +102,6 @@ let process_display_configuration ctx =
 			| WMDisable ->
 				()
 		);
-	end;
-	Lexer.old_format := Common.defined com Define.OldErrorFormat;
-	if !Lexer.old_format && !Parser.in_display then begin
-		let p = DisplayPosition.display_position#get in
-		(* convert byte position to utf8 position *)
-		try
-			let content = Std.input_file ~bin:true (Path.get_real_path p.pfile) in
-			let pos = Extlib_leftovers.UTF8.length (String.sub content 0 p.pmin) in
-			DisplayPosition.display_position#set { p with pmin = pos; pmax = pos }
-		with _ ->
-			() (* ignore *)
 	end
 
 let process_display_file com actx =
@@ -143,7 +131,7 @@ let process_display_file com actx =
 			DPKNone
 		| DFPOnly when (DisplayPosition.display_position#get).pfile = file_input_marker ->
 			actx.classes <- [];
-			com.main.main_class <- None;
+			com.main.main_path <- None;
 			begin match com.file_contents with
 			| [_, Some input] ->
 				com.file_contents <- [];
@@ -154,7 +142,7 @@ let process_display_file com actx =
 		| dfp ->
 			if dfp = DFPOnly then begin
 				actx.classes <- [];
-				com.main.main_class <- None;
+				com.main.main_path <- None;
 			end;
 			let dpk = List.map (fun file_key ->
 				let real = Path.get_real_path (Path.UniqueKey.to_string file_key) in
@@ -276,7 +264,7 @@ let maybe_load_display_file_before_typing tctx display_file_dot_path = match dis
 let handle_display_after_typing ctx tctx display_file_dot_path =
 	let com = ctx.com in
 	if ctx.com.display.dms_kind = DMNone && ctx.has_error then raise Abort;
-	begin match ctx.com.display.dms_kind,!Parser.delayed_syntax_completion with
+	begin match ctx.com.display.dms_kind,Atomic.get ctx.com.parser_state.delayed_syntax_completion with
 		| DMDefault,Some(kind,subj) -> DisplayOutput.handle_syntax_completion com kind subj
 		| _ -> ()
 	end;
@@ -315,9 +303,9 @@ let process_global_display_mode com tctx =
 	promote_type_hints tctx;
 	match com.display.dms_kind with
 	| DMUsage (with_definition,_,_) ->
-		FindReferences.find_references tctx com with_definition
+		FindReferences.find_references com with_definition
 	| DMImplementation ->
-		FindReferences.find_implementations tctx com
+		FindReferences.find_implementations com
 	| DMModuleSymbols filter ->
 		let open CompilationCache in
 		let cs = com.cs in
@@ -359,7 +347,7 @@ let handle_display_after_finalization ctx tctx display_file_dot_path =
 	| RMDiagnostics _ ->
 		DisplayOutput.emit_diagnostics com
 	| RMStatistics ->
-		DisplayOutput.emit_statistics tctx
+		DisplayOutput.emit_statistics com
 	| RMNone ->
 		()
 	end

+ 5 - 0
src/compiler/dune

@@ -0,0 +1,5 @@
+(rule
+	(deps (env_var ADD_REVISION) (universe))
+	(targets version.ml)
+	(action (with-stdout-to version.ml (run ../prebuild.exe version)))
+)

+ 36 - 40
src/compiler/generate.ml

@@ -18,20 +18,20 @@ let check_auxiliary_output com actx =
 		| Some file ->
 			Common.log com ("Generating json : " ^ file);
 			Path.mkdir_from_path file;
-			Genjson.generate com.types file
+			Genjson.generate com.timer_ctx com.types file
 	end
 
-let create_writer com config string_pool =
+let create_writer com config =
 	let anon_identification = new tanon_identification in
 	let warn w s p = com.Common.warning w com.warning_options s p in
-	let writer = HxbWriter.create config string_pool warn anon_identification in
+	let writer = HxbWriter.create config warn anon_identification in
 	writer,(fun () ->
 		let out = IO.output_string () in
 		HxbWriter.export writer out;
 		IO.close_out out
 	)
 
-let export_hxb from_cache com config string_pool cc platform zip m =
+let export_hxb from_cache com config cc platform m =
 	let open HxbData in
 	match m.m_extra.m_kind with
 		| MCode | MMacro | MFake | MExtern -> begin
@@ -48,69 +48,65 @@ let export_hxb from_cache com config string_pool cc platform zip m =
 					IO.nwrite out data
 				) hxb_cache.mc_chunks;
 				let data = IO.close_out out in
-				zip#add_entry data path;
+				Some (path,data)
 			end else begin
-				let writer,close = create_writer com config string_pool in
+				let writer,close = create_writer com config in
 				HxbWriter.write_module writer m;
 				let bytes = close () in
-				zip#add_entry bytes path;
+				Some (path,bytes)
 			end
 		end
 	| _ ->
-		()
+		None
 
 let check_hxb_output ctx config =
 	let open HxbWriterConfig in
 	let com = ctx.com in
-	let write_string_pool config zip name pool =
-		let writer,close = create_writer com config (Some pool) in
-		let a = StringPool.finalize writer.cp in
-		HxbWriter.HxbWriter.write_string_pool writer STR a;
-		let bytes = close () in
-		zip#add_entry bytes name;
-	in
 	let match_path_list l sl_path =
 		List.exists (fun sl -> Ast.match_path true sl_path sl) l
 	in
 	let try_write from_cache =
 		let path = config.HxbWriterConfig.archive_path in
 		let path = Str.global_replace (Str.regexp "\\$target") (platform_name ctx.com.platform) path in
-		let t = Timer.timer ["generate";"hxb"] in
+		let t = Timer.start_timer ctx.timer_ctx ["generate";"hxb"] in
 		Path.mkdir_from_path path;
 		let zip = new Zip_output.zip_output path 6 in
-		let export com config string_pool =
+		let export com config =
 			let cc = CommonCache.get_cache com in
 			let target = Common.platform_name_macro com in
-
-			List.iter (fun m ->
-				let t = Timer.timer ["generate";"hxb";s_type_path m.m_path] in
+			let f m =
 				let sl_path = fst m.m_path @ [snd m.m_path] in
 				if not (match_path_list config.exclude sl_path) || match_path_list config.include' sl_path then
-					Std.finally t (export_hxb from_cache com config string_pool cc target zip) m
-			) com.modules;
+					Timer.time ctx.timer_ctx ["generate";"hxb";s_type_path m.m_path] (export_hxb from_cache com config cc target) m
+				else
+					None
+			in
+			let a_in = Array.of_list com.modules in
+			let a_out = Parallel.run_in_new_pool com.timer_ctx (fun pool ->
+				Parallel.ParallelArray.map pool f a_in None
+			) in
+			Array.iter (function
+				| None ->
+					()
+				| Some(path,bytes) ->
+					zip#add_entry bytes path
+			) a_out
 		in
 		Std.finally (fun () ->
 			zip#close;
 			t()
 		) (fun () ->
-			let string_pool = if config.share_string_pool then Some (StringPool.create ()) else None in
 			if config.target_config.generate then begin
-				export com config.target_config string_pool;
+				export com config.target_config
 			end;
 
 			if config.macro_config.generate then begin
 				match com.get_macros() with
 					| Some mcom ->
-						let use_separate_pool = config.share_string_pool && from_cache in
-						let string_pool = if use_separate_pool then Some (StringPool.create ()) else string_pool in
-						export mcom config.macro_config string_pool;
-						if use_separate_pool then write_string_pool config.macro_config zip "StringPool.macro.hxb" (Option.get string_pool)
+						export mcom config.macro_config;
 					| _ ->
 						()
 			end;
-
-			if config.share_string_pool then
-				write_string_pool config.target_config zip "StringPool.hxb" (Option.get string_pool);
 		) ()
 	in
 	try
@@ -135,14 +131,16 @@ let delete_file f = try Sys.remove f with _ -> ()
 let maybe_generate_dump ctx tctx =
 	let com = tctx.Typecore.com in
 	if Common.defined com Define.Dump then begin
-		Codegen.Dump.dump_types com;
-		Option.may Codegen.Dump.dump_types (com.get_macros())
+		Timer.time ctx.timer_ctx ["generate";"dump"] (fun () ->
+			Dump.dump_types com;
+			Option.may Dump.dump_types (com.get_macros());
+		) ();
 	end;
 	if Common.defined com Define.DumpDependencies then begin
-		Codegen.Dump.dump_dependencies com;
+		Dump.dump_dependencies com;
 		if not com.is_macro_context then match tctx.Typecore.g.Typecore.macros with
 			| None -> ()
-			| Some(_,ctx) -> Codegen.Dump.dump_dependencies ~target_override:(Some "macro") ctx.Typecore.com
+			| Some(_,ctx) -> Dump.dump_dependencies ~target_override:(Some "macro") ctx.Typecore.com
 	end
 
 let generate ctx tctx ext actx =
@@ -150,7 +148,7 @@ let generate ctx tctx ext actx =
 	(* check file extension. In case of wrong commandline, we don't want
 		to accidentaly delete a source file. *)
 	if Path.file_extension com.file = ext then delete_file com.file;
-	if com.platform = Flash || com.platform = Cpp || com.platform = Hl then List.iter (Codegen.fix_overrides com) com.types;
+	if com.platform = Flash || com.platform = Cpp || com.platform = Hl then List.iter (FixOverrides.fix_overrides com) com.types;
 	begin match com.platform with
 		| Neko | Hl | Eval when actx.interp -> ()
 		| Cpp when Common.defined com Define.Cppia -> ()
@@ -158,7 +156,7 @@ let generate ctx tctx ext actx =
 		| _ -> Path.mkdir_from_path com.file
 	end;
 	if actx.interp then begin
-		let timer = Timer.timer ["interp"] in
+		let timer = Timer.start_timer ctx.timer_ctx ["interp"] in
 		let old = tctx.com.args in
 		tctx.com.args <- ctx.runtime_args;
 		let restore () =
@@ -200,8 +198,6 @@ let generate ctx tctx ext actx =
 		if name = "" then ()
 		else begin
 			Common.log com ("Generating " ^ name ^ ": " ^ com.file);
-			let t = Timer.timer ["generate";name] in
-			generate (Common.to_gctx com);
-			t()
+			Timer.time com.timer_ctx ["generate";name] generate (Common.to_gctx com);
 		end
 	end

+ 2 - 3
src/compiler/haxe.ml

@@ -43,12 +43,11 @@
 *)
 open Server
 
-let other = Timer.timer ["other"];;
+;;
 Sys.catch_break true;
 
 let args = List.tl (Array.to_list Sys.argv) in
 set_binary_mode_out stdout true;
 set_binary_mode_out stderr true;
 let sctx = ServerCompilationContext.create false in
-Server.process sctx (Communication.create_stdio ()) args;
-other()
+Server.process sctx (Communication.create_stdio ()) args;

+ 0 - 2
src/compiler/helper.ml

@@ -3,8 +3,6 @@ exception HelpMessage of string
 
 let is_debug_run = try Sys.getenv "HAXEDEBUG" = "1" with _ -> false
 
-let start_time = ref (Timer.get_time())
-
 let prompt = ref false
 
 let expand_env ?(h=None) path  =

+ 21 - 29
src/compiler/hxb/hxbLib.ml

@@ -2,7 +2,7 @@ open Globals
 open Common
 open ExtString
 
-class hxb_library file_path hxb_times = object(self)
+class hxb_library timer_ctx file_path hxb_times = object(self)
 	inherit abstract_hxb_lib
 	val zip = lazy (Zip.open_in file_path)
 
@@ -13,42 +13,34 @@ class hxb_library file_path hxb_times = object(self)
 	val mutable string_pool : string array option = None
 	val mutable macro_string_pool : string array option = None
 
+	method private do_load =
+		List.iter (function
+		| ({ Zip.is_directory = false; Zip.filename = filename } as entry) when String.ends_with filename ".hxb" ->
+			let pack = String.nsplit filename "/" in
+			begin match List.rev pack with
+				| [] -> ()
+				| name :: pack ->
+					let name = String.sub name 0 (String.length name - 4) in
+					let pack = List.rev pack in
+					Hashtbl.add modules (pack,name) (filename,entry);
+				end
+		| _ -> ()
+	) (Zip.entries (Lazy.force zip));
+
 	method load =
 		if not loaded then begin
 			loaded <- true;
-			let close = Timer.timer ["hxblib";"read"] in
-			List.iter (function
-				| ({ Zip.filename = "StringPool.hxb" | "StringPool.macro.hxb" as filename} as entry) ->
-					let reader = new HxbReader.hxb_reader (["hxb";"internal"],"StringPool") (HxbReader.create_hxb_reader_stats()) None hxb_times in
-					let zip = Lazy.force zip in
-					let data = Bytes.unsafe_of_string (Zip.read_entry zip entry) in
-					ignore(reader#read (new HxbReaderApi.hxb_reader_api_null) data STR);
-					if filename = "StringPool.hxb" then
-						string_pool <- reader#get_string_pool
-					else
-						macro_string_pool <- reader#get_string_pool
-				| ({ Zip.is_directory = false; Zip.filename = filename } as entry) when String.ends_with filename ".hxb" ->
-					let pack = String.nsplit filename "/" in
-					begin match List.rev pack with
-						| [] -> ()
-						| name :: pack ->
-							let name = String.sub name 0 (String.length name - 4) in
-							let pack = List.rev pack in
-							Hashtbl.add modules (pack,name) (filename,entry);
-						end
-				| _ -> ()
-			) (Zip.entries (Lazy.force zip));
-			close();
+			Timer.time timer_ctx ["hxblib";"read"] (fun () -> self#do_load) ()
 		end
 
 	method get_bytes (target : string) (path : path) =
 		try
 			let path = (target :: fst path,snd path) in
 			let (filename,entry) = Hashtbl.find modules path in
-			let close = Timer.timer ["hxblib";"get bytes"] in
-			let zip = Lazy.force zip in
-			let data = Zip.read_entry zip entry in
-			close();
+			let data = Timer.time timer_ctx ["hxblib";"get bytes"] (fun () ->
+				let zip = Lazy.force zip in
+				Zip.read_entry zip entry
+			) () in
 			Some (Bytes.unsafe_of_string data)
 		with Not_found ->
 			None
@@ -74,4 +66,4 @@ let create_hxb_lib com file_path =
 	with Not_found ->
 		failwith ("hxb lib " ^ file_path ^ " not found")
 	in
-	new hxb_library file (Common.defined com Define.HxbTimes)
+	new hxb_library com.timer_ctx file (if Common.defined com Define.HxbTimes then Some com.timer_ctx else None)

+ 187 - 118
src/compiler/hxb/hxbReader.ml

@@ -148,27 +148,25 @@ let dump_stats name stats =
 class hxb_reader
 	(mpath : path)
 	(stats : hxb_reader_stats)
-	(string_pool : string array option)
-	(timers_enabled : bool)
+	(timer_ctx : Timer.timer_context option)
 = object(self)
 	val mutable api = Obj.magic ""
-	val mutable minimal_restore = false
+	val mutable full_restore = true
 	val mutable current_module = null_module
 
 	val mutable ch = BytesWithPosition.create (Bytes.create 0)
-	val mutable has_string_pool = (string_pool <> None)
-	val mutable string_pool = (match string_pool with None -> Array.make 0 "" | Some pool -> pool)
+	val mutable string_pool = Array.make 0 ""
 	val mutable doc_pool = Array.make 0 ""
 
-	val mutable classes = Array.make 0 null_class
-	val mutable abstracts = Array.make 0 null_abstract
-	val mutable enums = Array.make 0 null_enum
-	val mutable typedefs = Array.make 0 null_typedef
+	val mutable classes = Array.make 0 (Lazy.from_val null_class)
+	val mutable abstracts = Array.make 0 (Lazy.from_val null_abstract)
+	val mutable enums = Array.make 0 (Lazy.from_val null_enum)
+	val mutable typedefs = Array.make 0 (Lazy.from_val null_typedef)
 	val mutable anons = Array.make 0 null_tanon
 	val mutable anon_fields = Array.make 0 null_field
 	val mutable tmonos = Array.make 0 (mk_mono())
-	val mutable class_fields = Array.make 0 null_field
-	val mutable enum_fields = Array.make 0 null_enum_field
+	val mutable class_fields = Array.make 0 (Lazy.from_val null_field)
+	val mutable enum_fields = Array.make 0 (Lazy.from_val null_enum_field)
 
 	val mutable type_type_parameters = Array.make 0 (mk_type_param null_class TPHType None None)
 	val mutable field_type_parameters = Array.make 0 (mk_type_param null_class TPHMethod None None)
@@ -179,16 +177,19 @@ class hxb_reader
 
 	method resolve_type pack mname tname =
 		try
-			api#resolve_type pack mname tname
+			let mt = api#resolve_type pack mname tname in
+			if not full_restore then begin
+				let mdep = (t_infos mt).mt_module in
+				if mdep != null_module && current_module.m_path != mdep.m_path then
+					current_module.m_extra.m_display_deps <- Some (PMap.add mdep.m_id (create_dependency mdep MDepFromTyping) (Option.get current_module.m_extra.m_display_deps))
+			end;
+			mt
 		with Not_found ->
 			dump_backtrace();
 			error (Printf.sprintf "[HXB] [%s] Cannot resolve type %s" (s_type_path current_module.m_path) (s_type_path ((pack @ [mname]),tname)))
 
-	method get_string_pool =
-		if has_string_pool then
-			Some (string_pool)
-		else
-			None
+	method make_lazy_type_dynamic f : Type.t =
+		api#make_lazy_type t_dynamic f
 
 	(* Primitives *)
 
@@ -300,10 +301,12 @@ class hxb_reader
 		typedefs.(read_uleb128 ch)
 
 	method read_field_ref =
-		class_fields.(read_uleb128 ch)
+		let cf = class_fields.(read_uleb128 ch) in
+		Lazy.force cf
 
 	method read_enum_field_ref =
-		enum_fields.(read_uleb128 ch)
+		let ef = enum_fields.(read_uleb128 ch) in
+		Lazy.force ef
 
 	method read_anon_ref =
 		match read_byte ch with
@@ -749,13 +752,18 @@ class hxb_reader
 			(mk_type_param { null_class with cl_path = path } TPHUnbound None None).ttp_type
 		| 10 ->
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			c.cl_type
 		| 11 ->
 			let en = self#read_enum_ref in
-			en.e_type
+			self#make_lazy_type_dynamic (fun () ->
+				(Lazy.force en).e_type
+			)
 		| 12 ->
 			let a = self#read_abstract_ref in
-			TType(abstract_module_type a [],[])
+			(* self#make_lazy_type_dynamic (fun () -> *)
+				TType(abstract_module_type (Lazy.force a) [],[])
+			(* ) *)
 		| 13 ->
 			let e = self#read_expr in
 			let c = {null_class with cl_kind = KExpr e; cl_module = current_module } in
@@ -814,68 +822,96 @@ class hxb_reader
 			TFun(args,ret)
 		| 40 ->
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			TInst(c,[])
 		| 41 ->
 			let c = self#read_class_ref in
 			let t1 = self#read_type_instance in
+			let c = Lazy.force c in
 			TInst(c,[t1])
 		| 42 ->
 			let c = self#read_class_ref in
 			let t1 = self#read_type_instance in
 			let t2 = self#read_type_instance in
+			let c = Lazy.force c in
 			TInst(c,[t1;t2])
 		| 49 ->
 			let c = self#read_class_ref in
 			let tl = self#read_types in
+			let c = Lazy.force c in
 			TInst(c,tl)
 		| 50 ->
 			let en = self#read_enum_ref in
-			TEnum(en,[])
+			self#make_lazy_type_dynamic (fun () ->
+				TEnum(Lazy.force en,[])
+			)
 		| 51 ->
 			let en = self#read_enum_ref in
 			let t1 = self#read_type_instance in
-			TEnum(en,[t1])
+			self#make_lazy_type_dynamic (fun () ->
+				TEnum(Lazy.force en,[t1])
+			)
 		| 52 ->
 			let en = self#read_enum_ref in
 			let t1 = self#read_type_instance in
 			let t2 = self#read_type_instance in
-			TEnum(en,[t1;t2])
+			self#make_lazy_type_dynamic (fun () ->
+				TEnum(Lazy.force en,[t1;t2])
+			)
 		| 59 ->
 			let e = self#read_enum_ref in
 			let tl = self#read_types in
-			TEnum(e,tl)
+			self#make_lazy_type_dynamic (fun () ->
+				TEnum(Lazy.force e,tl)
+			)
 		| 60 ->
 			let td = self#read_typedef_ref in
-			TType(td,[])
+			self#make_lazy_type_dynamic (fun () ->
+				TType(Lazy.force td,[])
+			);
 		| 61 ->
 			let td = self#read_typedef_ref in
 			let t1 = self#read_type_instance in
-			TType(td,[t1])
+			self#make_lazy_type_dynamic (fun () ->
+				TType(Lazy.force td,[t1])
+			)
 		| 62 ->
 			let td = self#read_typedef_ref in
 			let t1 = self#read_type_instance in
 			let t2 = self#read_type_instance in
-			TType(td,[t1;t2])
+			self#make_lazy_type_dynamic (fun () ->
+				TType(Lazy.force td,[t1;t2])
+			)
 		| 69 ->
 			let t = self#read_typedef_ref in
 			let tl = self#read_types in
-			TType(t,tl)
+			self#make_lazy_type_dynamic (fun () ->
+				TType(Lazy.force t,tl)
+			)
 		| 70 ->
 			let a = self#read_abstract_ref in
-			TAbstract(a,[])
+			(* self#make_lazy_type_dynamic (fun () -> *)
+				TAbstract(Lazy.force a,[])
+			(* ) *)
 		| 71 ->
 			let a = self#read_abstract_ref in
 			let t1 = self#read_type_instance in
-			TAbstract(a,[t1])
+			(* self#make_lazy_type_dynamic (fun () -> *)
+				TAbstract(Lazy.force a,[t1])
+			(* ) *)
 		| 72 ->
 			let a = self#read_abstract_ref in
 			let t1 = self#read_type_instance in
 			let t2 = self#read_type_instance in
-			TAbstract(a,[t1;t2])
+			(* self#make_lazy_type_dynamic (fun () -> *)
+				TAbstract(Lazy.force a,[t1;t2])
+			(* ) *)
 		| 79 ->
 			let a = self#read_abstract_ref in
 			let tl = self#read_types in
-			TAbstract(a,tl)
+			(* self#make_lazy_type_dynamic (fun () -> *)
+				TAbstract(Lazy.force a,tl)
+			(* ) *)
 		| 80 ->
 			empty_anon
 		| 81 ->
@@ -1189,11 +1225,6 @@ class hxb_reader
 						let e1 = loop () in
 						let e2 = loop () in
 						TWhile(e1,e2,DoWhile),(Some api#basic_types.tvoid)
-					| 86 ->
-						let v  = declare_local () in
-						let e1 = loop () in
-						let e2 = loop () in
-						TFor(v,e1,e2),(Some api#basic_types.tvoid)
 
 					(* control flow 90-99 *)
 					| 90 ->
@@ -1218,12 +1249,14 @@ class hxb_reader
 					| 102 ->
 						let e1 = loop () in
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let tl = self#read_types in
 						let cf = self#read_field_ref in
 						TField(e1,FInstance(c,tl,cf)),None
 					| 103 ->
 						let e1 = loop () in
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let cf = self#read_field_ref in
 						TField(e1,FStatic(c,cf)),None
 					| 104 ->
@@ -1233,6 +1266,7 @@ class hxb_reader
 					| 105 ->
 						let e1 = loop () in
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let tl = self#read_types in
 						let cf = self#read_field_ref in
 						TField(e1,FClosure(Some(c,tl),cf)),None
@@ -1244,6 +1278,7 @@ class hxb_reader
 						let e1 = loop () in
 						let en = self#read_enum_ref in
 						let ef = self#read_enum_field_ref in
+						let en = Lazy.force en in
 						TField(e1,FEnum(en,ef)),None
 					| 108 ->
 						let e1 = loop () in
@@ -1253,12 +1288,14 @@ class hxb_reader
 					| 110 ->
 						let p = read_relpos () in
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let cf = self#read_field_ref in
 						let e1 = Texpr.Builder.make_static_this c p in
 						TField(e1,FStatic(c,cf)),None
 					| 111 ->
 						let p = read_relpos () in
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let tl = self#read_types in
 						let cf = self#read_field_ref in
 						let ethis = mk (TConst TThis) (Option.get fctx.tthis) p in
@@ -1267,14 +1304,16 @@ class hxb_reader
 					(* module types 120-139 *)
 					| 120 ->
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						TTypeExpr (TClassDecl c),(Some c.cl_type)
 					| 121 ->
 						let en = self#read_enum_ref in
+						let en = Lazy.force en in
 						TTypeExpr (TEnumDecl en),(Some en.e_type)
 					| 122 ->
-						TTypeExpr (TAbstractDecl self#read_abstract_ref),None
+						TTypeExpr (TAbstractDecl (Lazy.force self#read_abstract_ref)),None
 					| 123 ->
-						TTypeExpr (TTypeDecl self#read_typedef_ref),None
+						TTypeExpr (TTypeDecl (Lazy.force self#read_typedef_ref)),None
 					| 124 ->
 						TCast(loop (),None),None
 					| 125 ->
@@ -1284,6 +1323,7 @@ class hxb_reader
 						TCast(e1,Some mt),None
 					| 126 ->
 						let c = self#read_class_ref in
+						let c = Lazy.force c in
 						let tl = self#read_types in
 						let el = loop_el() in
 						TNew(c,tl,el),None
@@ -1335,8 +1375,9 @@ class hxb_reader
 	method read_class_field_forward =
 		let name = self#read_string in
 		let pos,name_pos = self#read_pos_pair in
+		let cf_meta = self#read_metadata in
 		let overloads = self#read_list (fun () -> self#read_class_field_forward) in
-		{ null_field with cf_name = name; cf_pos = pos; cf_name_pos = name_pos; cf_overloads = overloads }
+		{ null_field with cf_name = name; cf_pos = pos; cf_name_pos = name_pos; cf_overloads = overloads; cf_meta = cf_meta }
 
 	method start_texpr =
 		begin match read_byte ch with
@@ -1394,7 +1435,6 @@ class hxb_reader
 		let flags = read_uleb128 ch in
 
 		let doc = self#read_option (fun () -> self#read_documentation) in
-		cf.cf_meta <- self#read_metadata;
 		let kind = self#read_field_kind in
 
 		let expr,expr_unoptimized = match read_byte ch with
@@ -1483,6 +1523,7 @@ class hxb_reader
 		infos.mt_params <- Array.to_list type_type_parameters;
 		infos.mt_using <- self#read_list (fun () ->
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			let p = self#read_pos in
 			(c,p)
 		)
@@ -1494,11 +1535,12 @@ class hxb_reader
 		| 3 -> KGeneric
 		| 4 ->
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			let tl = self#read_types in
 			KGenericInstance(c,tl)
 		| 5 -> KMacroType
 		| 6 -> KGenericBuild (self#read_list (fun () -> self#read_cfield))
-		| 7 -> KAbstractImpl self#read_abstract_ref
+		| 7 -> KAbstractImpl (Lazy.force self#read_abstract_ref)
 		| 8 -> KModuleFields current_module
 		| i ->
 			error (Printf.sprintf "Invalid class kind id: %i" i)
@@ -1508,6 +1550,7 @@ class hxb_reader
 		c.cl_kind <- self#read_class_kind;
 		let read_relation () =
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			let tl = self#read_types in
 			(c,tl)
 		in
@@ -1521,7 +1564,7 @@ class hxb_reader
 
 	method read_abstract (a : tabstract) =
 		self#read_common_module_type (Obj.magic a);
-		a.a_impl <- self#read_option (fun () -> self#read_class_ref);
+		a.a_impl <- self#read_option (fun () -> Lazy.force self#read_class_ref);
 		begin match read_byte ch with
 			| 0 ->
 				a.a_this <- TAbstract(a,extract_param_types a.a_params)
@@ -1538,6 +1581,7 @@ class hxb_reader
 		a.a_read <- self#read_option (fun () -> self#read_field_ref);
 		a.a_write <- self#read_option (fun () -> self#read_field_ref);
 		a.a_call <- self#read_option (fun () -> self#read_field_ref);
+		a.a_constructor <- self#read_option (fun () -> self#read_field_ref);
 
 		a.a_ops <- self#read_list (fun () ->
 			let i = read_byte ch in
@@ -1600,7 +1644,10 @@ class hxb_reader
 		let a = Array.init l (fun i ->
 			let en = self#read_enum_ref in
 			let name = self#read_string in
-			PMap.find name en.e_constrs
+			Lazy.from_fun (fun () ->
+				let en = Lazy.force en in
+				PMap.find name en.e_constrs
+			)
 		) in
 		enum_fields <- a
 
@@ -1635,44 +1682,56 @@ class hxb_reader
 				| 3 -> CfrInit
 				| _ -> die "" __LOC__
 			in
-			let cf =  match kind with
-				| CfrStatic ->
-					let name = self#read_string in
-					begin try
-						PMap.find name c.cl_statics
-					with Not_found ->
-						raise (HxbFailure (Printf.sprintf "Could not read static field %s on %s while hxbing %s" name (s_type_path c.cl_path) (s_type_path current_module.m_path)))
-					end;
+			let name = match kind with
+				| CfrStatic
 				| CfrMember ->
-					let name = self#read_string in
-					begin try
-						PMap.find name c.cl_fields
-					with Not_found ->
-						raise (HxbFailure (Printf.sprintf "Could not read instance field %s on %s while hxbing %s" name (s_type_path c.cl_path) (s_type_path current_module.m_path)))
-					end
-				| CfrConstructor ->
-					Option.get c.cl_constructor
+					Some self#read_string
+				| CfrConstructor
 				| CfrInit ->
-					Option.get c.cl_init
-			in
-			let pick_overload cf depth =
-				let rec loop depth cfl = match cfl with
-					| cf :: cfl ->
-						if depth = 0 then
-							cf
-						else
-							loop (depth - 1) cfl
-					| [] ->
-						raise (HxbFailure (Printf.sprintf "Bad overload depth for %s on %s: %i" cf.cf_name (s_type_path c.cl_path) depth))
-				in
-				let cfl = cf :: cf.cf_overloads in
-				loop depth cfl
+					None
 			in
 			let depth = read_uleb128 ch in
-			if depth = 0 then
-				cf
-			else
-				pick_overload cf depth;
+
+			Lazy.from_fun (fun () ->
+				let c = Lazy.force c in
+				let cf = match kind with
+					| CfrStatic ->
+						let name = Option.get name in
+						begin try
+							PMap.find name c.cl_statics
+						with Not_found ->
+							raise (HxbFailure (Printf.sprintf "Could not read static field %s on %s while hxbing %s" name (s_type_path c.cl_path) (s_type_path current_module.m_path)))
+						end;
+					| CfrMember ->
+						let name = Option.get name in
+						begin try
+							PMap.find name c.cl_fields
+						with Not_found ->
+							raise (HxbFailure (Printf.sprintf "Could not read instance field %s on %s while hxbing %s" name (s_type_path c.cl_path) (s_type_path current_module.m_path)))
+						end
+					| CfrConstructor ->
+						Option.get c.cl_constructor
+					| CfrInit ->
+						Option.get c.cl_init
+				in
+				let pick_overload cf depth =
+					let rec loop depth cfl = match cfl with
+						| cf :: cfl ->
+							if depth = 0 then
+								cf
+							else
+								loop (depth - 1) cfl
+						| [] ->
+							raise (HxbFailure (Printf.sprintf "Bad overload depth for %s on %s: %i" cf.cf_name (s_type_path c.cl_path) depth))
+					in
+					let cfl = cf :: cf.cf_overloads in
+					loop depth cfl
+				in
+				if depth = 0 then
+					cf
+				else
+					pick_overload cf depth;
+			)
 		) in
 		class_fields <- a
 
@@ -1680,12 +1739,14 @@ class hxb_reader
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
 			let c = classes.(i) in
+			let c = Lazy.force c in
 			self#read_class_fields c;
 		done
 
 	method read_exd =
 		ignore(self#read_list (fun () ->
 			let c = self#read_class_ref in
+			let c = Lazy.force c in
 			self#read_list (fun () ->
 				let cf = self#read_field_ref in
 				let length = read_uleb128 ch in
@@ -1708,14 +1769,12 @@ class hxb_reader
 					read_expressions ()
 				else begin
 					let t = cf.cf_type in
-					let r = ref (lazy_available t) in
-					r := lazy_wait (fun() ->
+					let tl = api#make_lazy_type cf.cf_type (fun () ->
 						cf.cf_type <- t;
-						r := lazy_available t;
-						read_expressions ();
+						read_expressions();
 						t
-					);
-					cf.cf_type <- TLazy r
+					) in
+					cf.cf_type <- tl
 				end
 			)
 		))
@@ -1723,7 +1782,7 @@ class hxb_reader
 	method read_afd =
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
-			let a = abstracts.(i) in
+			let a = Lazy.force abstracts.(i) in
 			self#read_abstract_fields a;
 		done
 
@@ -1731,27 +1790,28 @@ class hxb_reader
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
 			let c = classes.(i) in
+			let c = Lazy.force c in
 			self#read_class c;
 		done
 
 	method read_abd =
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
-			let a = abstracts.(i) in
+			let a = Lazy.force abstracts.(i) in
 			self#read_abstract a;
 		done
 
 	method read_end =
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
-			let en = enums.(i) in
+			let en = Lazy.force enums.(i) in
 			self#read_enum en;
 		done
 
 	method read_efd =
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
-			let e = enums.(i) in
+			let e = Lazy.force enums.(i) in
 			self#read_enum_fields e;
 			Type.unify (TType(enum_module_type e,[])) e.e_type
 		done
@@ -1785,52 +1845,60 @@ class hxb_reader
 	method read_tdd =
 		let l = read_uleb128 ch in
 		for i = 0 to l - 1 do
-			let t = typedefs.(i) in
+			let t = Lazy.force typedefs.(i) in
 			self#read_typedef t;
 		done
 
 	method read_clr =
 		let l = read_uleb128 ch in
 		classes <- (Array.init l (fun i ->
-				let (pack,mname,tname) = self#read_full_path in
+			let (pack,mname,tname) = self#read_full_path in
+			Lazy.from_fun (fun () ->
 				match self#resolve_type pack mname tname with
 				| TClassDecl c ->
 					c
 				| _ ->
 					error ("Unexpected type where class was expected: " ^ (s_type_path (pack,tname)))
+			)
 		))
 
 	method read_abr =
 		let l = read_uleb128 ch in
 		abstracts <- (Array.init l (fun i ->
 			let (pack,mname,tname) = self#read_full_path in
-			match self#resolve_type pack mname tname with
-			| TAbstractDecl a ->
-				a
-			| _ ->
-				error ("Unexpected type where abstract was expected: " ^ (s_type_path (pack,tname)))
+			Lazy.from_fun (fun () ->
+				match self#resolve_type pack mname tname with
+				| TAbstractDecl a ->
+					a
+				| _ ->
+					error ("Unexpected type where abstract was expected: " ^ (s_type_path (pack,tname)))
+			)
 		))
 
 	method read_enr =
 		let l = read_uleb128 ch in
 		enums <- (Array.init l (fun i ->
 			let (pack,mname,tname) = self#read_full_path in
-			match self#resolve_type pack mname tname with
-			| TEnumDecl en ->
-				en
-			| _ ->
-				error ("Unexpected type where enum was expected: " ^ (s_type_path (pack,tname)))
+			Lazy.from_fun (fun () ->
+				match self#resolve_type pack mname tname with
+				| TEnumDecl en ->
+					en
+				| _ ->
+					error ("Unexpected type where enum was expected: " ^ (s_type_path (pack,tname)))
+			)
 		))
 
 	method read_tdr =
 		let l = read_uleb128 ch in
 		typedefs <- (Array.init l (fun i ->
 			let (pack,mname,tname) = self#read_full_path in
-			match self#resolve_type pack mname tname with
-			| TTypeDecl tpd ->
-				tpd
-			| _ ->
-				error ("Unexpected type where typedef was expected: " ^ (s_type_path (pack,tname)))
+			Lazy.from_fun (fun () ->
+				match self#resolve_type pack mname tname with
+				| TTypeDecl tpd ->
+					tpd
+				| _ ->
+					error ("Unexpected type where typedef was expected: " ^ (s_type_path (pack,tname)))
+			)
 		))
 
 	method read_imports =
@@ -1908,12 +1976,12 @@ class hxb_reader
 			| 2 ->
 				let td = mk_typedef current_module path pos name_pos (mk_mono()) in
 				td.t_params <- Array.to_list params;
-				typedefs <- Array.append typedefs (Array.make 1 td);
+				typedefs <- Array.append typedefs (Array.make 1 (Lazy.from_val td));
 				TTypeDecl td
 			| 3 ->
 				let a = mk_abstract current_module path pos name_pos in
 				a.a_params <- Array.to_list params;
-				abstracts <- Array.append abstracts (Array.make 1 a);
+				abstracts <- Array.append abstracts (Array.make 1 (Lazy.from_val a));
 				TAbstractDecl a
 			| _ ->
 				error ("Invalid type kind: " ^ (string_of_int kind));
@@ -1939,18 +2007,17 @@ class hxb_reader
 		match kind with
 		| STR ->
 			string_pool <- self#read_string_pool;
-			has_string_pool <- true;
 		| DOC ->
 			doc_pool <- self#read_string_pool;
 		| MDF ->
-			assert(has_string_pool);
 			current_module <- self#read_mdf;
 			incr stats.modules_partially_restored;
+			if not full_restore then current_module.m_extra.m_display_deps <- Some PMap.empty
 		| MTF ->
 			current_module.m_types <- self#read_mtf;
 			api#add_module current_module;
 		| IMP ->
-			if not minimal_restore then self#read_imports;
+			if full_restore then self#read_imports;
 		| CLR ->
 			self#read_clr;
 		| ENR ->
@@ -2002,13 +2069,15 @@ class hxb_reader
 			^ "Attach the following information:"
 		in
 		let backtrace = Printexc.raw_backtrace_to_string backtrace in
-		let s = Printf.sprintf "%s\nHaxe: %s\n%s" msg s_version_full backtrace in
-		failwith s
+		raise (Globals.Ice (msg, backtrace))
 
 	method private read_chunk_data kind =
 		let path = String.concat "_" (ExtLib.String.nsplit (s_type_path mpath) ".") in
 		let id = ["hxb";"read";string_of_chunk_kind kind;path] in
-		let close = if timers_enabled then Timer.timer id else fun() -> () in
+		let close = match timer_ctx with
+			| Some timer_ctx -> Timer.start_timer timer_ctx id
+			| None -> (fun () -> ())
+		in
 		try
 			self#read_chunk_data' kind
 		with Invalid_argument msg -> begin
@@ -2018,11 +2087,11 @@ class hxb_reader
 		close()
 
 	method read_chunks (new_api : hxb_reader_api) (chunks : cached_chunks) =
-		fst (self#read_chunks_until new_api chunks EOM false)
+		fst (self#read_chunks_until new_api chunks EOM true)
 
-	method read_chunks_until (new_api : hxb_reader_api) (chunks : cached_chunks) end_chunk minimal_restore' =
+	method read_chunks_until (new_api : hxb_reader_api) (chunks : cached_chunks) end_chunk full_restore' =
 		api <- new_api;
-		minimal_restore <- minimal_restore';
+		full_restore <- full_restore';
 		let rec loop = function
 			| (kind,data) :: chunks ->
 				ch <- BytesWithPosition.create data;
@@ -2035,7 +2104,7 @@ class hxb_reader
 
 	method read (new_api : hxb_reader_api) (bytes : bytes) =
 		api <- new_api;
-		minimal_restore <- false;
+		full_restore <- true;
 		ch <- BytesWithPosition.create bytes;
 		if (Bytes.to_string (read_bytes ch 3)) <> "hxb" then
 			raise (HxbFailure "magic");

+ 2 - 0
src/compiler/hxb/hxbReaderApi.ml

@@ -9,6 +9,7 @@ class virtual hxb_reader_api = object(self)
 	method virtual basic_types : basic_types
 	method virtual get_var_id : int -> int
 	method virtual read_expression_eagerly : tclass_field -> bool
+	method virtual make_lazy_type : Type.t -> (unit -> Type.t) -> Type.t
 end
 
 class hxb_reader_api_null = object(self)
@@ -21,4 +22,5 @@ class hxb_reader_api_null = object(self)
 	method basic_types = assert false
 	method get_var_id _ = assert false
 	method read_expression_eagerly _ = assert false
+	method make_lazy_type _ _ = assert false
 end

+ 46 - 81
src/compiler/hxb/hxbWriter.ml

@@ -45,17 +45,6 @@ let unop_index op flag = match op,flag with
 	| NegBits,Postfix -> 10
 	| Spread,Postfix -> 11
 
-module StringHashtbl = Hashtbl.Make(struct
-	type t = string
-
-	let equal =
-		String.equal
-
-	let hash s =
-		(* What's the best here? *)
-		Hashtbl.hash s
-end)
-
 module Pool = struct
 	type ('key,'value) t = {
 		lut : ('key,int) Hashtbl.t;
@@ -408,16 +397,13 @@ type hxb_writer = {
 	config : HxbWriterConfig.writer_target_config;
 	warn : Warning.warning -> string -> Globals.pos -> unit;
 	anon_id : Type.t Tanon_identification.tanon_identification;
+	identified_anons : (tanon,int) IdentityPool.t;
 	mutable current_module : module_def;
 	chunks : Chunk.t DynArray.t;
-	has_own_string_pool : bool;
 	cp : StringPool.t;
 	docs : StringPool.t;
 	mutable chunk : Chunk.t;
 
-	mutable in_expr : bool;
-	mutable sig_deps : module_def list;
-
 	classes : (path,tclass) Pool.t;
 	enums : (path,tenum) Pool.t;
 	typedefs : (path,tdef) Pool.t;
@@ -440,6 +426,7 @@ type hxb_writer = {
 	mutable wrote_local_type_param : bool;
 	mutable needs_local_context : bool;
 	unbound_ttp : (typed_type_param,unit) IdentityPool.t;
+	unclosed_mono : (tmono,unit) IdentityPool.t;
 	t_instance_chunk : Chunk.t;
 }
 
@@ -454,8 +441,7 @@ module HxbWriter = struct
 			^ "Attach the following information:"
 		in
 		let backtrace = Printexc.raw_backtrace_to_string backtrace in
-		let s = Printf.sprintf "%s\nHaxe: %s\n%s" msg s_version_full backtrace in
-		failwith s
+		raise (Globals.Ice (msg, backtrace))
 
 	let in_nested_scope writer = match writer.field_stack with
 		| [] -> false (* can happen for cl_init and in EXD *)
@@ -869,28 +855,20 @@ module HxbWriter = struct
 
 	(* References *)
 
-	let maybe_add_sig_dep writer m =
-		if not writer.in_expr && m.m_path <> writer.current_module.m_path && not (List.exists (fun m' -> m'.m_path = m.m_path) writer.sig_deps) then
-			writer.sig_deps <- m :: writer.sig_deps
-
 	let write_class_ref writer (c : tclass) =
 		let i = Pool.get_or_add writer.classes c.cl_path c in
-		maybe_add_sig_dep writer c.cl_module;
 		Chunk.write_uleb128 writer.chunk i
 
 	let write_enum_ref writer (en : tenum) =
 		let i = Pool.get_or_add writer.enums en.e_path en in
-		maybe_add_sig_dep writer en.e_module;
 		Chunk.write_uleb128 writer.chunk i
 
 	let write_typedef_ref writer (td : tdef) =
 		let i = Pool.get_or_add writer.typedefs td.t_path td in
-		maybe_add_sig_dep writer td.t_module;
 		Chunk.write_uleb128 writer.chunk i
 
 	let write_abstract_ref writer (a : tabstract) =
 		let i = Pool.get_or_add writer.abstracts a.a_path a in
-		maybe_add_sig_dep writer a.a_module;
 		Chunk.write_uleb128 writer.chunk i
 
 	let write_tmono_ref writer (mono : tmono) =
@@ -1028,26 +1006,33 @@ module HxbWriter = struct
 		end
 
 	and write_anon_ref writer (an : tanon) =
-		let pfm = Option.get (writer.anon_id#identify_anon ~strict:true an) in
 		try
-			let index = Pool.get writer.anons pfm.pfm_path in
+			let index = IdentityPool.get writer.identified_anons an in
 			Chunk.write_u8 writer.chunk 0;
 			Chunk.write_uleb128 writer.chunk index
 		with Not_found ->
-			let restore = start_temporary_chunk writer 256 in
-			writer.needs_local_context <- false;
-			write_anon writer an;
-			let bytes = restore (fun new_chunk -> Chunk.get_bytes new_chunk) in
-			if writer.needs_local_context then begin
-				let index = Pool.add writer.anons pfm.pfm_path None in
-				Chunk.write_u8 writer.chunk 1;
-				Chunk.write_uleb128 writer.chunk index;
-				Chunk.write_bytes writer.chunk bytes
-			end else begin
-				let index = Pool.add writer.anons pfm.pfm_path (Some bytes) in
+			let pfm = writer.anon_id#identify_anon ~strict:true an in
+			try
+				let index = Pool.get writer.anons pfm.pfm_path in
 				Chunk.write_u8 writer.chunk 0;
-				Chunk.write_uleb128 writer.chunk index;
-			end
+				Chunk.write_uleb128 writer.chunk index
+			with Not_found ->
+				let restore = start_temporary_chunk writer 256 in
+				writer.needs_local_context <- false;
+				write_anon writer an;
+				let bytes = restore (fun new_chunk -> Chunk.get_bytes new_chunk) in
+				if writer.needs_local_context then begin
+					let index = Pool.add writer.anons pfm.pfm_path None in
+					ignore(IdentityPool.add writer.identified_anons an index);
+					Chunk.write_u8 writer.chunk 1;
+					Chunk.write_uleb128 writer.chunk index;
+					Chunk.write_bytes writer.chunk bytes
+				end else begin
+					let index = Pool.add writer.anons pfm.pfm_path (Some bytes) in
+					ignore(IdentityPool.add writer.identified_anons an index);
+					Chunk.write_u8 writer.chunk 0;
+					Chunk.write_uleb128 writer.chunk index;
+				end
 
 	and write_anon_field_ref writer cf =
 		try
@@ -1197,7 +1182,19 @@ module HxbWriter = struct
 			| TInst ({cl_path = ([],"String")},[]) ->
 				Chunk.write_u8 writer.chunk 104;
 			| TMono r ->
-				Monomorph.close r;
+				(try Monomorph.close r with TUnification.Unify_error e ->
+					try ignore(IdentityPool.get writer.unclosed_mono r) with Not_found -> begin
+						ignore(IdentityPool.add writer.unclosed_mono r ());
+
+						let p = file_pos (Path.UniqueKey.lazy_path writer.current_module.m_extra.m_file) in
+						let msg = Printf.sprintf
+							"Error while handling unclosed monomorph:\n%s\n\n%s"
+								(Error.error_msg (Unify e))
+								"Unclosed monomorph should not reach hxb writer, please submit an issue at https://github.com/HaxeFoundation/haxe/issues/new"
+						in
+						writer.warn WUnclosedMonomorph msg p
+					end;
+				);
 				begin match r.tm_type with
 				| None ->
 					Chunk.write_u8 writer.chunk 0;
@@ -1486,12 +1483,6 @@ module HxbWriter = struct
 				loop e1;
 				loop e2;
 				false;
-			| TFor(v,e1,e2) ->
-				Chunk.write_u8 writer.chunk 86;
-				declare_var v;
-				loop e1;
-				loop e2;
-				false;
 			(* control flow 90-99 *)
 			| TReturn None ->
 				Chunk.write_u8 writer.chunk 90;
@@ -1744,6 +1735,7 @@ module HxbWriter = struct
 	and write_class_field_forward writer cf =
 		Chunk.write_string writer.chunk cf.cf_name;
 		write_pos_pair writer cf.cf_pos cf.cf_name_pos;
+		write_metadata writer cf.cf_meta;
 		Chunk.write_list writer.chunk cf.cf_overloads (fun cf ->
 			write_class_field_forward writer cf;
 		);
@@ -1792,7 +1784,6 @@ module HxbWriter = struct
 		write_type_instance writer cf.cf_type;
 		Chunk.write_uleb128 writer.chunk cf.cf_flags;
 		maybe_write_documentation writer cf.cf_doc;
-		write_metadata writer cf.cf_meta;
 		write_field_kind writer cf.cf_kind;
 		let expr_chunk = match cf.cf_expr with
 			| None ->
@@ -1801,21 +1792,15 @@ module HxbWriter = struct
 			| Some e when not write_expr_immediately ->
 				Chunk.write_u8 writer.chunk 2;
 				let fctx,close = start_texpr writer e.epos in
-				let old = writer.in_expr in
-				writer.in_expr <- true;
 				write_texpr writer fctx e;
 				Chunk.write_option writer.chunk cf.cf_expr_unoptimized (write_texpr writer fctx);
-				writer.in_expr <- old;
 				let expr_chunk = close() in
 				Some expr_chunk
 			| Some e ->
 				Chunk.write_u8 writer.chunk 1;
 				let fctx,close = start_texpr writer e.epos in
-				let old = writer.in_expr in
-				writer.in_expr <- true;
 				write_texpr writer fctx e;
 				Chunk.write_option writer.chunk cf.cf_expr_unoptimized (write_texpr writer fctx);
-				writer.in_expr <- old;
 				let expr_pre_chunk,expr_chunk = close() in
 				Chunk.export_data expr_pre_chunk writer.chunk;
 				Chunk.export_data expr_chunk writer.chunk;
@@ -1928,6 +1913,7 @@ module HxbWriter = struct
 		Chunk.write_option writer.chunk a.a_read (write_field_ref writer c CfrStatic );
 		Chunk.write_option writer.chunk a.a_write (write_field_ref writer c CfrStatic);
 		Chunk.write_option writer.chunk a.a_call (write_field_ref writer c CfrStatic);
+		Chunk.write_option writer.chunk a.a_constructor (write_field_ref writer c CfrStatic);
 
 		Chunk.write_list writer.chunk a.a_ops (fun (op, cf) ->
 			Chunk.write_u8 writer.chunk (binop_index op);
@@ -2263,27 +2249,12 @@ module HxbWriter = struct
 			end;
 		end;
 
-		(* Note: this is only a start, and is still including a lot of dependencies *)
-		(* that are not actually needed for signature only. *)
-		let sig_deps = ref PMap.empty in
-		List.iter (fun mdep ->
-			let dep = {md_sign = mdep.m_extra.m_sign; md_path = mdep.m_path; md_kind = mdep.m_extra.m_kind; md_origin = MDepFromTyping} in
-			sig_deps := PMap.add mdep.m_id dep !sig_deps;
-		) writer.sig_deps;
-		PMap.iter (fun id mdep -> match mdep.md_kind, mdep.md_origin with
-			| (MCode | MExtern), MDepFromMacro when mdep.md_sign = m.m_extra.m_sign -> sig_deps := PMap.add id mdep !sig_deps;
-			| _ -> ()
-		) m.m_extra.m_deps;
-		m.m_extra.m_sig_deps <- Some !sig_deps;
-
 		start_chunk writer EOT;
 		start_chunk writer EOF;
 		start_chunk writer EOM;
 
-		if writer.has_own_string_pool then begin
-			let a = StringPool.finalize writer.cp in
-			write_string_pool writer STR a
-		end;
+		let a = StringPool.finalize writer.cp in
+		write_string_pool writer STR a;
 		begin
 			let a = StringPool.finalize writer.docs in
 			if a.length > 0 then
@@ -2298,23 +2269,16 @@ module HxbWriter = struct
 		l
 end
 
-let create config string_pool warn anon_id =
-	let cp,has_own_string_pool = match string_pool with
-		| None ->
-			StringPool.create(),true
-		| Some pool ->
-			pool,false
-	in
+let create config warn anon_id =
+	let cp = StringPool.create() in
 	{
 		config;
 		warn;
 		anon_id;
+		identified_anons = IdentityPool.create();
 		current_module = null_module;
 		chunks = DynArray.create ();
 		cp = cp;
-		has_own_string_pool;
-		sig_deps = [];
-		in_expr = false;
 		docs = StringPool.create ();
 		chunk = Obj.magic ();
 		classes = Pool.create ();
@@ -2338,6 +2302,7 @@ let create config string_pool warn anon_id =
 		wrote_local_type_param = false;
 		needs_local_context = false;
 		unbound_ttp = IdentityPool.create ();
+		unclosed_mono = IdentityPool.create ();
 		t_instance_chunk = Chunk.create EOM cp 32;
 	}
 

+ 0 - 2
src/compiler/hxb/hxbWriterConfig.ml

@@ -11,7 +11,6 @@ type writer_target_config = {
 
 type t = {
 	mutable archive_path : string;
-	mutable share_string_pool : bool;
 	target_config : writer_target_config;
 	macro_config : writer_target_config;
 }
@@ -26,7 +25,6 @@ let create_target_config () = {
 
 let create () = {
 	archive_path = "";
-	share_string_pool = true; (* Do we want this as default? *)
 	target_config = create_target_config ();
 	macro_config = create_target_config ()
 }

+ 14 - 14
src/compiler/messageReporting.ml

@@ -74,7 +74,7 @@ let create_error_context absolute_positions = {
 	previous = None;
 }
 
-let compiler_pretty_message_string com ectx cm =
+let compiler_pretty_message_string defines ectx cm =
 	match cm.cm_message with
 	(* Filter some messages that don't add much when using this message renderer *)
 	| "End of overload failure reasons" -> None
@@ -142,7 +142,7 @@ let compiler_pretty_message_string com ectx cm =
 
 		let gutter_len = (try String.length (Printf.sprintf "%d" (IntMap.find cm.cm_depth ectx.max_lines)) with Not_found -> 0) + 2 in
 
-		let no_color = Define.defined com.defines Define.MessageNoColor in
+		let no_color = Define.defined defines Define.MessageNoColor in
 		let c_reset = if no_color then "" else "\x1b[0m" in
 		let c_bold = if no_color then "" else "\x1b[1m" in
 		let c_dim = if no_color then "" else "\x1b[2m" in
@@ -316,21 +316,21 @@ let get_max_line max_lines messages =
 		else max_lines
 	) max_lines messages
 
-let display_source_at com p =
-	let absolute_positions = Define.defined com.defines Define.MessageAbsolutePositions in
+let display_source_at defines p =
+	let absolute_positions = Define.defined defines Define.MessageAbsolutePositions in
 	let ectx = create_error_context absolute_positions in
 	let msg = make_compiler_message "" p 0 MessageKind.DKCompilerMessage MessageSeverity.Information in
 	ectx.max_lines <- get_max_line ectx.max_lines [msg];
-	match compiler_pretty_message_string com ectx msg with
+	match compiler_pretty_message_string defines ectx msg with
 		| None -> ()
 		| Some s -> prerr_endline s
 
 exception ConfigError of string
 
-let get_formatter com def default =
-	let format_mode = Define.defined_value_safe ~default com.defines def in
+let get_formatter defines def default =
+	let format_mode = Define.defined_value_safe ~default defines def in
 	match format_mode with
-		| "pretty" -> compiler_pretty_message_string com
+		| "pretty" -> compiler_pretty_message_string defines
 		| "indent" -> compiler_indented_message_string
 		| "classic" -> compiler_message_string
 		| m -> begin
@@ -345,11 +345,11 @@ let print_error (err : Error.error) =
 	) err;
 	!ret
 
-let format_messages com messages =
-	let absolute_positions = Define.defined com.defines Define.MessageAbsolutePositions in
+let format_messages defines messages =
+	let absolute_positions = Define.defined defines Define.MessageAbsolutePositions in
 	let ectx = create_error_context absolute_positions in
 	ectx.max_lines <- get_max_line ectx.max_lines messages;
-	let message_formatter = get_formatter com Define.MessageReporting "pretty" in
+	let message_formatter = get_formatter defines Define.MessageReporting "pretty" in
 	let lines = List.rev (
 		List.fold_left (fun lines cm -> match (message_formatter ectx cm) with
 			| None -> lines
@@ -369,14 +369,14 @@ let display_messages ctx on_message = begin
 	in
 
 	let get_formatter _ def default =
-		try get_formatter ctx.com def default
+		try get_formatter ctx.com.defines def default
 		with | ConfigError s ->
 			error s;
 			compiler_message_string
 	in
 
-	let message_formatter = get_formatter ctx.com Define.MessageReporting "pretty" in
-	let log_formatter = get_formatter ctx.com Define.MessageLogFormat "indent" in
+	let message_formatter = get_formatter ctx.com.defines Define.MessageReporting "pretty" in
+	let log_formatter = get_formatter ctx.com.defines Define.MessageLogFormat "indent" in
 
 	let log_messages = ref (Define.defined ctx.com.defines Define.MessageLogFile) in
 	let log_message = ref None in

+ 53 - 60
src/compiler/server.ml

@@ -1,7 +1,6 @@
 open Globals
 open Common
 open CompilationCache
-open Timer
 open Type
 open DisplayProcessingGlobals
 open Ipaddr
@@ -54,16 +53,16 @@ let parse_file cs com (rfile : ClassPaths.resolved_file) p =
 		TypeloadParse.parse_file_from_string com file p stdin
 	| _ ->
 		let ftime = file_time ffile in
-		let data = Std.finally (Timer.timer ["server";"parser cache"]) (fun () ->
+		let data = Std.finally (Timer.start_timer com.timer_ctx ["server";"parser cache"]) (fun () ->
 			try
 				let cfile = cc#find_file fkey in
 				if cfile.c_time <> ftime then raise Not_found;
-				Parser.ParseSuccess((cfile.c_package,cfile.c_decls),false,cfile.c_pdi)
+				Parser.ParseSuccess((cfile.c_package,cfile.c_decls),cfile.c_pdi)
 			with Not_found ->
 				let parse_result = TypeloadParse.parse_file com rfile p in
 				let info,is_unusual = match parse_result with
 					| ParseError(_,_,_) -> "not cached, has parse error",true
-					| ParseSuccess(data,is_display_file,pdi) ->
+					| ParseSuccess(data,pdi) ->
 						if is_display_file then begin
 							if pdi.pd_errors <> [] then
 								"not cached, is display file with parse errors",true
@@ -76,7 +75,7 @@ let parse_file cs com (rfile : ClassPaths.resolved_file) p =
 							(* We assume that when not in display mode it's okay to cache stuff that has #if display
 							checks. The reasoning is that non-display mode has more information than display mode. *)
 							if com.display.dms_full_typing then raise Not_found;
-							let ident = Hashtbl.find Parser.special_identifier_files fkey in
+							let ident = ThreadSafeHashtbl.find com.parser_state.special_identifier_files fkey in
 							Printf.sprintf "not cached, using \"%s\" define" ident,true
 						with Not_found ->
 							cc#cache_file fkey (ClassPaths.create_resolved_file ffile rfile.class_path) ftime data pdi;
@@ -113,10 +112,9 @@ module Communication = struct
 				end;
 				flush stdout;
 			);
-			exit = (fun code ->
+			exit = (fun timer_ctx code ->
 				if code = 0 then begin
-					Timer.close_times();
-					if !Timer.measure_times then Timer.report_times (fun s -> self.write_err (s ^ "\n"));
+					if timer_ctx.measure_times = Yes then Timer.report_times timer_ctx (fun s -> self.write_err (s ^ "\n"));
 				end;
 				exit code;
 			);
@@ -141,15 +139,13 @@ module Communication = struct
 
 					sctx.was_compilation <- ctx.com.display.dms_full_typing;
 					if has_error ctx then begin
-						measure_times := false;
+						ctx.timer_ctx.measure_times <- No;
 						write "\x02\n"
-					end else begin
-						Timer.close_times();
-						if !Timer.measure_times then Timer.report_times (fun s -> self.write_err (s ^ "\n"));
-					end
+					end else
+						if ctx.timer_ctx.measure_times = Yes then Timer.report_times ctx.timer_ctx (fun s -> self.write_err (s ^ "\n"));
 				)
 			);
-			exit = (fun i ->
+			exit = (fun timer_ctx i ->
 				()
 			);
 			is_server = true;
@@ -163,7 +159,6 @@ let stat dir =
 
 (* Gets a list of changed directories for the current compilation. *)
 let get_changed_directories sctx com =
-	let t = Timer.timer ["server";"module cache";"changed dirs"] in
 	let cs = sctx.cs in
 	let sign = Define.get_signature com.defines in
 	let dirs = try
@@ -223,9 +218,18 @@ let get_changed_directories sctx com =
 		Hashtbl.add sctx.changed_directories sign dirs;
 		dirs
 	in
-	t();
 	dirs
 
+let get_changed_directories sctx com =
+	Timer.time com.Common.timer_ctx ["server";"module cache";"changed dirs"] (get_changed_directories sctx) com
+
+let full_typing com m_extra =
+	com.is_macro_context
+	|| com.display.dms_full_typing
+	|| Define.defined com.defines Define.DisableHxbCache
+	|| Define.defined com.defines Define.DisableHxbOptimizations
+	|| DisplayPosition.display_position#is_in_file (Path.UniqueKey.lazy_key m_extra.m_file)
+
 (* Checks if module [m] can be reused from the cache and returns None in that case. Otherwise, returns
    [Some m'] where [m'] is the module responsible for [m] not being reusable. *)
 let check_module sctx com m_path m_extra p =
@@ -291,7 +295,7 @@ let check_module sctx com m_path m_extra p =
 				end
 		in
 		let has_policy policy = List.mem policy m_extra.m_check_policy || match policy with
-			| NoFileSystemCheck when !ServerConfig.do_not_check_modules && !Parser.display_mode <> DMNone -> true
+			| NoFileSystemCheck when !ServerConfig.do_not_check_modules && com.display.dms_kind <> DMNone -> true
 			| _ -> false
 		in
 		let check_file () =
@@ -310,11 +314,6 @@ let check_module sctx com m_path m_extra p =
 			(com.cs#get_context sign)#find_module_extra mpath
 		in
 		let check_dependencies () =
-			let full_restore =
-				com.is_macro_context
-				|| com.display.dms_full_typing
-				|| DisplayPosition.display_position#is_in_file (Path.UniqueKey.lazy_key m_extra.m_file)
-			in
 			PMap.iter (fun _ mdep ->
 				let sign = mdep.md_sign in
 				let mpath = mdep.md_path in
@@ -326,13 +325,13 @@ let check_module sctx com m_path m_extra p =
 				match check mpath m2_extra with
 				| None -> ()
 				| Some reason -> raise (Dirty (DependencyDirty(mpath,reason)))
-			) (if full_restore then m_extra.m_deps else Option.default m_extra.m_deps m_extra.m_sig_deps)
+			) m_extra.m_deps
 		in
 		let check () =
 			try
 				check_module_path();
 				if not (has_policy NoFileSystemCheck) || Path.file_extension (Path.UniqueKey.lazy_path m_extra.m_file) <> "hx" then check_file();
-				check_dependencies();
+				if full_typing com m_extra then check_dependencies();
 				None
 			with
 			| Dirty reason ->
@@ -394,7 +393,7 @@ let check_module sctx com m_path m_extra p =
 let get_hxb_module com cc path =
 	try
 		let mc = cc#get_hxb_module path in
-		if not com.is_macro_context && not com.display.dms_full_typing && not (DisplayPosition.display_position#is_in_file (Path.UniqueKey.lazy_key mc.mc_extra.m_file)) then begin
+		if not (full_typing com mc.mc_extra) then begin
 			mc.mc_extra.m_cache_state <- MSGood;
 			BinaryModule mc
 		end else
@@ -408,7 +407,7 @@ let get_hxb_module com cc path =
 class hxb_reader_api_server
 	(com : Common.context)
 	(cc : context_cache)
-	(delay : (unit -> unit) -> unit)
+	(delay : TyperPass.typer_pass -> (unit -> unit) -> unit)
 = object(self)
 
 	method make_module (path : path) (file : string) =
@@ -420,7 +419,7 @@ class hxb_reader_api_server
 			m_statics = None;
 			(* Creating a new m_extra because if we keep the same reference, display requests *)
 			(* can alter it with bad data (for example adding dependencies that are not cached) *)
-			m_extra = { mc.mc_extra with m_deps = mc.mc_extra.m_deps }
+			m_extra = { mc.mc_extra with m_deps = mc.mc_extra.m_deps; m_display_deps = None }
 		}
 
 	method add_module (m : module_def) =
@@ -436,21 +435,20 @@ class hxb_reader_api_server
 		| GoodModule m ->
 			m
 		| BinaryModule mc ->
-			let reader = new HxbReader.hxb_reader path com.hxb_reader_stats (Some cc#get_string_pool_arr) (Common.defined com Define.HxbTimes) in
-			let is_display_file = DisplayPosition.display_position#is_in_file (Path.UniqueKey.lazy_key mc.mc_extra.m_file) in
-			let full_restore = com.is_macro_context || com.display.dms_full_typing || is_display_file in
+			let reader = new HxbReader.hxb_reader path com.hxb_reader_stats (if Common.defined com Define.HxbTimes then Some com.timer_ctx else None) in
+			let full_restore = full_typing com mc.mc_extra in
 			let f_next chunks until =
-				let t_hxb = Timer.timer ["server";"module cache";"hxb read";"until " ^ (string_of_chunk_kind until)] in
-				let r = reader#read_chunks_until (self :> HxbReaderApi.hxb_reader_api) chunks until (not full_restore) in
-				t_hxb();
-				r
+				let macro = if com.is_macro_context then " (macro)" else "" in
+				let f  = reader#read_chunks_until (self :> HxbReaderApi.hxb_reader_api) chunks until in
+				Timer.time com.timer_ctx ["server";"module cache";"hxb read" ^ macro;"until " ^ (string_of_chunk_kind until)] f full_restore
 			in
+
 			let m,chunks = f_next mc.mc_chunks EOT in
 
 			(* We try to avoid reading expressions as much as possible, so we only do this for
 				 our current display file if we're in display mode. *)
 			if full_restore then ignore(f_next chunks EOM)
-			else delay (fun () -> ignore(f_next chunks EOF));
+			else delay PConnectField (fun () -> ignore(f_next chunks EOF));
 			m
 		| BadModule reason ->
 			die (Printf.sprintf "Unexpected BadModule %s (%s)" (s_type_path path) (Printer.s_module_skip_reason reason)) __LOC__
@@ -469,7 +467,12 @@ class hxb_reader_api_server
 		i
 
 	method read_expression_eagerly (cf : tclass_field) =
-		com.display.dms_full_typing
+		com.is_macro_context || com.display.dms_full_typing || Define.defined com.defines Define.DisableHxbOptimizations
+
+	method make_lazy_type t f =
+		let r = make_unforced_lazy t f "server-api" in
+		delay PForce (fun () -> ignore(lazy_type r));
+		TLazy r
 end
 
 let handle_cache_bound_objects com cbol =
@@ -503,11 +506,7 @@ let rec add_modules sctx com delay (m : module_def) (from_binary : bool) (p : po
 				if not from_binary || m != m then
 					com.module_lut#add m.m_path m;
 				handle_cache_bound_objects com m.m_extra.m_cache_bound_objects;
-				let full_restore =
-					com.is_macro_context
-					|| com.display.dms_full_typing
-					|| DisplayPosition.display_position#is_in_file (Path.UniqueKey.lazy_key m.m_extra.m_file)
-				in
+				let full_restore = full_typing com m.m_extra in
 				PMap.iter (fun _ mdep ->
 					let mpath = mdep.md_path in
 					if mdep.md_sign = own_sign then begin
@@ -526,7 +525,7 @@ let rec add_modules sctx com delay (m : module_def) (from_binary : bool) (p : po
 						in
 						add_modules (tabs ^ "  ") m0 m2
 					end
-				) (if full_restore then m.m_extra.m_deps else Option.default m.m_extra.m_deps m.m_extra.m_sig_deps)
+				) (if full_restore then m.m_extra.m_deps else Option.default m.m_extra.m_deps m.m_extra.m_display_deps)
 			)
 		end
 	in
@@ -535,23 +534,18 @@ let rec add_modules sctx com delay (m : module_def) (from_binary : bool) (p : po
 (* Looks up the module referred to by [mpath] in the cache. If it exists, a check is made to
    determine if it's still valid. If this function returns None, the module is re-typed. *)
 and type_module sctx com delay mpath p =
-	let t = Timer.timer ["server";"module cache"] in
+	let t = Timer.start_timer com.timer_ctx ["server";"module cache"] in
 	let cc = CommonCache.get_cache com in
 	let skip m_path reason =
 		ServerMessage.skipping_dep com "" (m_path,(Printer.s_module_skip_reason reason));
 		BadModule reason
 	in
 	let add_modules from_binary m =
-		let tadd = Timer.timer ["server";"module cache";"add modules"] in
-		add_modules sctx com delay m from_binary p;
-		tadd();
+		Timer.time com.timer_ctx ["server";"module cache";"add modules"] (add_modules sctx com delay m from_binary) p;
 		GoodModule m
 	in
 	let check_module sctx m_path m_extra p =
-		let tcheck = Timer.timer ["server";"module cache";"check"] in
-		let r = check_module sctx com mpath m_extra p in
-		tcheck();
-		r
+		Timer.time com.timer_ctx ["server";"module cache";"check"] (check_module sctx com mpath m_extra) p
 	in
 	let find_module_in_cache cc m_path p =
 		try
@@ -578,9 +572,8 @@ and type_module sctx com delay mpath p =
 			   checking dependencies. This means that the actual decoding never has any reason to fail. *)
 			begin match check_module sctx mpath mc.mc_extra p with
 				| None ->
-					let reader = new HxbReader.hxb_reader mpath com.hxb_reader_stats (Some cc#get_string_pool_arr) (Common.defined com Define.HxbTimes) in
-					let is_display_file = DisplayPosition.display_position#is_in_file (Path.UniqueKey.lazy_key mc.mc_extra.m_file) in
-					let full_restore = com.is_macro_context || com.display.dms_full_typing || is_display_file in
+					let reader = new HxbReader.hxb_reader mpath com.hxb_reader_stats (if Common.defined com Define.HxbTimes then Some com.timer_ctx else None) in
+					let full_restore = full_typing com mc.mc_extra in
 					let api = match com.hxb_reader_api with
 						| Some api ->
 							api
@@ -590,16 +583,16 @@ and type_module sctx com delay mpath p =
 							api
 					in
 					let f_next chunks until =
-						let t_hxb = Timer.timer ["server";"module cache";"hxb read";"until " ^ (string_of_chunk_kind until)] in
-						let r = reader#read_chunks_until api chunks until (not full_restore) in
-						t_hxb();
-						r
+						let macro = if com.is_macro_context then " (macro)" else "" in
+						Timer.time com.timer_ctx ["server";"module cache";"hxb read" ^ macro;"until " ^ (string_of_chunk_kind until)] (reader#read_chunks_until api chunks until) full_restore
 					in
+
 					let m,chunks = f_next mc.mc_chunks EOT in
+
 					(* We try to avoid reading expressions as much as possible, so we only do this for
 					   our current display file if we're in display mode. *)
 					if full_restore then ignore(f_next chunks EOM)
-					else delay (fun () -> ignore(f_next chunks EOF));
+					else delay PConnectField (fun () -> ignore(f_next chunks EOF));
 					add_modules true m;
 				| Some reason ->
 					skip mpath reason
@@ -779,7 +772,7 @@ let enable_cache_mode sctx =
 	TypeloadParse.parse_hook := parse_file sctx.cs
 
 let rec process sctx comm args =
-	let t0 = get_time() in
+	let t0 = Extc.time() in
 	ServerMessage.arguments args;
 	reset sctx;
 	let api = {
@@ -802,7 +795,7 @@ let rec process sctx comm args =
 	} in
 	Compiler.HighLevel.entry api comm args;
 	run_delays sctx;
-	ServerMessage.stats stats (get_time() -. t0)
+	ServerMessage.stats stats (Extc.time() -. t0)
 
 (* The server main loop. Waits for the [accept] call to then process the sent compilation
    parameters through [process_params]. *)

+ 2 - 10
src/compiler/serverCompilationContext.ml

@@ -1,5 +1,4 @@
 open Common
-open Timer
 open CompilationCache
 
 type t = {
@@ -45,22 +44,15 @@ let reset sctx =
 	Hashtbl.clear sctx.changed_directories;
 	sctx.was_compilation <- false;
 	Parser.reset_state();
-	Lexer.cur := Lexer.make_file "";
-	measure_times := false;
 	Hashtbl.clear DeprecationCheck.warned_positions;
-	close_times();
 	stats.s_files_parsed := 0;
 	stats.s_classes_built := 0;
 	stats.s_methods_typed := 0;
-	stats.s_macros_called := 0;
-	Hashtbl.clear Timer.htimers;
-	Helper.start_time := get_time()
+	stats.s_macros_called := 0
 
 let maybe_cache_context sctx com =
 	if com.display.dms_full_typing && com.display.dms_populate_cache then begin
-		let t = Timer.timer ["server";"cache context"] in
-		CommonCache.cache_context sctx.cs com;
-		t();
+		Timer.time com.timer_ctx ["server";"cache context"] (CommonCache.cache_context sctx.cs) com;
 		ServerMessage.cached_modules com "" (List.length com.modules);
 	end
 

+ 2 - 2
src/compiler/tasks.ml

@@ -6,7 +6,7 @@ class gc_task (max_working_memory : float) (heap_size : float) = object(self)
 	inherit server_task ["gc"] 100
 
 	method private execute =
-		let t0 = Timer.get_time() in
+		let t0 = Extc.time() in
 		let stats = Gc.stat() in
 		let live_words = float_of_int stats.live_words in
 		(* Maximum heap size needed for the last X compilations = sum of what's live + max working memory. *)
@@ -27,7 +27,7 @@ class gc_task (max_working_memory : float) (heap_size : float) = object(self)
 			Gc.full_major();
 		end;
 		Gc.set old_gc;
-		ServerMessage.gc_stats (Timer.get_time() -. t0) stats do_compact new_space_overhead
+		ServerMessage.gc_stats (Extc.time() -. t0) stats do_compact new_space_overhead
 end
 
 class class_maintenance_task (cs : CompilationCache.t) (c : tclass) = object(self)

+ 23 - 20
src/context/abstractCast.ml

@@ -7,6 +7,7 @@ open Error
 
 let cast_stack = new_rec_stack()
 
+
 let rec make_static_call ctx c cf a pl args t p =
 	if cf.cf_kind = Method MethMacro then begin
 		match args with
@@ -23,7 +24,7 @@ let rec make_static_call ctx c cf a pl args t p =
 				e
 			| _ -> die "" __LOC__
 	end else
-		Typecore.make_static_call ctx c cf (apply_params a.a_params pl) args t p
+		CallUnification.make_static_call_better ctx c cf pl args t p
 
 and do_check_cast ctx uctx tleft eright p =
 	let recurse cf f =
@@ -110,7 +111,7 @@ and do_check_cast ctx uctx tleft eright p =
 
 and cast_or_unify_raise ctx ?(uctx=None) tleft eright p =
 	let uctx = match uctx with
-		| None -> default_unification_context
+		| None -> default_unification_context ()
 		| Some uctx -> uctx
 	in
 	try
@@ -127,7 +128,7 @@ and cast_or_unify ctx tleft eright p =
 		eright
 
 let prepare_array_access_field ctx a pl cf p =
-	let monos = List.map (fun _ -> spawn_monomorph ctx.e p) cf.cf_params in
+	let monos = List.map (fun _ -> spawn_monomorph ctx p) cf.cf_params in
 	let map t = apply_params a.a_params pl (apply_params cf.cf_params monos t) in
 	let check_constraints () =
 		List.iter2 (fun m ttp -> match get_constraints ttp with
@@ -199,11 +200,11 @@ let find_array_write_access ctx a tl e1 e2 p =
 		let s_type = s_type (print_context()) in
 		raise_typing_error (Printf.sprintf "No @:arrayAccess function for %s accepts arguments of %s and %s" (s_type (TAbstract(a,tl))) (s_type e1.etype) (s_type e2.etype)) p
 
-let find_multitype_specialization com a pl p =
-	let uctx = default_unification_context in
+let find_multitype_specialization' platform a pl p =
+	let uctx = default_unification_context () in
 	let m = mk_mono() in
 	let tl,definitive_types = Abstract.find_multitype_params a pl in
-	if com.platform = Globals.Js && a.a_path = (["haxe";"ds"],"Map") then begin match tl with
+	if platform = Globals.Js && a.a_path = (["haxe";"ds"],"Map") then begin match tl with
 		| t1 :: _ ->
 			let stack = ref [] in
 			let rec loop t =
@@ -241,9 +242,13 @@ let find_multitype_specialization com a pl p =
 			else
 				raise_typing_error ("Abstract " ^ (s_type_path a.a_path) ^ " has no @:to function that accepts " ^ st) p;
 	in
-	cf, follow m
+	cf,follow m,tl
+
+let find_multitype_specialization platform a pl p =
+	let cf,m,_ = find_multitype_specialization' platform a pl p in
+	(cf,m)
 
-let handle_abstract_casts ctx e =
+let handle_abstract_casts (scom : SafeCom.t) e =
 	let rec loop e = match e.eexpr with
 		| TNew({cl_kind = KAbstractImpl a} as c,pl,el) ->
 			if not (Meta.has Meta.MultiType a.a_meta) then begin
@@ -254,24 +259,22 @@ let handle_abstract_casts ctx e =
 				| _ -> raise_typing_error ("Cannot construct " ^ (s_type (print_context()) (TAbstract(a,pl)))) e.epos
 			end else begin
 				(* a TNew of an abstract implementation is only generated if it is a multi type abstract *)
-				let cf,m = find_multitype_specialization ctx.com a pl e.epos in
-				let e = make_static_call ctx c cf a pl ((mk (TConst TNull) (TAbstract(a,pl)) e.epos) :: el) m e.epos in
+				let cf,m,pl = find_multitype_specialization' scom.platform a pl e.epos in
+				let e = ExceptionFunctions.make_static_call scom c cf ((mk (TConst TNull) (TAbstract(a,pl)) e.epos) :: el)  m e.epos in
 				{e with etype = m}
 			end
 		| TCall({eexpr = TField(_,FStatic({cl_path=[],"Std"},{cf_name = "string"}))},[e1]) when (match follow e1.etype with TAbstract({a_impl = Some _},_) -> true | _ -> false) ->
 			begin match follow e1.etype with
-				| TAbstract({a_impl = Some c} as a,tl) ->
+				| TAbstract({a_impl = Some c},tl) ->
 					begin try
 						let cf = PMap.find "toString" c.cl_statics in
-						let call() = make_static_call ctx c cf a tl [e1] ctx.t.tstring e.epos in
-						if not ctx.allow_transform then
-							{ e1 with etype = ctx.t.tstring; epos = e.epos }
-						else if not (is_nullable e1.etype) then
+						let call() = ExceptionFunctions.make_static_call scom c cf [e1] scom.basic.tstring e.epos in
+						if not (is_nullable e1.etype) then
 							call()
 						else begin
 							let p = e.epos in
-							let chk_null = mk (TBinop (Ast.OpEq, e1, mk (TConst TNull) e1.etype p)) ctx.com.basic.tbool p in
-							mk (TIf (chk_null, mk (TConst (TString "null")) ctx.com.basic.tstring p, Some (call()))) ctx.com.basic.tstring p
+							let chk_null = mk (TBinop (Ast.OpEq, e1, mk (TConst TNull) e1.etype p)) scom.basic.tbool p in
+							mk (TIf (chk_null, mk (TConst (TString "null")) scom.basic.tstring p, Some (call()))) scom.basic.tstring p
 						end
 					with Not_found ->
 						e
@@ -339,14 +342,14 @@ let handle_abstract_casts ctx e =
 								else
 									el
 							in
-							let ecall = make_call ctx ef el tr e.epos in
+							let ecall = ExceptionFunctions.make_call scom ef el tr e.epos in
 							maybe_cast ecall e.etype e.epos
 						with Not_found ->
 							(* quick_field raises Not_found if m is an abstract, we have to replicate the 'using' call here *)
 							match follow m with
-							| TAbstract({a_impl = Some c} as a,pl) ->
+							| TAbstract({a_impl = Some c},pl) ->
 								let cf = PMap.find fname c.cl_statics in
-								make_static_call ctx c cf a pl (e2 :: el) e.etype e.epos
+								ExceptionFunctions.make_static_call scom c cf  (e2 :: el) e.etype e.epos
 							| _ -> raise Not_found
 						end
 					| _ ->

+ 57 - 142
src/context/common.ml

@@ -45,127 +45,6 @@ type stats = {
 	s_macros_called : int ref;
 }
 
-(**
-	The capture policy tells which handling we make of captured locals
-	(the locals which are referenced in local functions)
-
-	See details/implementation in Codegen.captured_vars
-*)
-type capture_policy =
-	(** do nothing, let the platform handle it *)
-	| CPNone
-	(** wrap all captured variables into a single-element array to allow modifications *)
-	| CPWrapRef
-	(** similar to wrap ref, but will only apply to the locals that are declared in loops *)
-	| CPLoopVars
-
-type exceptions_config = {
-	(* Base types which may be thrown from Haxe code without wrapping. *)
-	ec_native_throws : path list;
-	(* Base types which may be caught from Haxe code without wrapping. *)
-	ec_native_catches : path list;
-	(*
-		Hint exceptions filter to avoid wrapping for targets, which can throw/catch any type
-		Ignored on targets with a specific native base type for exceptions.
-	*)
-	ec_avoid_wrapping : bool;
-	(* Path of a native class or interface, which can be used for wildcard catches. *)
-	ec_wildcard_catch : path;
-	(*
-		Path of a native base class or interface, which can be thrown.
-		This type is used to cast `haxe.Exception.thrown(v)` calls to.
-		For example `throw 123` is compiled to `throw (cast Exception.thrown(123):ec_base_throw)`
-	*)
-	ec_base_throw : path;
-	(*
-		Checks if throwing this expression is a special case for current target
-		and should not be modified.
-	*)
-	ec_special_throw : texpr -> bool;
-}
-
-type var_scope =
-	| FunctionScope
-	| BlockScope
-
-type var_scoping_flags =
-	(**
-		Variables are hoisted in their scope
-	*)
-	| VarHoisting
-	(**
-		It's not allowed to shadow existing variables in a scope.
-	*)
-	| NoShadowing
-	(**
-		It's not allowed to shadow a `catch` variable.
-	*)
-	| NoCatchVarShadowing
-	(**
-		Local vars cannot have the same name as the current top-level package or
-		(if in the root package) current class name
-	*)
-	| ReserveCurrentTopLevelSymbol
-	(**
-		Local vars cannot have a name used for any top-level symbol
-		(packages and classes in the root package)
-	*)
-	| ReserveAllTopLevelSymbols
-	(**
-		Reserve all type-paths converted to "flat path" with `Path.flat_path`
-	*)
-	| ReserveAllTypesFlat
-	(**
-		List of names cannot be taken by local vars
-	*)
-	| ReserveNames of string list
-	(**
-		Cases in a `switch` won't have blocks, but will share the same outer scope.
-	*)
-	| SwitchCasesNoBlocks
-
-type var_scoping_config = {
-	vs_flags : var_scoping_flags list;
-	vs_scope : var_scope;
-}
-
-type platform_config = {
-	(** has a static type system, with not-nullable basic types (Int/Float/Bool) *)
-	pf_static : bool;
-	(** has access to the "sys" package *)
-	pf_sys : bool;
-	(** captured variables handling (see before) *)
-	pf_capture_policy : capture_policy;
-	(** when calling a method with optional args, do we replace the missing args with "null" constants *)
-	pf_pad_nulls : bool;
-	(** add a final return to methods not having one already - prevent some compiler warnings *)
-	pf_add_final_return : bool;
-	(** does the platform natively support overloaded functions *)
-	pf_overload : bool;
-	(** can the platform use default values for non-nullable arguments *)
-	pf_can_skip_non_nullable_argument : bool;
-	(** type paths that are reserved on the platform *)
-	pf_reserved_type_paths : path list;
-	(** supports function == function **)
-	pf_supports_function_equality : bool;
-	(** uses utf16 encoding with ucs2 api **)
-	pf_uses_utf16 : bool;
-	(** target supports accessing `this` before calling `super(...)` **)
-	pf_this_before_super : bool;
-	(** target supports threads **)
-	pf_supports_threads : bool;
-	(** target supports Unicode **)
-	pf_supports_unicode : bool;
-	(** target supports rest arguments **)
-	pf_supports_rest_args : bool;
-	(** exceptions handling config **)
-	pf_exceptions : exceptions_config;
-	(** the scoping of local variables *)
-	pf_scoping : var_scoping_config;
-	(** target supports atomic operations via haxe.Atomic **)
-	pf_supports_atomics : bool;
-}
-
 class compiler_callbacks = object(self)
 	val before_typer_create = ref [];
 	val after_init_macros = ref [];
@@ -232,9 +111,10 @@ class file_keys = object(self)
 
 	val virtual_counter = ref 0
 
-	method generate_virtual step =
+	method generate_virtual mpath step =
 		incr virtual_counter;
-		Printf.sprintf "file_%i_%i" step !virtual_counter
+		let base = match fst mpath with | [] -> "." | pack -> ExtLib.String.join "/" pack in
+		Printf.sprintf "%s/%s_%i_%i" base (snd mpath) step !virtual_counter
 
 end
 
@@ -347,6 +227,13 @@ class virtual abstract_hxb_lib = object(self)
 	method virtual get_string_pool : string -> string array option
 end
 
+type parser_state = {
+	mutable was_auto_triggered : bool;
+	mutable had_parser_resume : bool;
+	delayed_syntax_completion : Parser.syntax_completion_on option Atomic.t;
+	special_identifier_files : (Path.UniqueKey.t,string) ThreadSafeHashtbl.t;
+}
+
 type context = {
 	compilation_step : int;
 	mutable stage : compiler_stage;
@@ -354,20 +241,23 @@ type context = {
 	mutable cache : CompilationCache.context_cache option;
 	is_macro_context : bool;
 	mutable json_out : json_api option;
+	timer_ctx : Timer.timer_context;
 	(* config *)
-	version : int;
+	version : compiler_version;
 	mutable args : string list;
 	mutable display : DisplayTypes.DisplayMode.settings;
 	mutable debug : bool;
 	mutable verbose : bool;
 	mutable foptimize : bool;
+	mutable doinline : bool;
 	mutable platform : platform;
-	mutable config : platform_config;
+	mutable config : PlatformConfig.platform_config;
 	empty_class_path : ClassPath.class_path;
 	class_paths : ClassPaths.class_paths;
 	main : Gctx.context_main;
 	mutable package_rules : (string,package_rule) PMap.t;
 	mutable report_mode : report_mode;
+	parser_state : parser_state;
 	(* communication *)
 	mutable print : string -> unit;
 	mutable error : Gctx.error_function;
@@ -451,8 +341,8 @@ let to_gctx com = {
 		| _ -> []);
 	include_files = com.include_files;
 	std = com.std;
+	timer_ctx = com.timer_ctx;
 }
-
 let enter_stage com stage =
 	(* print_endline (Printf.sprintf "Entering stage %s" (s_compiler_stage stage)); *)
 	com.stage <- stage
@@ -564,6 +454,8 @@ let stats =
 		s_macros_called = ref 0;
 	}
 
+open PlatformConfig
+
 let default_config =
 	{
 		pf_static = true;
@@ -649,6 +541,9 @@ let get_config com =
 			pf_supports_unicode = false;
 			pf_scoping = { default_config.pf_scoping with
 				vs_flags = [ReserveAllTopLevelSymbols];
+			};
+			pf_exceptions = { default_config.pf_exceptions with
+				ec_avoid_wrapping = false
 			}
 		}
 	| Flash ->
@@ -769,6 +664,9 @@ let get_config com =
 				vs_scope = BlockScope;
 				vs_flags = [NoShadowing]
 			};
+			pf_exceptions = { default_config.pf_exceptions with
+				ec_avoid_wrapping = false
+			}
 		}
 	| Eval ->
 		{
@@ -778,15 +676,19 @@ let get_config com =
 			pf_uses_utf16 = false;
 			pf_supports_threads = true;
 			pf_capture_policy = CPWrapRef;
+			pf_exceptions = { default_config.pf_exceptions with
+				ec_avoid_wrapping = false
+			}
 		}
 
 let memory_marker = [|Unix.time()|]
 
-let create compilation_step cs version args display_mode =
+let create timer_ctx compilation_step cs version args display_mode =
 	let rec com = {
 		compilation_step = compilation_step;
 		cs = cs;
 		cache = None;
+		timer_ctx = timer_ctx;
 		stage = CCreated;
 		version = version;
 		args = args;
@@ -804,6 +706,7 @@ let create compilation_step cs version args display_mode =
 		display = display_mode;
 		verbose = false;
 		foptimize = true;
+		doinline = true;
 		features = Hashtbl.create 0;
 		platform = Cross;
 		config = default_config;
@@ -813,7 +716,8 @@ let create compilation_step cs version args display_mode =
 		empty_class_path = new ClassPath.directory_class_path "" User;
 		class_paths = new ClassPaths.class_paths;
 		main = {
-			main_class = None;
+			main_path = None;
+			main_file = None;
 			main_expr = None;
 		};
 		package_rules = PMap.empty;
@@ -835,10 +739,7 @@ let create compilation_step cs version args display_mode =
 		include_files = [];
 		js_gen = None;
 		load_extern_type = [];
-		defines = {
-			defines_signature = None;
-			values = PMap.empty;
-		};
+		defines = Define.empty_defines ();
 		user_defines = Hashtbl.create 0;
 		user_metas = Hashtbl.create 0;
 		get_macros = (fun() -> None);
@@ -878,6 +779,12 @@ let create compilation_step cs version args display_mode =
 		hxb_reader_api = None;
 		hxb_reader_stats = HxbReader.create_hxb_reader_stats ();
 		hxb_writer_config = None;
+		parser_state = {
+			was_auto_triggered = false;
+			had_parser_resume = false;
+			delayed_syntax_completion = Atomic.make None;
+			special_identifier_files = ThreadSafeHashtbl.create 0;
+		}
 	} in
 	com
 
@@ -909,7 +816,8 @@ let clone com is_macro_context =
 			tstring = mk_mono();
 		};
 		main = {
-			main_class = None;
+			main_path = None;
+			main_file = None;
 			main_expr = None;
 		};
 		features = Hashtbl.create 0;
@@ -967,7 +875,7 @@ let init_platform com =
 	end;
 	(* Set the source header, unless the user has set one already or the platform sets a custom one *)
 	if not (defined com Define.SourceHeader) && (com.platform <> Hl) then
-		define_value com Define.SourceHeader ("Generated by Haxe " ^ s_version_full);
+		define_value com Define.SourceHeader ("Generated by Haxe " ^ (s_version_full com.version));
 	let forbid acc p = if p = name || PMap.mem p acc then acc else PMap.add p Forbidden acc in
 	com.package_rules <- List.fold_left forbid com.package_rules ("java" :: (List.map platform_name platforms));
 	update_platform_config com;
@@ -1077,10 +985,6 @@ let platform_name_macro com =
 let find_file ctx f =
 	(ctx.class_paths#find_file f).file
 
-(* let find_file ctx f =
-	let timer = Timer.timer ["find_file"] in
-	Std.finally timer (find_file ctx) f *)
-
 let mem_size v =
 	Objsize.size_with_headers (Objsize.objsize v [] [])
 
@@ -1107,9 +1011,6 @@ let display_error_ext com err =
 let display_error com ?(depth = 0) msg p =
 	display_error_ext com (Error.make_error ~depth (Custom msg) p)
 
-let dump_path com =
-	Define.defined_value_safe ~default:"dump" com.defines Define.DumpPath
-
 let adapt_defines_to_macro_context defines =
 	let to_remove = "java" :: List.map Globals.platform_name Globals.platforms in
 	let to_remove = List.fold_left (fun acc d -> Define.get_define_key d :: acc) to_remove [Define.NoTraces] in
@@ -1142,4 +1043,18 @@ let get_entry_point com =
 		in
 		let e = Option.get com.main.main_expr in (* must be present at this point *)
 		(snd path, c, e)
-	) com.main.main_class
+	) com.main.main_path
+
+let make_unforced_lazy t_proc f where =
+	let r = ref (lazy_available t_dynamic) in
+	r := lazy_wait (fun() ->
+		try
+			r := lazy_processing t_proc;
+			let t = f () in
+			r := lazy_available t;
+			t
+		with
+			| Error.Error e ->
+				raise (Error.Fatal_error e)
+	);
+	r

+ 21 - 14
src/context/commonCache.ml

@@ -85,14 +85,12 @@ let rec cache_context cs com =
 	let cc = get_cache com in
 	let sign = Define.get_signature com.defines in
 
-	let cache_module =
+	let parallels = DynArray.create () in
+	let cache_module m =
 		if Define.defined com.defines DisableHxbCache then
-			let cache_module m =
-				(* If we have a signature mismatch, look-up cache for module. Physical equality check is fine as a heuristic. *)
-				let cc = if m.m_extra.m_sign = sign then cc else cs#get_context m.m_extra.m_sign in
-				cc#cache_module_in_memory m.m_path m;
-			in
-			cache_module
+			(* If we have a signature mismatch, look-up cache for module. Physical equality check is fine as a heuristic. *)
+			let cc = if m.m_extra.m_sign = sign then cc else cs#get_context m.m_extra.m_sign in
+			cc#cache_module_in_memory m.m_path m;
 		else
 			let anon_identification = new Tanon_identification.tanon_identification in
 			let warn w s p = com.warning w com.warning_options s p in
@@ -102,15 +100,24 @@ let rec cache_context cs com =
 				| Some config ->
 					if com.is_macro_context then config.macro_config else config.target_config
 			in
-			let cache_module m =
-				(* If we have a signature mismatch, look-up cache for module. Physical equality check is fine as a heuristic. *)
-				let cc = if m.m_extra.m_sign = sign then cc else cs#get_context m.m_extra.m_sign in
-				cc#cache_hxb_module config warn anon_identification m.m_path m;
-			in
-			cache_module
+			(* If we have a signature mismatch, look-up cache for module. Physical equality check is fine as a heuristic. *)
+			let cc = if m.m_extra.m_sign = sign then cc else cs#get_context m.m_extra.m_sign in
+			match cc#cache_hxb_module config warn anon_identification m with
+			| None ->
+				()
+			| Some f ->
+				DynArray.add parallels (cc,m,f)
 	in
-
 	List.iter cache_module com.modules;
+	let a = Parallel.run_in_new_pool com.timer_ctx (fun pool ->
+		Parallel.ParallelArray.map pool (fun (cc,m,f) ->
+			let chunks = f() in
+			(cc,m,chunks)
+		) (DynArray.to_array parallels) (cc,null_module,[])
+	) in
+	Array.iter (fun (cc,m,chunks) ->
+		cc#add_binary_cache m chunks
+	) a;
 	begin match com.get_macros() with
 		| None -> ()
 		| Some com -> cache_context cs com

+ 1 - 1
src/context/display/diagnostics.ml

@@ -77,7 +77,7 @@ let check_other_things com e =
 		| TCall({eexpr = TField(e1,fa)},el) when not in_value && PurityState.is_pure_field_access fa -> compound "call" el e.epos
 		| TNew _ | TCall _ | TBinop ((Ast.OpAssignOp _ | Ast.OpAssign),_,_) | TUnop ((Ast.Increment | Ast.Decrement),_,_)
 		| TReturn _ | TBreak | TContinue | TThrow _ | TCast (_,Some _)
-		| TIf _ | TTry _ | TSwitch _ | TWhile _ | TFor _ ->
+		| TIf _ | TTry _ | TSwitch _ | TWhile _ ->
 			had_effect := true;
 			Type.iter (loop true) e
 		| TParenthesis e1 | TMeta(_,e1) ->

+ 2 - 2
src/context/display/display.ml

@@ -31,8 +31,8 @@ module ReferencePosition = struct
 end
 
 let preprocess_expr com e = match com.display.dms_kind with
-	| DMDefinition | DMTypeDefinition | DMUsage _ | DMImplementation | DMHover | DMDefault -> ExprPreprocessing.find_before_pos com.display.dms_kind e
-	| DMSignature -> ExprPreprocessing.find_display_call e
+	| DMDefinition | DMTypeDefinition | DMUsage _ | DMImplementation | DMHover | DMDefault -> ExprPreprocessing.find_before_pos com.parser_state.was_auto_triggered com.display.dms_kind e
+	| DMSignature -> ExprPreprocessing.find_display_call com.parser_state.was_auto_triggered e
 	| _ -> e
 
 let sort_fields l with_type tk =

+ 3 - 1
src/context/display/displayFields.ml

@@ -49,7 +49,7 @@ let collect_static_extensions ctx items e p =
 	let rec dup t = Type.map dup t in
 	let handle_field c f acc =
 		let f = { f with cf_type = opt_type f.cf_type } in
-		let monos = List.map (fun _ -> spawn_monomorph ctx.e p) f.cf_params in
+		let monos = List.map (fun _ -> spawn_monomorph ctx p) f.cf_params in
 		let map = apply_params f.cf_params monos in
 		match follow (map f.cf_type) with
 		| TFun((_,_,TType({t_path=["haxe";"macro"], "ExprOf"}, [t])) :: args, ret)
@@ -169,6 +169,8 @@ let collect ctx e_ast e dk with_type p =
 					end
 				end else
 					loop items (mk_anon ~fields (ref Closed))
+			| CTypes [(t,_)] ->
+				loop items t
 			| CTypes tl ->
 				items
 			| CUnknown ->

+ 25 - 23
src/context/display/displayJson.ml

@@ -51,7 +51,6 @@ class display_handler (jsonrpc : jsonrpc_handler) com (cs : CompilationCache.t)
 
 	method enable_display ?(skip_define=false) mode =
 		com.display <- create mode;
-		Parser.display_mode := mode;
 		if not skip_define then Common.define_value com Define.Display "1"
 
 	method set_display_file was_auto_triggered requires_offset =
@@ -65,7 +64,7 @@ class display_handler (jsonrpc : jsonrpc_handler) com (cs : CompilationCache.t)
 		) None in
 
 		let pos = if requires_offset then jsonrpc#get_int_param "offset" else (-1) in
-		Parser.was_auto_triggered := was_auto_triggered;
+		com.parser_state.was_auto_triggered <- was_auto_triggered;
 
 		if file <> file_input_marker then begin
 			let file_unique = com.file_keys#get file in
@@ -105,7 +104,7 @@ class display_handler (jsonrpc : jsonrpc_handler) com (cs : CompilationCache.t)
 end
 
 class hxb_reader_api_com
-	~(minimal_restore : bool)
+	~(full_restore : bool)
 	(com : Common.context)
 	(cc : CompilationCache.context_cache)
 = object(self)
@@ -118,7 +117,7 @@ class hxb_reader_api_com
 			m_statics = None;
 			(* Creating a new m_extra because if we keep the same reference, display requests *)
 			(* can alter it with bad data (for example adding dependencies that are not cached) *)
-			m_extra = { mc.mc_extra with m_deps = mc.mc_extra.m_deps }
+			m_extra = { mc.mc_extra with m_deps = mc.mc_extra.m_deps; m_display_deps = None }
 		}
 
 	method add_module (m : module_def) =
@@ -139,8 +138,8 @@ class hxb_reader_api_com
 			cc#find_module m_path
 		with Not_found ->
 			let mc = cc#get_hxb_module m_path in
-			let reader = new HxbReader.hxb_reader mc.mc_path com.hxb_reader_stats (Some cc#get_string_pool_arr) (Common.defined com Define.HxbTimes) in
-			fst (reader#read_chunks_until (self :> HxbReaderApi.hxb_reader_api) mc.mc_chunks (if minimal_restore then MTF else EOM) minimal_restore)
+			let reader = new HxbReader.hxb_reader mc.mc_path com.hxb_reader_stats (if Common.defined com Define.HxbTimes then Some com.timer_ctx else None) in
+			fst (reader#read_chunks_until (self :> HxbReaderApi.hxb_reader_api) mc.mc_chunks (if full_restore then EOM else MTF) full_restore)
 
 	method basic_types =
 		com.basic
@@ -150,10 +149,13 @@ class hxb_reader_api_com
 
 	method read_expression_eagerly (cf : tclass_field) =
 		false
+
+	method make_lazy_type t f =
+		TLazy (make_unforced_lazy t f "com-api")
 end
 
-let find_module ~(minimal_restore : bool) com cc path =
-	(new hxb_reader_api_com ~minimal_restore com cc)#find_module path
+let find_module ~(full_restore : bool) com cc path =
+	(new hxb_reader_api_com ~full_restore com cc)#find_module path
 
 type handler_context = {
 	com : Common.context;
@@ -176,11 +178,11 @@ let handler =
 			hctx.send_result (JObject [
 				"methods",jarray methods;
 				"haxeVersion",jobject [
-					"major",jint version_major;
-					"minor",jint version_minor;
-					"patch",jint version_revision;
-					"pre",(match version_pre with None -> jnull | Some pre -> jstring pre);
-					"build",(match Version.version_extra with None -> jnull | Some(_,build) -> jstring build);
+					"major",jint hctx.com.version.major;
+					"minor",jint hctx.com.version.minor;
+					"patch",jint hctx.com.version.revision;
+					"pre",(match hctx.com.version.pre with None -> jnull | Some pre -> jstring pre);
+					"build",(match hctx.com.version.extra with None -> jnull | Some(_,build) -> jstring build);
 				];
 				"protocolVersion",jobject [
 					"major",jint 0;
@@ -347,12 +349,13 @@ let handler =
 			let path = Path.parse_path (hctx.jsonrpc#get_string_param "path") in
 			let cs = hctx.display#get_cs in
 			let cc = cs#get_context sign in
+			let full_restore = Define.defined hctx.com.defines Define.DisableHxbOptimizations in
 			let m = try
-				find_module ~minimal_restore:true hctx.com cc path
+				find_module ~full_restore hctx.com cc path
 			with Not_found ->
 				hctx.send_error [jstring "No such module"]
 			in
-			hctx.send_result (generate_module (cc#get_hxb) (find_module ~minimal_restore:true hctx.com cc) m)
+			hctx.send_result (generate_module (cc#get_hxb) (find_module ~full_restore hctx.com cc) m)
 		);
 		"server/type", (fun hctx ->
 			let sign = Digest.from_hex (hctx.jsonrpc#get_string_param "signature") in
@@ -360,7 +363,7 @@ let handler =
 			let typeName = hctx.jsonrpc#get_string_param "typeName" in
 			let cc = hctx.display#get_cs#get_context sign in
 			let m = try
-				find_module ~minimal_restore:true hctx.com cc path
+				find_module ~full_restore:true hctx.com cc path
 			with Not_found ->
 				hctx.send_error [jstring "No such module"]
 			in
@@ -463,18 +466,18 @@ let handler =
 			hctx.send_result (jarray !l)
 		);
 		"server/memory",(fun hctx ->
-			let j = Memory.get_memory_json hctx.display#get_cs MCache in
+			let j = DisplayMemory.get_memory_json hctx.display#get_cs MCache in
 			hctx.send_result j
 		);
 		"server/memory/context",(fun hctx ->
 			let sign = Digest.from_hex (hctx.jsonrpc#get_string_param "signature") in
-			let j = Memory.get_memory_json hctx.display#get_cs (MContext sign) in
+			let j = DisplayMemory.get_memory_json hctx.display#get_cs (MContext sign) in
 			hctx.send_result j
 		);
 		"server/memory/module",(fun hctx ->
 			let sign = Digest.from_hex (hctx.jsonrpc#get_string_param "signature") in
 			let path = Path.parse_path (hctx.jsonrpc#get_string_param "path") in
-			let j = Memory.get_memory_json hctx.display#get_cs (MModule(sign,path)) in
+			let j = DisplayMemory.get_memory_json hctx.display#get_cs (MModule(sign,path)) in
 			hctx.send_result j
 		);
 		(* TODO: wait till gama complains about the naming, then change it to something else *)
@@ -489,7 +492,7 @@ let handler =
 	List.iter (fun (s,f) -> Hashtbl.add h s f) l;
 	h
 
-let parse_input com input report_times =
+let parse_input com input =
 	let input =
 		JsonRpc.handle_jsonrpc_error (fun () -> JsonRpc.parse_request input) send_json
 	in
@@ -502,9 +505,8 @@ let parse_input com input report_times =
 			"result",json;
 			"timestamp",jfloat (Unix.gettimeofday ());
 		] in
-		let fl = if !report_times then begin
-			close_times();
-			let _,_,root = Timer.build_times_tree () in
+		let fl = if com.timer_ctx.measure_times = Yes then begin
+			let _,_,root = Timer.build_times_tree com.timer_ctx in
 			begin match json_of_times root with
 			| None -> fl
 			| Some jo -> ("timers",jo) :: fl

+ 256 - 0
src/context/display/displayMemory.ml

@@ -0,0 +1,256 @@
+open Globals
+open Common
+open Memory
+open Genjson
+open Type
+
+let get_memory_json (cs : CompilationCache.t) mreq =
+	begin match mreq with
+	| MCache ->
+		let old_gc = Gc.get() in
+		Gc.set { old_gc with
+			Gc.max_overhead = 0;
+			Gc.space_overhead = 0
+		};
+		Gc.compact();
+		Gc.set old_gc;
+		let stat = Gc.quick_stat() in
+		let size = (float_of_int stat.Gc.heap_words) *. (float_of_int (Sys.word_size / 8)) in
+		let cache_mem = cs#get_pointers in
+		let contexts = cs#get_contexts in
+		let j_contexts = List.map (fun cc -> jobject [
+			"context",cc#get_json;
+			"size",jint (mem_size cc);
+		]) contexts in
+		let mem_size_2 v exclude =
+			Objsize.size_with_headers (Objsize.objsize v exclude [])
+		in
+		jobject [
+			"contexts",jarray j_contexts;
+			"memory",jobject [
+				"totalCache",jint (mem_size cs);
+				"contextCache",jint (mem_size cache_mem.(0));
+				"haxelibCache",jint (mem_size cache_mem.(1));
+				"directoryCache",jint (mem_size cache_mem.(2));
+				"nativeLibCache",jint (mem_size cache_mem.(3));
+				"additionalSizes",jarray (
+					(match !MacroContext.macro_interp_cache with
+					| Some interp ->
+						jobject ["name",jstring "macro interpreter";"size",jint (mem_size MacroContext.macro_interp_cache);"child",jarray [
+							jobject ["name",jstring "builtins";"size",jint (mem_size_2 interp.builtins [Obj.repr interp])];
+							jobject ["name",jstring "debug";"size",jint (mem_size_2 interp.debug [Obj.repr interp])];
+							jobject ["name",jstring "curapi";"size",jint (mem_size_2 interp.curapi [Obj.repr interp])];
+							jobject ["name",jstring "type_cache";"size",jint (mem_size_2 interp.type_cache [Obj.repr interp])];
+							jobject ["name",jstring "overrides";"size",jint (mem_size_2 interp.overrides [Obj.repr interp])];
+							jobject ["name",jstring "array_prototype";"size",jint (mem_size_2 interp.array_prototype [Obj.repr interp])];
+							jobject ["name",jstring "string_prototype";"size",jint (mem_size_2 interp.string_prototype [Obj.repr interp])];
+							jobject ["name",jstring "vector_prototype";"size",jint (mem_size_2 interp.vector_prototype [Obj.repr interp])];
+							jobject ["name",jstring "instance_prototypes";"size",jint (mem_size_2 interp.instance_prototypes [Obj.repr interp])];
+							jobject ["name",jstring "static_prototypes";"size",jint (mem_size_2 interp.static_prototypes [Obj.repr interp])];
+							jobject ["name",jstring "constructors";"size",jint (mem_size_2 interp.constructors [Obj.repr interp])];
+							jobject ["name",jstring "file_keys";"size",jint (mem_size_2 interp.file_keys [Obj.repr interp])];
+							jobject ["name",jstring "toplevel";"size",jint (mem_size_2 interp.toplevel [Obj.repr interp])];
+							jobject ["name",jstring "eval";"size",jint (mem_size_2 interp.eval [Obj.repr interp]);"child", jarray [
+								(match interp.eval.env with
+								| Some env ->
+									jobject ["name",jstring "env";"size",jint (mem_size_2 interp.eval.env [Obj.repr interp; Obj.repr interp.eval]);"child", jarray [
+										jobject ["name",jstring "env_info";"size",jint (mem_size_2 env.env_info [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_debug";"size",jint (mem_size_2 env.env_debug [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_locals";"size",jint (mem_size_2 env.env_locals [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_captures";"size",jint (mem_size_2 env.env_captures [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_extra_locals";"size",jint (mem_size_2 env.env_extra_locals [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_parent";"size",jint (mem_size_2 env.env_parent [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+										jobject ["name",jstring "env_eval";"size",jint (mem_size_2 env.env_eval [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
+									]];
+								| None ->
+									jobject ["name",jstring "env";"size",jint (mem_size_2 interp.eval.env [Obj.repr interp; Obj.repr interp.eval])];
+								);
+								jobject ["name",jstring "thread";"size",jint (mem_size_2 interp.eval.thread [Obj.repr interp; Obj.repr interp.eval]);"child", jarray [
+									jobject ["name",jstring "tthread";"size",jint (mem_size_2 interp.eval.thread.tthread [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
+									jobject ["name",jstring "tdeque";"size",jint (mem_size_2 interp.eval.thread.tdeque [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
+									jobject ["name",jstring "tevents";"size",jint (mem_size_2 interp.eval.thread.tevents [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
+									jobject ["name",jstring "tstorage";"size",jint (mem_size_2 interp.eval.thread.tstorage [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
+								]];
+								jobject ["name",jstring "debug_state";"size",jint (mem_size_2 interp.eval.debug_state [Obj.repr interp; Obj.repr interp.eval])];
+								jobject ["name",jstring "breakpoint";"size",jint (mem_size_2 interp.eval.breakpoint [Obj.repr interp; Obj.repr interp.eval])];
+								jobject ["name",jstring "caught_types";"size",jint (mem_size_2 interp.eval.caught_types [Obj.repr interp; Obj.repr interp.eval])];
+								jobject ["name",jstring "caught_exception";"size",jint (mem_size_2 interp.eval.caught_exception [Obj.repr interp; Obj.repr interp.eval])];
+								jobject ["name",jstring "last_return";"size",jint (mem_size_2 interp.eval.last_return [Obj.repr interp; Obj.repr interp.eval])];
+								jobject ["name",jstring "debug_channel";"size",jint (mem_size_2 interp.eval.debug_channel [Obj.repr interp; Obj.repr interp.eval])];
+							]];
+							jobject ["name",jstring "evals";"size",jint (mem_size_2 interp.evals [Obj.repr interp])];
+							jobject ["name",jstring "exception_stack";"size",jint (mem_size_2 interp.exception_stack [Obj.repr interp])];
+						]];
+					| None ->
+						jobject ["name",jstring "macro interpreter";"size",jint (mem_size MacroContext.macro_interp_cache)];
+					)
+					::
+					[
+						(* jobject ["name",jstring "macro stdlib";"size",jint (mem_size (EvalContext.GlobalState.stdlib))];
+						jobject ["name",jstring "macro macro_lib";"size",jint (mem_size (EvalContext.GlobalState.macro_lib))]; *)
+						jobject ["name",jstring "last completion result";"size",jint (mem_size (DisplayException.last_completion_result))];
+						jobject ["name",jstring "Lexer file cache";"size",jint (mem_size (Lexer.all_files))];
+						jobject ["name",jstring "GC heap words";"size",jint (int_of_float size)];
+					]
+				);
+			]
+		]
+	| MContext sign ->
+		let cc = cs#get_context sign in
+		let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty cs#get_modules in
+		let l = Hashtbl.fold (fun _ m acc ->
+			(m,(get_module_memory cs all_modules m)) :: acc
+		) cc#get_modules [] in
+		let l = List.sort (fun (_,(size1,_)) (_,(size2,_)) -> compare size2 size1) l in
+		let leaks = ref [] in
+		let l = List.map (fun (m,(size,(reached,_,_,mleaks))) ->
+			if reached then leaks := (m,mleaks) :: !leaks;
+			jobject [
+				"path",jstring (s_type_path m.m_path);
+				"size",jint size;
+				"hasTypes",jbool (match m.m_extra.m_kind with MCode | MMacro -> true | _ -> false);
+			]
+		) l in
+		let leaks = match !leaks with
+			| [] -> jnull
+			| leaks ->
+				let jleaks = List.map (fun (m,leaks) ->
+					let jleaks = List.map (fun s -> jobject ["path",jstring s]) leaks in
+					jobject [
+						"path",jstring (s_type_path m.m_path);
+						"leaks",jarray jleaks;
+					]
+				) leaks in
+				jarray jleaks
+		in
+		let cache_mem = cc#get_pointers in
+		jobject [
+			"leaks",leaks;
+			"syntaxCache",jobject [
+				"size",jint (mem_size cache_mem.(0));
+			];
+			"moduleCache",jobject [
+				"size",jint (mem_size cache_mem.(1));
+				"list",jarray l;
+			];
+			"binaryCache",jobject [
+				"size",jint (mem_size cache_mem.(2));
+			];
+		]
+	| MModule(sign,path) ->
+		let cc = cs#get_context sign in
+		let m = cc#find_module path in
+		let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty cs#get_modules in
+		let _,(_,deps,out,_) = get_module_memory cs all_modules m in
+		let deps = update_module_type_deps deps m in
+		let out = get_out out in
+		let types = List.map (fun md ->
+			let fields,inf = match md with
+				| TClassDecl c ->
+					let own_deps = ref deps in
+					let field acc cf =
+						let repr = Obj.repr cf in
+						own_deps := List.filter (fun repr' -> repr != repr') !own_deps;
+						let deps = List.filter (fun repr' -> repr' != repr) deps in
+						let size = Objsize.size_with_headers (Objsize.objsize cf deps out) in
+						(cf,size) :: acc
+					in
+					let fields = List.fold_left field [] c.cl_ordered_fields in
+					let fields = List.fold_left field fields c.cl_ordered_statics in
+					let fields = List.sort (fun (_,size1) (_,size2) -> compare size2 size1) fields in
+					let fields = List.map (fun (cf,size) ->
+						jobject [
+							"name",jstring cf.cf_name;
+							"size",jint size;
+							"pos",generate_pos_as_location cf.cf_name_pos;
+						]
+					) fields in
+					let repr = Obj.repr c in
+					let deps = List.filter (fun repr' -> repr' != repr) !own_deps in
+					fields,Objsize.objsize c deps out
+				| TEnumDecl en ->
+					let repr = Obj.repr en in
+					let deps = List.filter (fun repr' -> repr' != repr) deps in
+					[],Objsize.objsize en deps out
+				| TTypeDecl td ->
+					let repr = Obj.repr td in
+					let deps = List.filter (fun repr' -> repr' != repr) deps in
+					[],Objsize.objsize td deps out
+				| TAbstractDecl a ->
+					let repr = Obj.repr a in
+					let deps = List.filter (fun repr' -> repr' != repr) deps in
+					[],Objsize.objsize a deps out
+			in
+			let size = Objsize.size_with_headers inf in
+			let jo = jobject [
+				"name",jstring (s_type_path (t_infos md).mt_path);
+				"size",jint size;
+				"pos",generate_pos_as_location (t_infos md).mt_name_pos;
+				"fields",jarray fields;
+			] in
+			size,jo
+		) m.m_types in
+		let types = List.sort (fun (size1,_) (size2,_) -> compare size2 size1) types in
+		let types = List.map snd types in
+		jobject [
+			"moduleExtra",jint (Objsize.size_with_headers (Objsize.objsize m.m_extra deps out));
+			"types",jarray types;
+		]
+	end
+
+let display_memory com =
+	let verbose = com.verbose in
+	let print = print_endline in
+	Gc.full_major();
+	Gc.compact();
+	let mem = Gc.stat() in
+	print ("Total Allocated Memory " ^ fmt_size (mem.Gc.heap_words * (Sys.word_size asr 8)));
+	print ("Free Memory " ^ fmt_size (mem.Gc.free_words * (Sys.word_size asr 8)));
+	let c = com.cs in
+	print ("Total cache size " ^ size c);
+	(* print ("  haxelib " ^ size c.c_haxelib); *)
+	(* print ("  parsed ast " ^ size c.c_files ^ " (" ^ string_of_int (Hashtbl.length c.c_files) ^ " files stored)"); *)
+	(* print ("  typed modules " ^ size c.c_modules ^ " (" ^ string_of_int (Hashtbl.length c.c_modules) ^ " modules stored)"); *)
+	let module_list = c#get_modules in
+	let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty module_list in
+	let modules = List.fold_left (fun acc m ->
+		let (size,r) = get_module_memory c all_modules m in
+		(m,size,r) :: acc
+	) [] module_list in
+	let cur_key = ref "" and tcount = ref 0 and mcount = ref 0 in
+	List.iter (fun (m,size,(reached,deps,out,leaks)) ->
+		let key = m.m_extra.m_sign in
+		if key <> !cur_key then begin
+			print (Printf.sprintf ("    --- CONFIG %s ----------------------------") (Digest.to_hex key));
+			cur_key := key;
+		end;
+		print (Printf.sprintf "    %s : %s" (s_type_path m.m_path) (fmt_size size));
+		(if reached then try
+			incr mcount;
+			let lcount = ref 0 in
+			let leak l =
+				incr lcount;
+				incr tcount;
+				print (Printf.sprintf "      LEAK %s" l);
+				if !lcount >= 3 && !tcount >= 100 && not verbose then begin
+					print (Printf.sprintf "      ...");
+					raise Exit;
+				end;
+			in
+			List.iter leak leaks;
+		with Exit ->
+			());
+		if verbose then begin
+			print (Printf.sprintf "      %d total deps" (List.length deps));
+			PMap.iter (fun _ mdep ->
+				let md = (com.cs#get_context mdep.md_sign)#find_module mdep.md_path in
+				print (Printf.sprintf "      dep %s%s" (s_type_path mdep.md_path) (module_sign key md));
+			) m.m_extra.m_deps;
+		end;
+		flush stdout
+	) (List.sort (fun (m1,s1,_) (m2,s2,_) ->
+		let k1 = m1.m_extra.m_sign and k2 = m2.m_extra.m_sign in
+		if k1 = k2 then s1 - s2 else if k1 > k2 then 1 else -1
+	) modules);
+	if !mcount > 0 then print ("*** " ^ string_of_int !mcount ^ " modules have leaks !");
+	print "Cache dump complete"

+ 7 - 7
src/context/display/displayTexpr.ml

@@ -73,7 +73,7 @@ let check_display_field ctx sc c cf =
 
 let check_display_class ctx decls c =
 	let check_field sc cf =
-		if display_position#enclosed_in cf.cf_pos then
+		if not (has_class_field_flag cf CfNoLookup) && display_position#enclosed_in cf.cf_pos then
 			check_display_field ctx sc c cf;
 		DisplayEmitter.check_display_metadata ctx cf.cf_meta
 	in
@@ -88,7 +88,7 @@ let check_display_class ctx decls c =
 		List.iter check_field c.cl_ordered_statics;
 	| _ ->
 		let sc = find_class_by_position decls c.cl_name_pos in
-		ignore(Typeload.type_type_params ctx TPHType c.cl_path null_pos sc.d_params);
+		ignore(Typeload.type_type_params ctx TPHType c.cl_path sc.d_params);
 		List.iter (function
 			| (HExtends ptp | HImplements ptp) when display_position#enclosed_in ptp.pos_full ->
 				ignore(Typeload.load_instance ~allow_display:true ctx ptp ParamNormal LoadNormal)
@@ -102,7 +102,7 @@ let check_display_class ctx decls c =
 
 let check_display_enum ctx decls en =
 	let se = find_enum_by_position decls en.e_name_pos in
-	ignore(Typeload.type_type_params ctx TPHType en.e_path null_pos se.d_params);
+	ignore(Typeload.type_type_params ctx TPHType en.e_path se.d_params);
 	PMap.iter (fun _ ef ->
 		if display_position#enclosed_in ef.ef_pos then begin
 			let sef = find_enum_field_by_position se ef.ef_name_pos in
@@ -112,12 +112,12 @@ let check_display_enum ctx decls en =
 
 let check_display_typedef ctx decls td =
 	let st = find_typedef_by_position decls td.t_name_pos in
-	ignore(Typeload.type_type_params ctx TPHType td.t_path null_pos st.d_params);
+	ignore(Typeload.type_type_params ctx TPHType td.t_path st.d_params);
 	ignore(Typeload.load_complex_type ctx true LoadNormal st.d_data)
 
 let check_display_abstract ctx decls a =
 	let sa = find_abstract_by_position decls a.a_name_pos in
-	ignore(Typeload.type_type_params ctx TPHType a.a_path null_pos sa.d_params);
+	ignore(Typeload.type_type_params ctx TPHType a.a_path sa.d_params);
 	List.iter (function
 		| (AbOver(ct,p) | AbFrom(ct,p) | AbTo(ct,p)) when display_position#enclosed_in p ->
 			ignore(Typeload.load_complex_type ctx true LoadNormal (ct,p))
@@ -174,11 +174,11 @@ let check_display_file ctx cs =
 			let m = try
 				ctx.com.module_lut#find path
 			with Not_found ->
-				begin match !TypeloadCacheHook.type_module_hook ctx.com (delay ctx.g PConnectField) path null_pos with
+				begin match !TypeloadCacheHook.type_module_hook ctx.com (delay ctx.g) path null_pos with
 				| NoModule | BadModule _ -> raise Not_found
 				| BinaryModule mc ->
 					let api = (new TypeloadModule.hxb_reader_api_typeload ctx.com ctx.g TypeloadModule.load_module' p :> HxbReaderApi.hxb_reader_api) in
-					let reader = new HxbReader.hxb_reader path ctx.com.hxb_reader_stats (Some cc#get_string_pool_arr) (Common.defined ctx.com Define.HxbTimes) in
+					let reader = new HxbReader.hxb_reader path ctx.com.hxb_reader_stats (if Common.defined ctx.com Define.HxbTimes then Some ctx.com.timer_ctx else None) in
 					let m = reader#read_chunks api mc.mc_chunks in
 					m
 				| GoodModule m ->

+ 211 - 211
src/context/display/displayToplevel.ml

@@ -109,20 +109,20 @@ end
 
 let explore_class_paths com timer class_paths recursive f_pack f_module =
 	let cs = com.cs in
-	let t = Timer.timer (timer @ ["class path exploration"]) in
-	let checked = Hashtbl.create 0 in
-	let tasks = ExtList.List.filter_map (fun path ->
-		match path#get_directory_path with
-			| Some path ->
-				Some (new explore_class_path_task com checked recursive f_pack f_module path [])
-			| None ->
-				None
-	) class_paths in
-	let task = new arbitrary_task ["explore"] 50 (fun () ->
-		List.iter (fun task -> task#run) tasks
-	) in
-	cs#add_task task;
-	t()
+	Timer.time com.timer_ctx (timer @ ["class path exploration"]) (fun () ->
+		let checked = Hashtbl.create 0 in
+		let tasks = ExtList.List.filter_map (fun path ->
+			match path#get_directory_path with
+				| Some path ->
+					Some (new explore_class_path_task com checked recursive f_pack f_module path [])
+				| None ->
+					None
+		) class_paths in
+		let task = new arbitrary_task ["explore"] 50 (fun () ->
+			List.iter (fun task -> task#run) tasks
+		) in
+		cs#add_task task;
+	) ()
 
 let read_class_paths com timer =
 	explore_class_paths com timer (com.class_paths#filter (fun cp -> cp#path <> "")) true (fun _ -> ()) (fun file path ->
@@ -225,7 +225,6 @@ let is_pack_visible pack =
 	not (List.exists (fun s -> String.length s > 0 && s.[0] = '_') pack)
 
 let collect ctx tk with_type sort =
-	let t = Timer.timer ["display";"toplevel collect"] in
 	let cctx = CollectionContext.create ctx in
 	let curpack = fst ctx.c.curclass.cl_path in
 	(* Note: This checks for the explicit `ServerConfig.legacy_completion` setting instead of using
@@ -298,12 +297,12 @@ let collect ctx tk with_type sort =
 	| TKType | TKOverride -> ()
 	| TKExpr p | TKPattern p | TKField p ->
 		(* locals *)
-		let t = Timer.timer ["display";"toplevel collect";"locals"] in
-		PMap.iter (fun _ v ->
-			if not (is_gen_local v) then
-				add (make_ci_local v (tpair ~values:(get_value_meta v.v_meta) v.v_type)) (Some v.v_name)
-		) ctx.f.locals;
-		t();
+		Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"locals"] (fun () ->
+			PMap.iter (fun _ v ->
+				if not (is_gen_local v) then
+					add (make_ci_local v (tpair ~values:(get_value_meta v.v_meta) v.v_type)) (Some v.v_name)
+			) ctx.f.locals;
+		) ();
 
 		let add_field scope origin cf =
 			let origin,cf = match origin with
@@ -329,137 +328,137 @@ let collect ctx tk with_type sort =
 			if not (Meta.has Meta.NoCompletion cf.cf_meta) then add_field scope origin cf
 		in
 
-		let t = Timer.timer ["display";"toplevel collect";"fields"] in
-		(* member fields *)
-		if ctx.e.curfun <> FunStatic then begin
-			let all_fields = Type.TClass.get_all_fields ctx.c.curclass (extract_param_types ctx.c.curclass.cl_params) in
-			PMap.iter (fun _ (c,cf) ->
-				let origin = if c == ctx.c.curclass then Self (TClassDecl c) else Parent (TClassDecl c) in
-				maybe_add_field CFSMember origin cf
-			) all_fields;
-			(* TODO: local using? *)
-		end;
-
-		(* statics *)
-		begin match ctx.c.curclass.cl_kind with
-		| KAbstractImpl ({a_impl = Some c} as a) ->
-			let origin = Self (TAbstractDecl a) in
-			List.iter (fun cf ->
-				if has_class_field_flag cf CfImpl then begin
-					if ctx.e.curfun = FunStatic then ()
-					else begin
-						let cf = prepare_using_field cf in
-						maybe_add_field CFSMember origin cf
-					end
-				end else
-					maybe_add_field CFSStatic origin cf
-			) c.cl_ordered_statics
-		| _ ->
-			List.iter (maybe_add_field CFSStatic (Self (TClassDecl ctx.c.curclass))) ctx.c.curclass.cl_ordered_statics
-		end;
-		t();
-
-		let t = Timer.timer ["display";"toplevel collect";"enum ctors"] in
-		(* enum constructors *)
-		let rec enum_ctors t =
-			match t with
-			| TAbstractDecl ({a_impl = Some c} as a) when a.a_enum && not (path_exists cctx a.a_path) && ctx.c.curclass != c ->
-				add_path cctx a.a_path;
+		Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"fields"] (fun () ->
+			(* member fields *)
+			if ctx.e.curfun <> FunStatic then begin
+				let all_fields = Type.TClass.get_all_fields ctx.c.curclass (extract_param_types ctx.c.curclass.cl_params) in
+				PMap.iter (fun _ (c,cf) ->
+					let origin = if c == ctx.c.curclass then Self (TClassDecl c) else Parent (TClassDecl c) in
+					maybe_add_field CFSMember origin cf
+				) all_fields;
+				(* TODO: local using? *)
+			end;
+
+			(* statics *)
+			begin match ctx.c.curclass.cl_kind with
+			| KAbstractImpl ({a_impl = Some c} as a) ->
+				let origin = Self (TAbstractDecl a) in
 				List.iter (fun cf ->
-					let ccf = CompletionClassField.make cf CFSMember (Self (decl_of_class c)) true in
-					if (has_class_field_flag cf CfEnum) && not (Meta.has Meta.NoCompletion cf.cf_meta) then
-						add (make_ci_enum_abstract_field a ccf (tpair cf.cf_type)) (Some cf.cf_name);
+					if has_class_field_flag cf CfImpl then begin
+						if ctx.e.curfun = FunStatic then ()
+						else begin
+							let cf = prepare_using_field cf in
+							maybe_add_field CFSMember origin cf
+						end
+					end else
+						maybe_add_field CFSStatic origin cf
 				) c.cl_ordered_statics
-			| TTypeDecl t ->
-				begin match follow t.t_type with
-					| TEnum (e,_) -> enum_ctors (TEnumDecl e)
-					| _ -> ()
-				end
-			| TEnumDecl e when not (path_exists cctx e.e_path) ->
-				add_path cctx e.e_path;
-				let origin = Self (TEnumDecl e) in
-				PMap.iter (fun _ ef ->
-					let is_qualified = is_qualified cctx ef.ef_name in
-					add (make_ci_enum_field (CompletionEnumField.make ef origin is_qualified) (tpair ef.ef_type)) (Some ef.ef_name)
-				) e.e_constrs;
 			| _ ->
-				()
-		in
-		List.iter enum_ctors ctx.m.curmod.m_types;
-		List.iter enum_ctors (List.map fst ctx.m.import_resolution#extract_type_imports);
-
-		(* enum constructors of expected type *)
-		begin match with_type with
-			| WithType.WithType(t,_) ->
-				(try enum_ctors (module_type_of_type (follow t)) with Exit -> ())
-			| _ -> ()
-		end;
-		t();
-
-		let t = Timer.timer ["display";"toplevel collect";"globals"] in
-		(* imported globals *)
-		PMap.iter (fun name (mt,s,_) ->
-			try
-				let is_qualified = is_qualified cctx name in
-				let class_import c =
-					let cf = PMap.find s c.cl_statics in
-					let cf = if name = cf.cf_name then cf else {cf with cf_name = name} in
-					let decl,make = match c.cl_kind with
-						| KAbstractImpl a -> TAbstractDecl a,
-							if has_class_field_flag cf CfEnum then make_ci_enum_abstract_field a else make_ci_class_field
-						| _ -> TClassDecl c,make_ci_class_field
+				List.iter (maybe_add_field CFSStatic (Self (TClassDecl ctx.c.curclass))) ctx.c.curclass.cl_ordered_statics
+			end;
+		) ();
+
+		Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"enum ctors"] (fun () ->
+			(* enum constructors *)
+			let rec enum_ctors t =
+				match t with
+				| TAbstractDecl ({a_impl = Some c} as a) when a.a_enum && not (path_exists cctx a.a_path) && ctx.c.curclass != c ->
+					add_path cctx a.a_path;
+					List.iter (fun cf ->
+						let ccf = CompletionClassField.make cf CFSMember (Self (decl_of_class c)) true in
+						if (has_class_field_flag cf CfEnum) && not (Meta.has Meta.NoCompletion cf.cf_meta) then
+							add (make_ci_enum_abstract_field a ccf (tpair cf.cf_type)) (Some cf.cf_name);
+					) c.cl_ordered_statics
+				| TTypeDecl t ->
+					begin match follow t.t_type with
+						| TEnum (e,_) -> enum_ctors (TEnumDecl e)
+						| _ -> ()
+					end
+				| TEnumDecl e when not (path_exists cctx e.e_path) ->
+					add_path cctx e.e_path;
+					let origin = Self (TEnumDecl e) in
+					PMap.iter (fun _ ef ->
+						let is_qualified = is_qualified cctx ef.ef_name in
+						add (make_ci_enum_field (CompletionEnumField.make ef origin is_qualified) (tpair ef.ef_type)) (Some ef.ef_name)
+					) e.e_constrs;
+				| _ ->
+					()
+			in
+			List.iter enum_ctors ctx.m.curmod.m_types;
+			List.iter enum_ctors (List.map fst ctx.m.import_resolution#extract_type_imports);
+
+			(* enum constructors of expected type *)
+			begin match with_type with
+				| WithType.WithType(t,_) ->
+					(try enum_ctors (module_type_of_type (follow t)) with Exit -> ())
+				| _ -> ()
+			end;
+		) ();
+
+		Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"globals"] (fun () ->
+			(* imported globals *)
+			PMap.iter (fun name (mt,s,_) ->
+				try
+					let is_qualified = is_qualified cctx name in
+					let class_import c =
+						let cf = PMap.find s c.cl_statics in
+						let cf = if name = cf.cf_name then cf else {cf with cf_name = name} in
+						let decl,make = match c.cl_kind with
+							| KAbstractImpl a -> TAbstractDecl a,
+								if has_class_field_flag cf CfEnum then make_ci_enum_abstract_field a else make_ci_class_field
+							| _ -> TClassDecl c,make_ci_class_field
+						in
+						let origin = StaticImport decl in
+						if can_access ctx c cf true && not (Meta.has Meta.NoCompletion cf.cf_meta) then begin
+							add (make (CompletionClassField.make cf CFSStatic origin is_qualified) (tpair ~values:(get_value_meta cf.cf_meta) cf.cf_type)) (Some name)
+						end
 					in
-					let origin = StaticImport decl in
-					if can_access ctx c cf true && not (Meta.has Meta.NoCompletion cf.cf_meta) then begin
-						add (make (CompletionClassField.make cf CFSStatic origin is_qualified) (tpair ~values:(get_value_meta cf.cf_meta) cf.cf_type)) (Some name)
+					match resolve_typedef mt with
+						| TClassDecl c -> class_import c;
+						| TEnumDecl en ->
+							let ef = PMap.find s en.e_constrs in
+							let ef = if name = ef.ef_name then ef else {ef with ef_name = name} in
+							let origin = StaticImport (TEnumDecl en) in
+							add (make_ci_enum_field (CompletionEnumField.make ef origin is_qualified) (tpair ef.ef_type)) (Some s)
+						| TAbstractDecl {a_impl = Some c} -> class_import c;
+						| _ -> raise Not_found
+				with Not_found ->
+					()
+			) ctx.m.import_resolution#extract_field_imports;
+		) ();
+
+		Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"rest"] (fun () ->
+			(* literals *)
+			add (make_ci_literal "null" (tpair t_dynamic)) (Some "null");
+			add (make_ci_literal "true" (tpair ctx.com.basic.tbool)) (Some "true");
+			add (make_ci_literal "false" (tpair ctx.com.basic.tbool)) (Some "false");
+			begin match ctx.e.curfun with
+				| FunMember | FunConstructor | FunMemberClassLocal ->
+					let t = TInst(ctx.c.curclass,extract_param_types ctx.c.curclass.cl_params) in
+					add (make_ci_literal "this" (tpair t)) (Some "this");
+					begin match ctx.c.curclass.cl_super with
+						| Some(c,tl) -> add (make_ci_literal "super" (tpair (TInst(c,tl)))) (Some "super")
+						| None -> ()
 					end
-				in
-				match resolve_typedef mt with
-					| TClassDecl c -> class_import c;
-					| TEnumDecl en ->
-						let ef = PMap.find s en.e_constrs in
-						let ef = if name = ef.ef_name then ef else {ef with ef_name = name} in
-						let origin = StaticImport (TEnumDecl en) in
-						add (make_ci_enum_field (CompletionEnumField.make ef origin is_qualified) (tpair ef.ef_type)) (Some s)
-					| TAbstractDecl {a_impl = Some c} -> class_import c;
-					| _ -> raise Not_found
-			with Not_found ->
-				()
-		) ctx.m.import_resolution#extract_field_imports;
-		t();
-
-		let t = Timer.timer ["display";"toplevel collect";"rest"] in
-		(* literals *)
-		add (make_ci_literal "null" (tpair t_dynamic)) (Some "null");
-		add (make_ci_literal "true" (tpair ctx.com.basic.tbool)) (Some "true");
-		add (make_ci_literal "false" (tpair ctx.com.basic.tbool)) (Some "false");
-		begin match ctx.e.curfun with
-			| FunMember | FunConstructor | FunMemberClassLocal ->
-				let t = TInst(ctx.c.curclass,extract_param_types ctx.c.curclass.cl_params) in
-				add (make_ci_literal "this" (tpair t)) (Some "this");
-				begin match ctx.c.curclass.cl_super with
-					| Some(c,tl) -> add (make_ci_literal "super" (tpair (TInst(c,tl)))) (Some "super")
-					| None -> ()
-				end
-			| FunMemberAbstract ->
-				let t = TInst(ctx.c.curclass,extract_param_types ctx.c.curclass.cl_params) in
-				add (make_ci_literal "abstract" (tpair t)) (Some "abstract");
-			| _ ->
-				()
-		end;
-
-		if not is_legacy_completion then begin
-			(* keywords *)
-			let kwds = [
-				Function; Var; Final; If; Else; While; Do; For; Break; Return; Continue; Switch;
-				Try; New; Throw; Untyped; Cast; Inline;
-			] in
-			List.iter (fun kwd -> add(make_ci_keyword kwd) (Some (s_keyword kwd))) kwds;
-
-			(* builtins *)
-			add (make_ci_literal "trace" (tpair (TFun(["value",false,t_dynamic],ctx.com.basic.tvoid)))) (Some "trace")
-		end;
-		t()
+				| FunMemberAbstract ->
+					let t = TInst(ctx.c.curclass,extract_param_types ctx.c.curclass.cl_params) in
+					add (make_ci_literal "abstract" (tpair t)) (Some "abstract");
+				| _ ->
+					()
+			end;
+
+			if not is_legacy_completion then begin
+				(* keywords *)
+				let kwds = [
+					Function; Var; Final; If; Else; While; Do; For; Break; Return; Continue; Switch;
+					Try; New; Throw; Untyped; Cast; Inline;
+				] in
+				List.iter (fun kwd -> add(make_ci_keyword kwd) (Some (s_keyword kwd))) kwds;
+
+				(* builtins *)
+				add (make_ci_literal "trace" (tpair (TFun(["value",false,t_dynamic],ctx.com.basic.tvoid)))) (Some "trace")
+			end;
+		) ();
 	end;
 
 	(* type params *)
@@ -473,75 +472,76 @@ let collect ctx tk with_type sort =
 	(* module imports *)
 	List.iter add_type (List.rev_map fst ctx.m.import_resolution#extract_type_imports); (* reverse! *)
 
-	let t_syntax = Timer.timer ["display";"toplevel collect";"syntax"] in
-	(* types from files *)
 	let cs = ctx.com.cs in
-	(* online: iter context files *)
-	init_or_update_server cs ctx.com ["display";"toplevel"];
-	let cc = CommonCache.get_cache ctx.com in
-	let files = cc#get_files in
-	(* Sort files by reverse distance of their package to our current package. *)
-	let files = Hashtbl.fold (fun file cfile acc ->
-		let i = pack_similarity curpack cfile.c_package in
-		((file,cfile),i) :: acc
-	) files [] in
-	let files = List.sort (fun (_,i1) (_,i2) -> -compare i1 i2) files in
-	let check_package pack = match List.rev pack with
+		let check_package pack = match List.rev pack with
 		| [] -> ()
 		| s :: sl -> add_package (List.rev sl,s)
 	in
-	List.iter (fun ((file_key,cfile),_) ->
-		let module_name = CompilationCache.get_module_name_of_cfile cfile.c_file_path.file cfile in
-		let dot_path = s_type_path (cfile.c_package,module_name) in
-		(* In legacy mode we only show toplevel types. *)
-		if is_legacy_completion && cfile.c_package <> [] then begin
-			(* And only toplevel packages. *)
-			match cfile.c_package with
-			| [s] -> add_package ([],s)
-			| _ -> ()
-		end else if (List.exists (fun e -> ExtString.String.starts_with dot_path (e ^ ".")) !exclude) then
-			()
-		else begin
-			ctx.com.module_to_file#add (cfile.c_package,module_name) cfile.c_file_path;
-			if process_decls cfile.c_package module_name cfile.c_decls then check_package cfile.c_package;
-		end
-	) files;
-	t_syntax();
-
-	let t_native_lib = Timer.timer ["display";"toplevel collect";"native lib"] in
-	List.iter (fun file ->
-		match cs#get_native_lib file with
-		| Some lib ->
-			Hashtbl.iter (fun path (pack,decls) ->
-				if process_decls pack (snd path) decls then check_package pack;
-			) lib.c_nl_files
-		| None ->
-			()
-	) ctx.com.native_libs.all_libs;
-	t_native_lib();
-
-	let t_packages = Timer.timer ["display";"toplevel collect";"packages"] in
-	(* packages *)
-	Hashtbl.iter (fun path _ ->
-		let full_pack = fst path @ [snd path] in
-		if is_pack_visible full_pack then add (make_ci_package path []) (Some (snd path))
-	) packages;
-	t_packages();
-
-	t();
-
-	let t = Timer.timer ["display";"toplevel sorting"] in
-	(* sorting *)
-	let l = DynArray.to_list cctx.items in
-	let l = if is_legacy_completion then
-		List.sort (fun item1 item2 -> compare (get_name item1) (get_name item2)) l
-	else if sort then
-		Display.sort_fields l with_type tk
-	else
+	Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"syntax"] (fun () ->
+		(* types from files *)
+		(* online: iter context files *)
+		init_or_update_server cs ctx.com ["display";"toplevel"];
+		let cc = CommonCache.get_cache ctx.com in
+		let files = cc#get_files in
+		(* Sort files by reverse distance of their package to our current package. *)
+		let files = Hashtbl.fold (fun file cfile acc ->
+			let i = pack_similarity curpack cfile.c_package in
+			((file,cfile),i) :: acc
+		) files [] in
+		let files = List.sort (fun (_,i1) (_,i2) -> -compare i1 i2) files in
+		List.iter (fun ((file_key,cfile),_) ->
+			let module_name = CompilationCache.get_module_name_of_cfile cfile.c_file_path.file cfile in
+			let dot_path = s_type_path (cfile.c_package,module_name) in
+			(* In legacy mode we only show toplevel types. *)
+			if is_legacy_completion && cfile.c_package <> [] then begin
+				(* And only toplevel packages. *)
+				match cfile.c_package with
+				| [s] -> add_package ([],s)
+				| _ -> ()
+			end else if (List.exists (fun e -> ExtString.String.starts_with dot_path (e ^ ".")) !exclude) then
+				()
+			else begin
+				ctx.com.module_to_file#add (cfile.c_package,module_name) cfile.c_file_path;
+				if process_decls cfile.c_package module_name cfile.c_decls then check_package cfile.c_package;
+			end
+		) files;
+	) ();
+
+	Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"native lib"] (fun () ->
+		List.iter (fun file ->
+			match cs#get_native_lib file with
+			| Some lib ->
+				Hashtbl.iter (fun path (pack,decls) ->
+					if process_decls pack (snd path) decls then check_package pack;
+				) lib.c_nl_files
+			| None ->
+				()
+		) ctx.com.native_libs.all_libs;
+	) ();
+
+	Timer.time ctx.com.timer_ctx ["display";"toplevel collect";"packages"] (fun () ->
+		(* packages *)
+		Hashtbl.iter (fun path _ ->
+			let full_pack = fst path @ [snd path] in
+			if is_pack_visible full_pack then add (make_ci_package path []) (Some (snd path))
+		) packages;
+	) ();
+
+	Timer.time ctx.com.timer_ctx ["display";"toplevel sorting"] (fun () ->
+		(* sorting *)
+		let l = DynArray.to_list cctx.items in
+		let l = if is_legacy_completion then
+			List.sort (fun item1 item2 -> compare (get_name item1) (get_name item2)) l
+		else if sort then
+			Display.sort_fields l with_type tk
+		else
+			l
+		in
 		l
-	in
-	t();
-	l
+	) ()
+
+let collect ctx tk with_type sort =
+	Timer.time ctx.com.timer_ctx ["display";"toplevel collect"] (collect ctx tk with_type) sort
 
 let collect_and_raise ctx tk with_type cr (name,pname) pinsert =
 	let fields = match !DisplayException.last_completion_pos with

+ 4 - 4
src/context/display/exprPreprocessing.ml

@@ -3,7 +3,7 @@ open Ast
 open DisplayTypes.DisplayMode
 open DisplayPosition
 
-let find_before_pos dm e =
+let find_before_pos was_auto_triggered dm e =
 	let display_pos = ref (DisplayPosition.display_position#get) in
 	let was_annotated = ref false in
 	let is_annotated,is_completion = match dm with
@@ -160,7 +160,7 @@ let find_before_pos dm e =
 			raise Exit
 		| EMeta((Meta.Markup,_,_),(EConst(String _),p)) when is_annotated p ->
 			annotate_marked e
-		| EConst (String (_,q)) when ((q <> SSingleQuotes) || !Parser.was_auto_triggered) && is_annotated (pos e) && is_completion ->
+		| EConst (String (_,q)) when ((q <> SSingleQuotes) || was_auto_triggered) && is_annotated (pos e) && is_completion ->
 			(* TODO: check if this makes any sense *)
 			raise Exit
 		| EConst(Regexp _) when is_annotated (pos e) && is_completion ->
@@ -199,13 +199,13 @@ let find_before_pos dm e =
 	in
 	try map e with Exit -> e
 
-let find_display_call e =
+let find_display_call was_auto_triggered e =
 	let found = ref false in
 	let handle_el e el =
 		let call_arg_is_marked () =
 			el = [] || List.exists (fun (e,_) -> match e with EDisplay(_,DKMarked) -> true | _ -> false) el
 		in
-		if not !Parser.was_auto_triggered || call_arg_is_marked () then begin
+		if not was_auto_triggered || call_arg_is_marked () then begin
 		found := true;
 		Parser.mk_display_expr e DKCall
 		end else

+ 25 - 29
src/context/display/findReferences.ml

@@ -8,22 +8,20 @@ let find_possible_references tctx cs =
 	let name,_,kind = Display.ReferencePosition.get () in
 	ignore(SyntaxExplorer.explore_uncached_modules tctx cs [name,kind])
 
-let find_references tctx com with_definition pos_filters =
-	let t = Timer.timer ["display";"references";"collect"] in
-	let symbols,relations = Statistics.collect_statistics tctx pos_filters true in
-	t();
+let find_references com with_definition pos_filters =
+	let symbols,relations = Timer.time com.timer_ctx ["display";"references";"collect"] (Statistics.collect_statistics com pos_filters) true in
 	let rec loop acc (relations:(Statistics.relation * pos) list) = match relations with
 		| (Statistics.Referenced,p) :: relations when not (List.mem p acc) -> loop (p :: acc) relations
 		| _ :: relations -> loop acc relations
 		| [] -> acc
 	in
-	let t = Timer.timer ["display";"references";"filter"] in
-	let usages = Hashtbl.fold (fun p sym acc ->
-		let acc = if with_definition then p :: acc else acc in
-		(try loop acc (Hashtbl.find relations p)
-		with Not_found -> acc)
-	) symbols [] in
-	t();
+	let usages = Timer.time com.timer_ctx ["display";"references";"filter"] (fun () ->
+		Hashtbl.fold (fun p sym acc ->
+			let acc = if with_definition then p :: acc else acc in
+			(try loop acc (Hashtbl.find relations p)
+			with Not_found -> acc)
+		) symbols []
+	) () in
 	Display.ReferencePosition.reset();
 	usages
 
@@ -121,14 +119,14 @@ let rec collect_reference_positions com (name,pos,kind) =
 	| _ ->
 		[name,pos,kind]
 
-let find_references tctx com with_definition =
+let find_references com with_definition =
 	let pos_filters =
 		List.fold_left (fun acc (_,p,_) ->
 			if p = null_pos then acc
 			else Statistics.SFPos p :: acc
 		) [] (collect_reference_positions com (Display.ReferencePosition.get ()))
 	in
-	let usages = find_references tctx com with_definition pos_filters in
+	let usages = find_references com with_definition pos_filters in
 	let usages =
 		List.sort (fun p1 p2 ->
 			let c = compare p1.pfile p2.pfile in
@@ -137,29 +135,27 @@ let find_references tctx com with_definition =
 	in
 	DisplayException.raise_positions usages
 
-let find_implementations tctx com name pos kind =
-	let t = Timer.timer ["display";"implementations";"collect"] in
-	let symbols,relations = Statistics.collect_statistics tctx [SFPos pos] false in
-	t();
+let find_implementations com name pos kind =
+	let symbols,relations = Timer.time com.timer_ctx ["display";"implementations";"collect"] (Statistics.collect_statistics com [SFPos pos]) false in
 	let rec loop acc relations = match relations with
 		| ((Statistics.Implemented | Statistics.Overridden | Statistics.Extended),p) :: relations -> loop (p :: acc) relations
 		| _ :: relations -> loop acc relations
 		| [] -> acc
 	in
-	let t = Timer.timer ["display";"implementations";"filter"] in
-	let usages = Hashtbl.fold (fun p sym acc ->
-		(try loop acc (Hashtbl.find relations p)
-		with Not_found -> acc)
-	) symbols [] in
-	let usages = List.sort (fun p1 p2 ->
-		let c = compare p1.pfile p2.pfile in
-		if c <> 0 then c else compare p1.pmin p2.pmin
-	) usages in
-	t();
+	let usages = Timer.time com.timer_ctx ["display";"implementations";"filter"] (fun () ->
+		let usages = Hashtbl.fold (fun p sym acc ->
+			(try loop acc (Hashtbl.find relations p)
+			with Not_found -> acc)
+		) symbols [] in
+		List.sort (fun p1 p2 ->
+			let c = compare p1.pfile p2.pfile in
+			if c <> 0 then c else compare p1.pmin p2.pmin
+		) usages
+	) () in
 	Display.ReferencePosition.reset();
 	DisplayException.raise_positions usages
 
-let find_implementations tctx com =
+let find_implementations com =
 	let name,pos,kind = Display.ReferencePosition.get () in
-	if pos <> null_pos then find_implementations tctx com name pos kind
+	if pos <> null_pos then find_implementations com name pos kind
 	else DisplayException.raise_positions []

+ 3 - 2
src/context/display/importHandling.ml

@@ -113,8 +113,9 @@ let init_import ctx path mode p =
 		let check_alias mt name pname =
 			if not (name.[0] >= 'A' && name.[0] <= 'Z') then
 				raise_typing_error "Type aliases must start with an uppercase letter" pname;
-			if ctx.m.is_display_file && DisplayPosition.display_position#enclosed_in pname then
-				DisplayEmitter.display_alias ctx name (type_of_module_type mt) pname;
+			(* Imports from import.hx should not match display position from current file *)
+			if ctx.m.is_display_file && DisplayPosition.display_position#enclosed_in pname && (Path.UniqueKey.create pname.pfile) = (Path.UniqueKey.lazy_key ctx.m.curmod.m_extra.m_file) then
+				DisplayEmitter.display_alias ctx name (type_of_module_type mt) pname
 		in
 		let add_static_init t name s =
 			match resolve_typedef t with

+ 4 - 8
src/context/display/statistics.ml

@@ -15,7 +15,7 @@ type statistics_filter =
 	| SFPos of pos
 	| SFFile of string
 
-let collect_statistics ctx pos_filters with_expressions =
+let collect_statistics com pos_filters with_expressions =
 	let relations = Hashtbl.create 0 in
 	let symbols = Hashtbl.create 0 in
 	let handled_modules = Hashtbl.create 0 in
@@ -25,7 +25,7 @@ let collect_statistics ctx pos_filters with_expressions =
 			try
 				Hashtbl.find paths path
 			with Not_found ->
-				let unique = ctx.com.file_keys#get path in
+				let unique = com.file_keys#get path in
 				Hashtbl.add paths path unique;
 				unique
 		)
@@ -175,10 +175,6 @@ let collect_statistics ctx pos_filters with_expressions =
 			| TVar(v,eo) ->
 				Option.may loop eo;
 				var_decl v;
-			| TFor(v,e1,e2) ->
-				var_decl v;
-				loop e1;
-				loop e2;
 			| TFunction tf ->
 				List.iter (fun (v,_) -> var_decl v) tf.tf_args;
 				loop tf.tf_expr;
@@ -213,7 +209,7 @@ let collect_statistics ctx pos_filters with_expressions =
 		List.iter f com.types;
 		Option.may loop (com.get_macros())
 	in
-	loop ctx.com;
+	loop com;
 	(* find things *)
 	let f = function
 		| TClassDecl c ->
@@ -258,7 +254,7 @@ let collect_statistics ctx pos_filters with_expressions =
 		List.iter f com.types;
 		Option.may loop (com.get_macros())
 	in
-	loop ctx.com;
+	loop com;
 	(* TODO: Using syntax-exploration here is technically fine, but I worry about performance in real codebases. *)
 	(* let find_symbols = Hashtbl.fold (fun _ kind acc ->
 		let name = string_of_symbol kind in

+ 18 - 18
src/context/display/syntaxExplorer.ml

@@ -165,23 +165,23 @@ let explore_uncached_modules tctx cs symbols =
 	let cc = CommonCache.get_cache tctx.com in
 	let files = cc#get_files in
 	let modules = cc#get_modules in
-	let t = Timer.timer ["display";"references";"candidates"] in
-	let acc = Hashtbl.fold (fun file_key cfile acc ->
-		let module_name = get_module_name_of_cfile cfile.c_file_path.file cfile in
-		if Hashtbl.mem modules (cfile.c_package,module_name) then
-			acc
-		else try
-			find_in_syntax symbols (cfile.c_package,cfile.c_decls);
-			acc
-		with Exit ->
-			begin try
-				let m = tctx.g.do_load_module tctx (cfile.c_package,module_name) null_pos in
-				(* We have to flush immediately so we catch exceptions from weird modules *)
-				Typecore.flush_pass tctx.g Typecore.PFinal ("final",cfile.c_package @ [module_name]);
-				m :: acc
-			with _ ->
+	let acc = Timer.time tctx.com.timer_ctx ["display";"references";"candidates"] (fun () ->
+		Hashtbl.fold (fun file_key cfile acc ->
+			let module_name = get_module_name_of_cfile cfile.c_file_path.file cfile in
+			if Hashtbl.mem modules (cfile.c_package,module_name) then
 				acc
-			end
-	) files [] in
-	t();
+			else try
+				find_in_syntax symbols (cfile.c_package,cfile.c_decls);
+				acc
+			with Exit ->
+				begin try
+					let m = tctx.g.do_load_module tctx (cfile.c_package,module_name) null_pos in
+					(* We have to flush immediately so we catch exceptions from weird modules *)
+					Typecore.flush_pass tctx.g PFinal ("final",cfile.c_package @ [module_name]);
+					m :: acc
+				with _ ->
+					acc
+				end
+		) files []
+	) () in
 	acc

+ 3 - 3
src/context/formatString.ml

@@ -1,7 +1,7 @@
 open Globals
 open Ast
 
-let format_string defines s p process_expr =
+let format_string config s p process_expr =
 	let e = ref None in
 	let pmin = ref p.pmin in
 	let min = ref (p.pmin + 1) in
@@ -83,8 +83,8 @@ let format_string defines s p process_expr =
 					if Lexer.string_is_whitespace scode then Error.raise_typing_error "Expression cannot be empty" ep
 					else Error.raise_typing_error msg pos
 				in
-				match ParserEntry.parse_expr_string defines scode ep error true with
-					| ParseSuccess(data,_,_) -> data
+				match ParserEntry.parse_expr_string config scode ep error true with
+					| ParseSuccess(data,_) -> data
 					| ParseError(_,(msg,p),_) -> error (Parser.error_msg msg) p
 			in
 			add_expr e slen

+ 0 - 50
src/context/lookup.ml

@@ -7,21 +7,13 @@ class virtual ['key,'value] lookup = object(self)
 	method virtual fold : 'acc . ('key -> 'value -> 'acc -> 'acc) -> 'acc -> 'acc
 	method virtual mem : 'key -> bool
 	method virtual clear : unit
-
-	method virtual start_group : int
-	method virtual commit_group : int -> int
-	method virtual discard_group : int -> int
 end
 
 class ['key,'value] pmap_lookup = object(self)
 	inherit ['key,'value] lookup
 	val mutable lut : ('key,'value) PMap.t = PMap.empty
 
-	val mutable group_id : int ref = ref 0
-	val mutable groups : (int,'key list) PMap.t = PMap.empty
-
 	method add (key : 'key) (value : 'value) =
-		groups <- PMap.map (fun modules -> key :: modules) groups;
 		lut <- PMap.add key value lut
 
 	method remove (key : 'key) =
@@ -41,36 +33,13 @@ class ['key,'value] pmap_lookup = object(self)
 
 	method clear =
 		lut <- PMap.empty
-
-	method start_group =
-		incr group_id;
-		let i = !group_id in
-		groups <- PMap.add i [] groups;
-		i
-
-	method commit_group i =
-		let group = PMap.find i groups in
-		let n = List.length group in
-		groups <- PMap.remove i groups;
-		n
-
-	method discard_group i =
-		let group = PMap.find i groups in
-		let n = List.length group in
-		List.iter (fun mpath -> self#remove mpath) group;
-		groups <- PMap.remove i groups;
-		n
 end
 
 class ['key,'value] hashtbl_lookup = object(self)
 	inherit ['key,'value] lookup
 	val lut : ('key,'value) Hashtbl.t = Hashtbl.create 0
 
-	val mutable group_id : int ref = ref 0
-	val mutable groups : (int,'key list) Hashtbl.t = Hashtbl.create 0
-
 	method add (key : 'key) (value : 'value) =
-		Hashtbl.iter (fun i modules -> Hashtbl.replace groups i (key :: modules)) groups;
 		Hashtbl.replace lut key value
 
 	method remove (key : 'key) =
@@ -90,24 +59,5 @@ class ['key,'value] hashtbl_lookup = object(self)
 
 	method clear =
 		Hashtbl.clear lut
-
-	method start_group =
-		incr group_id;
-		let i = !group_id in
-		Hashtbl.replace groups i [];
-		i
-
-	method commit_group i =
-		let group = Hashtbl.find groups i in
-		let n = List.length group in
-		Hashtbl.remove groups i;
-		n
-
-	method discard_group i =
-		let group = Hashtbl.find groups i in
-		let n = List.length group in
-		List.iter (fun mpath -> self#remove mpath) group;
-		Hashtbl.remove groups i;
-		n
 end
 

+ 0 - 252
src/context/memory.ml

@@ -1,7 +1,6 @@
 open Globals
 open Common
 open Type
-open Genjson
 
 type memory_request =
 	| MCache
@@ -94,254 +93,3 @@ let fmt_word f =
 
 let size v =
 	fmt_size (mem_size v)
-
-let get_memory_json (cs : CompilationCache.t) mreq =
-	begin match mreq with
-	| MCache ->
-		let old_gc = Gc.get() in
-		Gc.set { old_gc with
-			Gc.max_overhead = 0;
-			Gc.space_overhead = 0
-		};
-		Gc.compact();
-		Gc.set old_gc;
-		let stat = Gc.quick_stat() in
-		let size = (float_of_int stat.Gc.heap_words) *. (float_of_int (Sys.word_size / 8)) in
-		let cache_mem = cs#get_pointers in
-		let contexts = cs#get_contexts in
-		let j_contexts = List.map (fun cc -> jobject [
-			"context",cc#get_json;
-			"size",jint (mem_size cc);
-		]) contexts in
-		let mem_size_2 v exclude =
-			Objsize.size_with_headers (Objsize.objsize v exclude [])
-		in
-		jobject [
-			"contexts",jarray j_contexts;
-			"memory",jobject [
-				"totalCache",jint (mem_size cs);
-				"contextCache",jint (mem_size cache_mem.(0));
-				"haxelibCache",jint (mem_size cache_mem.(1));
-				"directoryCache",jint (mem_size cache_mem.(2));
-				"nativeLibCache",jint (mem_size cache_mem.(3));
-				"additionalSizes",jarray (
-					(match !MacroContext.macro_interp_cache with
-					| Some interp ->
-						jobject ["name",jstring "macro interpreter";"size",jint (mem_size MacroContext.macro_interp_cache);"child",jarray [
-							jobject ["name",jstring "builtins";"size",jint (mem_size_2 interp.builtins [Obj.repr interp])];
-							jobject ["name",jstring "debug";"size",jint (mem_size_2 interp.debug [Obj.repr interp])];
-							jobject ["name",jstring "curapi";"size",jint (mem_size_2 interp.curapi [Obj.repr interp])];
-							jobject ["name",jstring "type_cache";"size",jint (mem_size_2 interp.type_cache [Obj.repr interp])];
-							jobject ["name",jstring "overrides";"size",jint (mem_size_2 interp.overrides [Obj.repr interp])];
-							jobject ["name",jstring "array_prototype";"size",jint (mem_size_2 interp.array_prototype [Obj.repr interp])];
-							jobject ["name",jstring "string_prototype";"size",jint (mem_size_2 interp.string_prototype [Obj.repr interp])];
-							jobject ["name",jstring "vector_prototype";"size",jint (mem_size_2 interp.vector_prototype [Obj.repr interp])];
-							jobject ["name",jstring "instance_prototypes";"size",jint (mem_size_2 interp.instance_prototypes [Obj.repr interp])];
-							jobject ["name",jstring "static_prototypes";"size",jint (mem_size_2 interp.static_prototypes [Obj.repr interp])];
-							jobject ["name",jstring "constructors";"size",jint (mem_size_2 interp.constructors [Obj.repr interp])];
-							jobject ["name",jstring "file_keys";"size",jint (mem_size_2 interp.file_keys [Obj.repr interp])];
-							jobject ["name",jstring "toplevel";"size",jint (mem_size_2 interp.toplevel [Obj.repr interp])];
-							jobject ["name",jstring "eval";"size",jint (mem_size_2 interp.eval [Obj.repr interp]);"child", jarray [
-								(match interp.eval.env with
-								| Some env ->
-									jobject ["name",jstring "env";"size",jint (mem_size_2 interp.eval.env [Obj.repr interp; Obj.repr interp.eval]);"child", jarray [
-										jobject ["name",jstring "env_info";"size",jint (mem_size_2 env.env_info [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
-										jobject ["name",jstring "env_debug";"size",jint (mem_size_2 env.env_debug [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
-										jobject ["name",jstring "env_locals";"size",jint (mem_size_2 env.env_locals [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
-										jobject ["name",jstring "env_captures";"size",jint (mem_size_2 env.env_captures [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
-										jobject ["name",jstring "env_extra_locals";"size",jint (mem_size_2 env.env_extra_locals [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
-										jobject ["name",jstring "env_parent";"size",jint (mem_size_2 env.env_parent [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
-										jobject ["name",jstring "env_eval";"size",jint (mem_size_2 env.env_eval [Obj.repr interp; Obj.repr interp.eval; Obj.repr env])];
-									]];
-								| None ->
-									jobject ["name",jstring "env";"size",jint (mem_size_2 interp.eval.env [Obj.repr interp; Obj.repr interp.eval])];
-								);
-								jobject ["name",jstring "thread";"size",jint (mem_size_2 interp.eval.thread [Obj.repr interp; Obj.repr interp.eval]);"child", jarray [
-									jobject ["name",jstring "tthread";"size",jint (mem_size_2 interp.eval.thread.tthread [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
-									jobject ["name",jstring "tdeque";"size",jint (mem_size_2 interp.eval.thread.tdeque [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
-									jobject ["name",jstring "tevents";"size",jint (mem_size_2 interp.eval.thread.tevents [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
-									jobject ["name",jstring "tstorage";"size",jint (mem_size_2 interp.eval.thread.tstorage [Obj.repr interp; Obj.repr interp.eval; Obj.repr interp.eval.thread])];
-								]];
-								jobject ["name",jstring "debug_state";"size",jint (mem_size_2 interp.eval.debug_state [Obj.repr interp; Obj.repr interp.eval])];
-								jobject ["name",jstring "breakpoint";"size",jint (mem_size_2 interp.eval.breakpoint [Obj.repr interp; Obj.repr interp.eval])];
-								jobject ["name",jstring "caught_types";"size",jint (mem_size_2 interp.eval.caught_types [Obj.repr interp; Obj.repr interp.eval])];
-								jobject ["name",jstring "caught_exception";"size",jint (mem_size_2 interp.eval.caught_exception [Obj.repr interp; Obj.repr interp.eval])];
-								jobject ["name",jstring "last_return";"size",jint (mem_size_2 interp.eval.last_return [Obj.repr interp; Obj.repr interp.eval])];
-								jobject ["name",jstring "debug_channel";"size",jint (mem_size_2 interp.eval.debug_channel [Obj.repr interp; Obj.repr interp.eval])];
-							]];
-							jobject ["name",jstring "evals";"size",jint (mem_size_2 interp.evals [Obj.repr interp])];
-							jobject ["name",jstring "exception_stack";"size",jint (mem_size_2 interp.exception_stack [Obj.repr interp])];
-						]];
-					| None ->
-						jobject ["name",jstring "macro interpreter";"size",jint (mem_size MacroContext.macro_interp_cache)];
-					)
-					::
-					[
-						(* jobject ["name",jstring "macro stdlib";"size",jint (mem_size (EvalContext.GlobalState.stdlib))];
-						jobject ["name",jstring "macro macro_lib";"size",jint (mem_size (EvalContext.GlobalState.macro_lib))]; *)
-						jobject ["name",jstring "last completion result";"size",jint (mem_size (DisplayException.last_completion_result))];
-						jobject ["name",jstring "Lexer file cache";"size",jint (mem_size (Lexer.all_files))];
-						jobject ["name",jstring "GC heap words";"size",jint (int_of_float size)];
-					]
-				);
-			]
-		]
-	| MContext sign ->
-		let cc = cs#get_context sign in
-		let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty cs#get_modules in
-		let l = Hashtbl.fold (fun _ m acc ->
-			(m,(get_module_memory cs all_modules m)) :: acc
-		) cc#get_modules [] in
-		let l = List.sort (fun (_,(size1,_)) (_,(size2,_)) -> compare size2 size1) l in
-		let leaks = ref [] in
-		let l = List.map (fun (m,(size,(reached,_,_,mleaks))) ->
-			if reached then leaks := (m,mleaks) :: !leaks;
-			jobject [
-				"path",jstring (s_type_path m.m_path);
-				"size",jint size;
-				"hasTypes",jbool (match m.m_extra.m_kind with MCode | MMacro -> true | _ -> false);
-			]
-		) l in
-		let leaks = match !leaks with
-			| [] -> jnull
-			| leaks ->
-				let jleaks = List.map (fun (m,leaks) ->
-					let jleaks = List.map (fun s -> jobject ["path",jstring s]) leaks in
-					jobject [
-						"path",jstring (s_type_path m.m_path);
-						"leaks",jarray jleaks;
-					]
-				) leaks in
-				jarray jleaks
-		in
-		let cache_mem = cc#get_pointers in
-		jobject [
-			"leaks",leaks;
-			"syntaxCache",jobject [
-				"size",jint (mem_size cache_mem.(0));
-			];
-			"moduleCache",jobject [
-				"size",jint (mem_size cache_mem.(1));
-				"list",jarray l;
-			];
-			"binaryCache",jobject [
-				"size",jint (mem_size cache_mem.(2));
-			];
-		]
-	| MModule(sign,path) ->
-		let cc = cs#get_context sign in
-		let m = cc#find_module path in
-		let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty cs#get_modules in
-		let _,(_,deps,out,_) = get_module_memory cs all_modules m in
-		let deps = update_module_type_deps deps m in
-		let out = get_out out in
-		let types = List.map (fun md ->
-			let fields,inf = match md with
-				| TClassDecl c ->
-					let own_deps = ref deps in
-					let field acc cf =
-						let repr = Obj.repr cf in
-						own_deps := List.filter (fun repr' -> repr != repr') !own_deps;
-						let deps = List.filter (fun repr' -> repr' != repr) deps in
-						let size = Objsize.size_with_headers (Objsize.objsize cf deps out) in
-						(cf,size) :: acc
-					in
-					let fields = List.fold_left field [] c.cl_ordered_fields in
-					let fields = List.fold_left field fields c.cl_ordered_statics in
-					let fields = List.sort (fun (_,size1) (_,size2) -> compare size2 size1) fields in
-					let fields = List.map (fun (cf,size) ->
-						jobject [
-							"name",jstring cf.cf_name;
-							"size",jint size;
-							"pos",generate_pos_as_location cf.cf_name_pos;
-						]
-					) fields in
-					let repr = Obj.repr c in
-					let deps = List.filter (fun repr' -> repr' != repr) !own_deps in
-					fields,Objsize.objsize c deps out
-				| TEnumDecl en ->
-					let repr = Obj.repr en in
-					let deps = List.filter (fun repr' -> repr' != repr) deps in
-					[],Objsize.objsize en deps out
-				| TTypeDecl td ->
-					let repr = Obj.repr td in
-					let deps = List.filter (fun repr' -> repr' != repr) deps in
-					[],Objsize.objsize td deps out
-				| TAbstractDecl a ->
-					let repr = Obj.repr a in
-					let deps = List.filter (fun repr' -> repr' != repr) deps in
-					[],Objsize.objsize a deps out
-			in
-			let size = Objsize.size_with_headers inf in
-			let jo = jobject [
-				"name",jstring (s_type_path (t_infos md).mt_path);
-				"size",jint size;
-				"pos",generate_pos_as_location (t_infos md).mt_name_pos;
-				"fields",jarray fields;
-			] in
-			size,jo
-		) m.m_types in
-		let types = List.sort (fun (size1,_) (size2,_) -> compare size2 size1) types in
-		let types = List.map snd types in
-		jobject [
-			"moduleExtra",jint (Objsize.size_with_headers (Objsize.objsize m.m_extra deps out));
-			"types",jarray types;
-		]
-	end
-
-let display_memory com =
-	let verbose = com.verbose in
-	let print = print_endline in
-	Gc.full_major();
-	Gc.compact();
-	let mem = Gc.stat() in
-	print ("Total Allocated Memory " ^ fmt_size (mem.Gc.heap_words * (Sys.word_size asr 8)));
-	print ("Free Memory " ^ fmt_size (mem.Gc.free_words * (Sys.word_size asr 8)));
-	let c = com.cs in
-	print ("Total cache size " ^ size c);
-	(* print ("  haxelib " ^ size c.c_haxelib); *)
-	(* print ("  parsed ast " ^ size c.c_files ^ " (" ^ string_of_int (Hashtbl.length c.c_files) ^ " files stored)"); *)
-	(* print ("  typed modules " ^ size c.c_modules ^ " (" ^ string_of_int (Hashtbl.length c.c_modules) ^ " modules stored)"); *)
-	let module_list = c#get_modules in
-	let all_modules = List.fold_left (fun acc m -> PMap.add m.m_id m acc) PMap.empty module_list in
-	let modules = List.fold_left (fun acc m ->
-		let (size,r) = get_module_memory c all_modules m in
-		(m,size,r) :: acc
-	) [] module_list in
-	let cur_key = ref "" and tcount = ref 0 and mcount = ref 0 in
-	List.iter (fun (m,size,(reached,deps,out,leaks)) ->
-		let key = m.m_extra.m_sign in
-		if key <> !cur_key then begin
-			print (Printf.sprintf ("    --- CONFIG %s ----------------------------") (Digest.to_hex key));
-			cur_key := key;
-		end;
-		print (Printf.sprintf "    %s : %s" (s_type_path m.m_path) (fmt_size size));
-		(if reached then try
-			incr mcount;
-			let lcount = ref 0 in
-			let leak l =
-				incr lcount;
-				incr tcount;
-				print (Printf.sprintf "      LEAK %s" l);
-				if !lcount >= 3 && !tcount >= 100 && not verbose then begin
-					print (Printf.sprintf "      ...");
-					raise Exit;
-				end;
-			in
-			List.iter leak leaks;
-		with Exit ->
-			());
-		if verbose then begin
-			print (Printf.sprintf "      %d total deps" (List.length deps));
-			PMap.iter (fun _ mdep ->
-				let md = (com.cs#get_context mdep.md_sign)#find_module mdep.md_path in
-				print (Printf.sprintf "      dep %s%s" (s_type_path mdep.md_path) (module_sign key md));
-			) m.m_extra.m_deps;
-		end;
-		flush stdout
-	) (List.sort (fun (m1,s1,_) (m2,s2,_) ->
-		let k1 = m1.m_extra.m_sign and k2 = m2.m_extra.m_sign in
-		if k1 = k2 then s1 - s2 else if k1 > k2 then 1 else -1
-	) modules);
-	if !mcount > 0 then print ("*** " ^ string_of_int !mcount ^ " modules have leaks !");
-	print "Cache dump complete"

+ 28 - 0
src/context/parallel.ml

@@ -0,0 +1,28 @@
+let run_parallel_for num_domains ?(chunk_size=0) length f =
+	let pool = Domainslib.Task.setup_pool ~num_domains:(num_domains - 1) () in
+	Domainslib.Task.run pool (fun _ -> Domainslib.Task.parallel_for pool ~chunk_size ~start:0 ~finish:(length-1) ~body:f);
+	Domainslib.Task.teardown_pool pool
+
+module ParallelArray = struct
+	let iter pool f a =
+		let f' idx = f a.(idx) in
+		Domainslib.Task.parallel_for pool ~start:0 ~finish:(Array.length a - 1) ~body:f'
+
+	let map pool f a x =
+		let length = Array.length a in
+		let a_out = Array.make length x in
+		let f' idx =
+			Array.unsafe_set a_out idx (f (Array.unsafe_get a idx))
+		in
+		Domainslib.Task.parallel_for pool ~start:0 ~finish:(length - 1) ~body:f';
+		a_out
+end
+
+module ParallelSeq = struct
+	let iter pool f seq =
+		ParallelArray.iter pool f (Array.of_seq seq)
+end
+
+let run_in_new_pool timer_ctx f =
+	let pool = Timer.time timer_ctx ["domainslib";"setup"] (Domainslib.Task.setup_pool ~num_domains:(Domain.recommended_domain_count() - 1)) () in
+	Std.finally (fun () -> Timer.time timer_ctx ["domainslib";"teardown"] Domainslib.Task.teardown_pool pool) (Domainslib.Task.run pool) (fun () -> f pool)

+ 123 - 0
src/context/platformConfig.ml

@@ -0,0 +1,123 @@
+open Globals
+open Type
+
+(**
+	The capture policy tells which handling we make of captured locals
+	(the locals which are referenced in local functions)
+
+	See details/implementation in Codegen.captured_vars
+*)
+type capture_policy =
+	(** do nothing, let the platform handle it *)
+	| CPNone
+	(** wrap all captured variables into a single-element array to allow modifications *)
+	| CPWrapRef
+	(** similar to wrap ref, but will only apply to the locals that are declared in loops *)
+	| CPLoopVars
+
+type exceptions_config = {
+	(* Base types which may be thrown from Haxe code without wrapping. *)
+	ec_native_throws : path list;
+	(* Base types which may be caught from Haxe code without wrapping. *)
+	ec_native_catches : path list;
+	(*
+		Hint exceptions filter to avoid wrapping for targets, which can throw/catch any type
+		Ignored on targets with a specific native base type for exceptions.
+	*)
+	ec_avoid_wrapping : bool;
+	(* Path of a native class or interface, which can be used for wildcard catches. *)
+	ec_wildcard_catch : path;
+	(*
+		Path of a native base class or interface, which can be thrown.
+		This type is used to cast `haxe.Exception.thrown(v)` calls to.
+		For example `throw 123` is compiled to `throw (cast Exception.thrown(123):ec_base_throw)`
+	*)
+	ec_base_throw : path;
+	(*
+		Checks if throwing this expression is a special case for current target
+		and should not be modified.
+	*)
+	ec_special_throw : texpr -> bool;
+}
+
+type var_scope =
+	| FunctionScope
+	| BlockScope
+
+type var_scoping_flags =
+	(**
+		Variables are hoisted in their scope
+	*)
+	| VarHoisting
+	(**
+		It's not allowed to shadow existing variables in a scope.
+	*)
+	| NoShadowing
+	(**
+		It's not allowed to shadow a `catch` variable.
+	*)
+	| NoCatchVarShadowing
+	(**
+		Local vars cannot have the same name as the current top-level package or
+		(if in the root package) current class name
+	*)
+	| ReserveCurrentTopLevelSymbol
+	(**
+		Local vars cannot have a name used for any top-level symbol
+		(packages and classes in the root package)
+	*)
+	| ReserveAllTopLevelSymbols
+	(**
+		Reserve all type-paths converted to "flat path" with `Path.flat_path`
+	*)
+	| ReserveAllTypesFlat
+	(**
+		List of names cannot be taken by local vars
+	*)
+	| ReserveNames of string list
+	(**
+		Cases in a `switch` won't have blocks, but will share the same outer scope.
+	*)
+	| SwitchCasesNoBlocks
+
+type var_scoping_config = {
+	vs_flags : var_scoping_flags list;
+	vs_scope : var_scope;
+}
+
+type platform_config = {
+	(** has a static type system, with not-nullable basic types (Int/Float/Bool) *)
+	pf_static : bool;
+	(** has access to the "sys" package *)
+	pf_sys : bool;
+	(** captured variables handling (see before) *)
+	pf_capture_policy : capture_policy;
+	(** when calling a method with optional args, do we replace the missing args with "null" constants *)
+	pf_pad_nulls : bool;
+	(** add a final return to methods not having one already - prevent some compiler warnings *)
+	pf_add_final_return : bool;
+	(** does the platform natively support overloaded functions *)
+	pf_overload : bool;
+	(** can the platform use default values for non-nullable arguments *)
+	pf_can_skip_non_nullable_argument : bool;
+	(** type paths that are reserved on the platform *)
+	pf_reserved_type_paths : path list;
+	(** supports function == function **)
+	pf_supports_function_equality : bool;
+	(** uses utf16 encoding with ucs2 api **)
+	pf_uses_utf16 : bool;
+	(** target supports accessing `this` before calling `super(...)` **)
+	pf_this_before_super : bool;
+	(** target supports threads **)
+	pf_supports_threads : bool;
+	(** target supports Unicode **)
+	pf_supports_unicode : bool;
+	(** target supports rest arguments **)
+	pf_supports_rest_args : bool;
+	(** exceptions handling config **)
+	pf_exceptions : exceptions_config;
+	(** the scoping of local variables *)
+	pf_scoping : var_scoping_config;
+	(** target supports atomic operations via haxe.Atomic **)
+	pf_supports_atomics : bool;
+}

+ 166 - 0
src/context/safeCom.ml

@@ -0,0 +1,166 @@
+open Globals
+open Type
+open PlatformConfig
+
+type saved_warning = {
+	w_module : module_def;
+	w_warning : WarningList.warning;
+	w_options : Warning.warning_option list list;
+	w_msg : string;
+	w_pos : pos;
+}
+
+type t = {
+	basic : basic_types;
+	platform : platform;
+	defines : Define.define;
+	platform_config : platform_config;
+	debug : bool;
+	is_macro_context : bool;
+	foptimize : bool;
+	doinline : bool;
+	exceptions : exn list ref;
+	exceptions_mutex : Mutex.t;
+	warnings : saved_warning list ref;
+	warnings_mutex : Mutex.t;
+	errors : Error.error list ref;
+	errors_mutex : Mutex.t;
+	timer_ctx : Timer.timer_context;
+	find_module : path -> module_def;
+	find_module_by_type : path -> module_def;
+	curclass : tclass;
+	curfield : tclass_field;
+}
+
+let of_com (com : Common.context) = {
+	basic = com.basic;
+	platform = com.platform;
+	defines = com.defines;
+	platform_config = com.config;
+	debug = com.debug;
+	is_macro_context = com.is_macro_context;
+	foptimize = com.foptimize;
+	doinline = com.doinline;
+	exceptions = ref [];
+	exceptions_mutex = Mutex.create ();
+	warnings = ref [];
+	warnings_mutex = Mutex.create ();
+	errors = ref [];
+	errors_mutex = Mutex.create ();
+	timer_ctx = com.timer_ctx;
+	find_module = com.module_lut#find;
+	find_module_by_type = com.module_lut#find_by_type;
+	curclass = null_class;
+	curfield = null_field;
+}
+
+let of_typer (ctx : Typecore.typer) = {
+	(of_com ctx.com) with
+	curclass = ctx.c.curclass;
+	curfield = ctx.f.curfield;
+}
+
+let finalize scom com =
+	let warnings = !(scom.warnings) in
+	let errors = !(scom.errors) in
+	let exns = !(scom.exceptions) in
+	scom.warnings := [];
+	scom.errors := [];
+	scom.exceptions := [];
+	List.iter (fun warning ->
+		Common.module_warning com warning.w_module warning.w_warning warning.w_options warning.w_msg warning.w_pos
+	) warnings;
+	List.iter (fun err ->
+		Common.display_error_ext com err
+	) errors;
+	match exns with
+	| x :: _ ->
+		raise x
+	| [] ->
+		()
+
+let run_with_scom com scom f =
+	Std.finally (fun() -> finalize scom com) f ()
+
+let add_error scom err =
+	Mutex.protect scom.errors_mutex (fun () -> scom.errors := err :: !(scom.errors))
+
+let add_exn scom exn = match exn with
+	| Error.Error err ->
+		add_error scom err
+	| _ ->
+		Mutex.protect scom.exceptions_mutex (fun () -> scom.exceptions := exn :: !(scom.exceptions))
+
+let add_warning scom w msg p =
+	let options = (Warning.from_meta scom.curfield.cf_meta) @ (Warning.from_meta scom.curclass.cl_meta) in
+	match Warning.get_mode w options with
+	| WMEnable ->
+		Mutex.protect scom.warnings_mutex (fun () ->
+			let warning = {
+				w_module = scom.curclass.cl_module;
+				w_warning = w;
+				w_options = options;
+				w_msg = msg;
+				w_pos = p;
+			} in
+			scom.warnings := warning :: !(scom.warnings)
+		)
+	| WMDisable ->
+		()
+
+let run_expression_filters_safe ?(ignore_processed_status=false) scom detail_times filters t =
+	let run scom identifier e =
+		try
+			List.fold_left (fun e (filter_name,f) ->
+				try
+					FilterContext.with_timer scom.timer_ctx detail_times filter_name identifier (fun () -> f scom e)
+				with Failure msg ->
+					Error.raise_typing_error msg e.epos
+			) e filters
+		with exc ->
+			add_exn scom exc;
+			e
+	in
+	match t with
+	| TClassDecl c when FilterContext.is_removable_class c -> ()
+	| TClassDecl c ->
+		let scom = {scom with curclass = c} in
+		let rec process_field cf =
+			if ignore_processed_status || not (has_class_field_flag cf CfPostProcessed) then begin
+				let scom = {scom with curfield = cf} in
+				(match cf.cf_expr with
+				| Some e when not (FilterContext.is_removable_field scom.is_macro_context cf) ->
+					let identifier = Printf.sprintf "%s.%s" (s_type_path c.cl_path) cf.cf_name in
+					cf.cf_expr <- Some (run scom (Some identifier) e);
+				| _ -> ());
+			end;
+			List.iter process_field cf.cf_overloads
+		in
+		List.iter process_field c.cl_ordered_fields;
+		List.iter process_field c.cl_ordered_statics;
+		(match c.cl_constructor with
+		| None -> ()
+		| Some f -> process_field f);
+		(match TClass.get_cl_init c with
+		| None -> ()
+		| Some e ->
+			let identifier = Printf.sprintf "%s.__init__" (s_type_path c.cl_path) in
+			TClass.set_cl_init c (run scom (Some identifier) e))
+	| TEnumDecl _ -> ()
+	| TTypeDecl _ -> ()
+	| TAbstractDecl _ -> ()
+
+let adapt_scom_to_mt scom mt = match mt with
+	| TClassDecl c ->
+		{scom with curclass = c}
+	| _ ->
+		scom
+
+let run_type_filters_safe scom filters types =
+	List.iter (fun t ->
+		let scom = adapt_scom_to_mt scom t in
+		List.iter (fun f -> f scom t) filters
+	) types
+
+let needs_inline scom c cf =
+	cf.cf_kind = Method MethInline && (scom.doinline || Typecore.is_forced_inline c cf)

+ 48 - 62
src/context/typecore.ml

@@ -21,6 +21,7 @@ open Globals
 open Ast
 open Common
 open Type
+open TyperPass
 open Error
 open Resolution
 open FieldCallCandidate
@@ -50,21 +51,6 @@ type access_mode =
 	| MSet of Ast.expr option (* rhs, if exists *)
 	| MCall of Ast.expr list (* call arguments *)
 
-type typer_pass =
-	| PBuildModule			(* build the module structure and setup module type parameters *)
-	| PBuildClass			(* build the class structure *)
-	| PConnectField			(* handle associated fields, which may affect each other. E.g. a property and its getter *)
-	| PTypeField			(* type the class field, allow access to types structures *)
-	| PCheckConstraint		(* perform late constraint checks with inferred types *)
-	| PForce				(* usually ensure that lazy have been evaluated *)
-	| PFinal				(* not used, only mark for finalize *)
-
-let all_typer_passes = [
-	PBuildModule;PBuildClass;PConnectField;PTypeField;PCheckConstraint;PForce;PFinal
-]
-
-let all_typer_passes_length = List.length all_typer_passes
-
 type typer_module = {
 	curmod : module_def;
 	import_resolution : resolution_list;
@@ -108,7 +94,6 @@ type typer_globals = {
 	mutable delayed : typer_pass_tasks Array.t;
 	mutable delayed_min_index : int;
 	mutable debug_delayed : (typer_pass * ((unit -> unit) * (string * string list) * typer) list) list;
-	doinline : bool;
 	retain_meta : bool;
 	mutable core_api : typer option;
 	mutable macros : ((unit -> unit) * typer) option;
@@ -141,7 +126,7 @@ and typer_expr = {
 	in_function : bool;
 	mutable ret : t;
 	mutable opened : anon_status ref list;
-	mutable monomorphs : monomorphs;
+	mutable monomorphs : (tmono * pos) list;
 	mutable in_loop : bool;
 	mutable bypass_accessor : int;
 	mutable with_type_stack : WithType.t list;
@@ -177,10 +162,6 @@ and typer = {
 	memory_marker : float array;
 }
 
-and monomorphs = {
-	mutable perfunction : (tmono * pos) list;
-}
-
 let pass_name = function
 	| PBuildModule -> "build-module"
 	| PBuildClass -> "build-class"
@@ -241,9 +222,7 @@ module TyperManager = struct
 			in_function;
 			ret = t_dynamic;
 			opened = [];
-			monomorphs = {
-				perfunction = [];
-			};
+			monomorphs = [];
 			in_loop = false;
 			bypass_accessor = 0;
 			with_type_stack = [];
@@ -284,6 +263,18 @@ module TyperManager = struct
 
 	let clone_for_expr ctx curfun in_function =
 		let e = create_ctx_e curfun in_function in
+		begin match curfun with
+		| FunMember | FunMemberAbstract | FunStatic | FunConstructor ->
+			(* Monomorphs from field arguments and return types are created before
+			   ctx.e is cloned, so they have to be absorbed here. A better fix might
+			   be to clone ctx.e earlier, but that comes with its own challenges. *)
+			e.monomorphs <- ctx.e.monomorphs;
+			ctx.e.monomorphs <- []
+		| FunMemberAbstractLocal | FunMemberClassLocal ->
+			(* We don't need to do this for local functions because the cloning happens
+			   earlier there. *)
+			()
+		end;
 		create ctx ctx.m ctx.c ctx.f e PTypeField ctx.type_params
 
 	let clone_for_type_params ctx params =
@@ -374,7 +365,7 @@ let unify_min_for_type_source ctx el src = (!unify_min_for_type_source_ref) ctx
 
 let spawn_monomorph' ctx p =
 	let mono = Monomorph.create () in
-	ctx.monomorphs.perfunction <- (mono,p) :: ctx.monomorphs.perfunction;
+	ctx.e.monomorphs <- (mono,p) :: ctx.e.monomorphs;
 	mono
 
 let spawn_monomorph ctx p =
@@ -384,12 +375,6 @@ let make_static_field_access c cf t p =
 	let ethis = Texpr.Builder.make_static_this c p in
 	mk (TField (ethis,(FStatic (c,cf)))) t p
 
-let make_static_call ctx c cf map args t p =
-	let monos = List.map (fun _ -> spawn_monomorph ctx.e p) cf.cf_params in
-	let map t = map (apply_params cf.cf_params monos t) in
-	let ef = make_static_field_access c cf (map cf.cf_type) p in
-	make_call ctx ef args (map t) p
-
 let raise_with_type_error ?(depth = 0) msg p =
 	raise (WithTypeError (make_error ~depth (Custom msg) p))
 
@@ -422,7 +407,7 @@ let unify_raise_custom uctx t1 t2 p =
 			(* no untyped check *)
 			raise_error_msg (Unify l) p
 
-let unify_raise = unify_raise_custom default_unification_context
+let unify_raise a b = unify_raise_custom (default_unification_context()) a b
 
 let save_locals ctx =
 	let locals = ctx.f.locals in
@@ -430,7 +415,7 @@ let save_locals ctx =
 
 let add_local ctx k n t p =
 	let v = alloc_var k n t p in
-	if Define.defined ctx.com.defines Define.WarnVarShadowing && n <> "_" then begin
+	if n <> "_" then begin
 		match k with
 		| VUser _ ->
 			begin try
@@ -453,8 +438,6 @@ let add_local_with_origin ctx origin n t p =
 	Naming.check_local_variable_name ctx.com n origin p;
 	add_local ctx (VUser origin) n t p
 
-let gen_local_prefix = "`"
-
 let gen_local ctx t p =
 	add_local ctx VGenerated gen_local_prefix t p
 
@@ -484,7 +467,7 @@ let delay_if_mono g p t f = match follow t with
 	| _ ->
 		f()
 
-let rec flush_pass g p where =
+let rec flush_pass g (p : typer_pass) where =
 	let rec loop i =
 		if i > (Obj.magic p) then
 			()
@@ -509,30 +492,11 @@ let make_pass ctx f = f
 let enter_field_typing_pass g info =
 	flush_pass g PConnectField info
 
-let make_lazy ?(force=true) ctx t_proc f where =
-	let r = ref (lazy_available t_dynamic) in
-	r := lazy_wait (fun() ->
-		try
-			r := lazy_processing t_proc;
-			let t = f r in
-			r := lazy_available t;
-			t
-		with
-			| Error e ->
-				raise (Fatal_error e)
-	);
-	if force then delay ctx PForce (fun () -> ignore(lazy_type r));
+let make_lazy ctx t_proc f where =
+	let r = make_unforced_lazy t_proc f where in
+	delay ctx PForce (fun () -> ignore(lazy_type r));
 	r
 
-let is_removable_field com f =
-	not (has_class_field_flag f CfOverride) && (
-		has_class_field_flag f CfExtern || has_class_field_flag f CfGeneric
-		|| (match f.cf_kind with
-			| Var {v_read = AccRequire (s,_)} -> true
-			| Method MethMacro -> not com.is_macro_context
-			| _ -> false)
-	)
-
 let is_forced_inline c cf =
 	match c with
 	| Some { cl_kind = KAbstractImpl _ } -> true
@@ -541,7 +505,7 @@ let is_forced_inline c cf =
 	| _ -> false
 
 let needs_inline ctx c cf =
-	cf.cf_kind = Method MethInline && ctx.allow_inline && (ctx.g.doinline || is_forced_inline c cf)
+	cf.cf_kind = Method MethInline && ctx.allow_inline && (ctx.com.doinline || is_forced_inline c cf)
 
 (** checks if we can access to a given class field using current context *)
 let can_access ctx c cf stat =
@@ -688,6 +652,28 @@ let safe_mono_close ctx m p =
 let relative_path ctx file =
 	ctx.com.class_paths#relative_path file
 
+let mk_infos_t =
+	let fileName = ("fileName",null_pos,NoQuotes) in
+	let lineNumber = ("lineNumber",null_pos,NoQuotes) in
+	let className = ("className",null_pos,NoQuotes) in
+	let methodName = ("methodName",null_pos,NoQuotes) in
+	(fun ctx p params t ->
+		let file = if ctx.com.is_macro_context then p.pfile else if Common.defined ctx.com Define.AbsolutePath then Path.get_full_path p.pfile else relative_path ctx p.pfile in
+		let line = Lexer.get_error_line p in
+		let class_name = s_type_path ctx.c.curclass.cl_path in
+		let fields =
+			(fileName,Texpr.Builder.make_string ctx.com.basic file p) ::
+			(lineNumber,Texpr.Builder.make_int ctx.com.basic line p) ::
+			(className,Texpr.Builder.make_string ctx.com.basic class_name p) ::
+			if ctx.f.curfield.cf_name = "" then
+				params
+			else
+				(methodName,Texpr.Builder.make_string ctx.com.basic ctx.f.curfield.cf_name p) ::
+				params
+		in
+		mk (TObjectDecl fields) t p
+	)
+
 let mk_infos ctx p params =
 	let file = if ctx.com.is_macro_context then p.pfile else if Common.defined ctx.com Define.AbsolutePath then Path.get_full_path p.pfile else relative_path ctx p.pfile in
 	(EObjectDecl (
@@ -899,19 +885,19 @@ let make_where ctx where =
 	let inf = ctx_pos ctx in
 	where ^ " (" ^ String.concat "." inf ^ ")",inf
 
-let make_lazy ?(force=true) ctx t f (where:string) =
+let make_lazy ctx t f (where:string) =
 	let r = ref (lazy_available t_dynamic) in
 	r := lazy_wait (make_pass ~inf:(make_where ctx where) ctx (fun() ->
 		try
 			r := lazy_processing t;
-			let t = f r in
+			let t = f () in
 			r := lazy_available t;
 			t
 		with
 			| Error e ->
 				raise (Fatal_error e)
 	));
-	if force then delay ctx PForce (fun () -> ignore(lazy_type r));
+	delay ctx PForce (fun () -> ignore(lazy_type r));
 	r
 
 *)

+ 14 - 0
src/context/typerPass.ml

@@ -0,0 +1,14 @@
+type typer_pass =
+	| PBuildModule			(* build the module structure and setup module type parameters *)
+	| PBuildClass			(* build the class structure *)
+	| PConnectField			(* handle associated fields, which may affect each other. E.g. a property and its getter *)
+	| PTypeField			(* type the class field, allow access to types structures *)
+	| PCheckConstraint		(* perform late constraint checks with inferred types *)
+	| PForce				(* usually ensure that lazy have been evaluated *)
+	| PFinal				(* not used, only mark for finalize *)
+
+let all_typer_passes = [
+	PBuildModule;PBuildClass;PConnectField;PTypeField;PCheckConstraint;PForce;PFinal
+]
+
+let all_typer_passes_length = List.length all_typer_passes

+ 10 - 9
src/core/abstract.ml

@@ -63,8 +63,6 @@ let find_to uctx b ab tl =
 				Some(find_field_to uctx a b ab tl)
 		)
 
-let underlying_type_stack = new_rec_stack()
-
 (**
 	Returns type parameters and the list of types, which should be known at compile time
 	to be able to choose multitype specialization.
@@ -98,14 +96,14 @@ let rec find_multitype_params a pl =
 		tl,!definitive_types
 
 and find_multitype_specialization_type a pl =
-	let uctx = default_unification_context in
+	let uctx = default_unification_context () in
 	let m = mk_mono() in
 	let tl,definitive_types = find_multitype_params a pl in
 	ignore(find_to uctx m a tl);
 	if List.exists (fun t -> has_mono t) definitive_types then raise Not_found;
 	follow m
 
-and get_underlying_type ?(return_first=false) a pl =
+and get_underlying_type' stack ?(return_first=false) a pl =
 	let maybe_recurse t =
 		let rec loop t = match t with
 			| TMono r ->
@@ -119,12 +117,12 @@ and get_underlying_type ?(return_first=false) a pl =
 			| TType (t,tl) ->
 				loop (apply_typedef t tl)
 			| TAbstract(a,tl) when not (Meta.has Meta.CoreType a.a_meta) ->
-				if rec_stack_exists (fast_eq t) underlying_type_stack then begin
+				if rec_stack_exists (fast_eq t) stack then begin
 					let pctx = print_context() in
-					let s = String.concat " -> " (List.map (fun t -> s_type pctx t) (List.rev (t :: underlying_type_stack.rec_stack))) in
+					let s = String.concat " -> " (List.map (fun t -> s_type pctx t) (List.rev (t :: stack.rec_stack))) in
 					raise_typing_error ("Abstract chain detected: " ^ s) a.a_pos
 				end;
-				get_underlying_type a tl
+				get_underlying_type' stack a tl
 			| _ ->
 				t
 		in
@@ -132,7 +130,7 @@ and get_underlying_type ?(return_first=false) a pl =
 			Even if only the first underlying type was requested
 			keep traversing to detect mutually recursive abstracts
 		*)
-		let result = rec_stack_loop underlying_type_stack (TAbstract(a,pl)) loop t in
+		let result = rec_stack_loop stack (TAbstract(a,pl)) loop t in
 		if return_first then t
 		else result
 	in
@@ -145,6 +143,9 @@ and get_underlying_type ?(return_first=false) a pl =
 		else
 			maybe_recurse (apply_params a.a_params pl a.a_this)
 
+and get_underlying_type ?(return_first=false) a pl =
+	get_underlying_type' (new_rec_stack()) ~return_first a pl
+
 and follow_with_abstracts t = match follow t with
 	| TAbstract(a,tl) when not (Meta.has Meta.CoreType a.a_meta) ->
 		follow_with_abstracts (get_underlying_type a tl)
@@ -155,7 +156,7 @@ let rec follow_with_forward_ctor ?(build=false) t = match follow t with
 	| TAbstract(a,tl) as t ->
 		if build then build_abstract a;
 		if Meta.has Meta.ForwardNew a.a_meta && not (match a.a_impl with
-			| Some c -> PMap.mem "_new" c.cl_statics
+			| Some c -> a.a_constructor <> None
 			| None -> false
 		) then
 			follow_with_forward_ctor (get_underlying_type ~return_first:true a tl)

+ 9 - 1
src/core/define.ml

@@ -6,6 +6,11 @@ type define = {
 	mutable defines_signature : string option;
 }
 
+let empty_defines () = {
+	defines_signature = None;
+	values = PMap.empty;
+}
+
 type user_define = {
 	doc : string;
 	flags : define_parameter list;
@@ -26,16 +31,18 @@ type define_infos = {
 	d_origin : define_origin;
 	d_links : string list;
 	d_deprecated : string option;
+	d_default : string option;
 }
 
 let infos ?user_defines d =
 	let extract_infos (t, (doc, flags), origin) =
-		let params = ref [] and pfs = ref [] and links = ref [] and deprecated = ref None in
+		let params = ref [] and pfs = ref [] and links = ref [] and deprecated = ref None and default = ref None in
 		List.iter (function
 			| HasParam s -> params := s :: !params
 			| Platforms fl -> pfs := fl @ !pfs
 			| Link url -> links := url :: !links
 			| Deprecated s -> deprecated := Some s
+			| DefaultValue s -> default := Some s
 		) flags;
 		(t, {
 			d_doc = doc;
@@ -44,6 +51,7 @@ let infos ?user_defines d =
 			d_origin = origin;
 			d_links = !links;
 			d_deprecated = !deprecated;
+			d_default = !default;
 		})
 	in
 

+ 5 - 8
src/core/display/completionItem.ml

@@ -182,14 +182,11 @@ module CompletionModuleType = struct
 			raise Exit
 
 	let of_module_type mt =
-		let actor a = match a.a_impl with
-			| None -> No
-			| Some c ->
-				try
-					let cf = PMap.find "_new" c.cl_statics in
-					if (has_class_flag c CExtern) || (has_class_field_flag cf CfPublic) then Yes else YesButPrivate
-				with Not_found ->
-					No
+		let actor a = match a.a_impl,a.a_constructor with
+			| Some c,Some cf ->
+				if (has_class_flag c CExtern) || (has_class_field_flag cf CfPublic) then Yes else YesButPrivate
+			| _ ->
+				No
 		in
 		let ctor c =
 			try

+ 29 - 0
src/core/ds/nowOrLater.ml

@@ -0,0 +1,29 @@
+type t = {
+	now : Mutex.t;
+	later: (unit -> unit) Queue.t;
+	later_mutex : Mutex.t;
+}
+
+let create () = {
+	now = Mutex.create ();
+	later = Queue.create ();
+	later_mutex = Mutex.create ();
+}
+
+let try_now nol f =
+	if Mutex.try_lock nol.now then begin
+		f();
+		Mutex.unlock nol.now
+	end else begin
+		Mutex.protect nol.later_mutex (fun () -> Queue.push f nol.later)
+	end
+
+let handle_later nol =
+	let rec loop () = match Queue.take_opt nol.later with
+		| Some f ->
+			f ();
+			loop ()
+		| None ->
+			()
+	in
+	loop ()

+ 1 - 10
src/core/ds/stringPool.ml

@@ -1,13 +1,4 @@
-module StringHashtbl = Hashtbl.Make(struct
-	type t = string
-
-	let equal =
-		String.equal
-
-	let hash s =
-		(* What's the best here? *)
-		Hashtbl.hash s
-end)
+open Globals
 
 type t = {
 	lut : int StringHashtbl.t;

+ 18 - 0
src/core/ds/threadSafeHashtbl.ml

@@ -0,0 +1,18 @@
+type ('a,'b) t = {
+	h : ('a,'b) Hashtbl.t;
+	mutex : Mutex.t
+}
+
+let create size = {
+	h = Hashtbl.create size;
+	mutex = Mutex.create ();
+}
+
+let add h k v =
+	Mutex.protect h.mutex (fun () -> Hashtbl.add h.h k) v
+
+let replace h k v =
+	Mutex.protect h.mutex (fun () -> Hashtbl.replace h.h k) v
+
+let find h k =
+	Mutex.protect h.mutex (fun () -> Hashtbl.find h.h) k

+ 3 - 3
src/core/dune

@@ -1,17 +1,17 @@
 (rule
 	(targets metaList.ml)
 	(deps ../../src-json/meta.json)
-	(action (with-stdout-to metaList.ml (run %{bin:haxe_prebuild} meta ../../src-json/meta.json)))
+	(action (with-stdout-to metaList.ml (run ../prebuild.exe meta ../../src-json/meta.json)))
 )
 
 (rule
 	(targets defineList.ml)
 	(deps ../../src-json/define.json)
-	(action (with-stdout-to defineList.ml (run %{bin:haxe_prebuild} define ../../src-json/define.json)))
+	(action (with-stdout-to defineList.ml (run ../prebuild.exe define ../../src-json/define.json)))
 )
 
 (rule
 	(targets warningList.ml)
 	(deps ../../src-json/warning.json)
-	(action (with-stdout-to warningList.ml (run %{bin:haxe_prebuild} warning ../../src-json/warning.json)))
+	(action (with-stdout-to warningList.ml (run ../prebuild.exe warning ../../src-json/warning.json)))
 )

+ 1 - 3
src/core/error.ml

@@ -183,9 +183,7 @@ module BetterErrors = struct
 	let rec s_type ctx t =
 		match t with
 		| TMono r ->
-			(match r.tm_type with
-			| None -> Printf.sprintf "Unknown<%d>" (try List.assq t (!ctx) with Not_found -> let n = List.length !ctx in ctx := (t,n) :: !ctx; n)
-			| Some t -> s_type ctx t)
+			MonomorphPrinting.s_mono s_type ctx false r
 		| TEnum (e,tl) ->
 			s_type_path e.e_path ^ s_type_params ctx tl
 		| TInst (c,tl) ->

+ 37 - 6
src/core/globals.ml

@@ -10,6 +10,25 @@ module IntMap = Map.Make(struct type t = int let compare i1 i2 = i2 - i1 end)
 module StringMap = Map.Make(struct type t = string let compare = String.compare end)
 module Int32Map = Map.Make(struct type t = Int32.t let compare = Int32.compare end)
 
+module IntHashtbl = Hashtbl.Make(struct
+	type t = int
+
+	let equal =
+		Int.equal
+
+	let hash = Int.hash
+end)
+
+module StringHashtbl = Hashtbl.Make(struct
+	type t = string
+
+	let equal =
+		String.equal
+
+	let hash s =
+		Hashtbl.hash s
+end)
+
 type platform =
 	| Cross
 	| Js
@@ -24,6 +43,15 @@ type platform =
 	| Eval
 	| CustomTarget of string
 
+type compiler_version = {
+	version: int;
+	major: int;
+	minor: int;
+	revision: int;
+	pre: string option;
+	extra: (string * string) option;
+}
+
 let version = 5000
 let version_major = version / 1000
 let version_minor = (version mod 1000) / 100
@@ -135,14 +163,19 @@ let s_version =
 	let pre = Option.map_default (fun pre -> "-" ^ pre) "" version_pre in
 	Printf.sprintf "%d.%d.%d%s" version_major version_minor version_revision pre
 
-let s_version_full =
-	match Version.version_extra with
+let s_version_full v =
+	match v.extra with
 		| Some (_,build) -> s_version ^ "+" ^ build
 		| _ -> s_version
 
 
 let patch_string_pos p s = { p with pmin = p.pmax - String.length s }
 
+let gen_local_prefix = "`"
+
+(* msg * backtrace *)
+exception Ice of string * string
+
 (**
 	Terminates compiler process and prints user-friendly instructions about filing an issue.
 	Usage: `die message __LOC__`, where `__LOC__` is a built-in ocaml constant
@@ -163,10 +196,8 @@ let die ?p msg ml_loc =
 		try snd (ExtString.String.split backtrace "\n")
 		with ExtString.Invalid_string -> backtrace
 	in
-	let ver = s_version_full
-	and os_type = if Sys.unix then "unix" else "windows" in
-	let s = Printf.sprintf "%s\nHaxe: %s; OS type: %s;\n%s\n%s" msg ver os_type ml_loc backtrace in
-	failwith s
+	let backtrace = ml_loc ^ "\n" ^ backtrace in
+	raise (Ice (msg,backtrace))
 
 let dump_callstack () =
 	print_endline (Printexc.raw_backtrace_to_string (Printexc.get_callstack 200))

+ 1 - 1
src/core/inheritDoc.ml

@@ -181,7 +181,7 @@ and get_target_doc ctx e_target =
 			| TAbstract ({ a_impl = Some c }, _) ->
 				let c_opt, cf =
 					let field_name =
-						if field_name = "new" then "_new"
+						if field_name = "new" then "_hx_new"
 						else field_name
 					in
 					get_class_field c field_name

+ 10 - 16
src/core/json/genjson.ml

@@ -420,12 +420,6 @@ and generate_texpr ctx e =
 	| TBlock el ->
 		let el = List.map (generate_texpr ctx) el in
 		"TBlock",Some (jarray el)
-	| TFor(v,e1,e2) ->
-		"TFor",Some (jobject [
-			"v",generate_tvar ctx v;
-			"expr1",generate_texpr ctx e1;
-			"expr2",generate_texpr ctx e2;
-		]);
 	| TIf(e1,e2,eo) ->
 		"TIf",Some (jobject [
 			"eif",generate_texpr ctx e1;
@@ -733,13 +727,13 @@ let create_context ?jsonrpc gm = {
 	request = match jsonrpc with None -> None | Some jsonrpc -> Some (new JsonRequest.json_request jsonrpc)
 }
 
-let generate types file =
-	let t = Timer.timer ["generate";"json";"construct"] in
-	let ctx = create_context GMFull in
-	let json = jarray (List.map (generate_module_type ctx) types) in
-	t();
-	let t = Timer.timer ["generate";"json";"write"] in
-	let ch = open_out_bin file in
-	Json.write_json (output_string ch) json;
-	close_out ch;
-	t()
+let generate timer_ctx types file =
+	let json = Timer.time timer_ctx ["generate";"json";"construct"] (fun () ->
+		let ctx = create_context GMFull in
+		jarray (List.map (generate_module_type ctx) types)
+	) () in
+	Timer.time timer_ctx ["generate";"json";"write"] (fun () ->
+		let ch = open_out_bin file in
+		Json.write_json (output_string ch) json;
+		close_out ch;
+	) ()

+ 0 - 85
src/core/naming.ml

@@ -1,93 +1,8 @@
 open Globals
-open Ast
 open Type
 open Common
 open Error
 
-(** retrieve string from @:native metadata or raise Not_found *)
-let get_native_name meta =
-	let rec get_native meta = match meta with
-		| [] -> raise Not_found
-		| (Meta.Native,[v],p as meta) :: _ ->
-			meta
-		| _ :: meta ->
-			get_native meta
-	in
-	let (_,e,mp) = get_native meta in
-	match e with
-	| [Ast.EConst (Ast.String(name,_)),p] ->
-		name,p
-	| [] ->
-		raise Not_found
-	| _ ->
-		Error.raise_typing_error "String expected" mp
-
-(* Rewrites class or enum paths if @:native metadata is set *)
-let apply_native_paths t =
-	let get_real_name meta name =
-		let name',p = get_native_name meta in
-		(Meta.RealPath,[Ast.EConst (Ast.String (name,SDoubleQuotes)), p], p), name'
-	in
-	let get_real_path meta path =
-		let name,p = get_native_name meta in
-		(Meta.RealPath,[Ast.EConst (Ast.String (s_type_path path,SDoubleQuotes)), p], p), parse_path name
-	in
-	try
-		(match t with
-		| TClassDecl c ->
-			let did_change = ref false in
-			let field cf = try
-				let meta,name = get_real_name cf.cf_meta cf.cf_name in
-				cf.cf_name <- name;
-				cf.cf_meta <- meta :: cf.cf_meta;
-				List.iter (fun cf -> cf.cf_name <- name) cf.cf_overloads;
-				did_change := true
-			with Not_found ->
-				()
-			in
-			let fields cfs old_map =
-				did_change := false;
-				List.iter field cfs;
-				if !did_change then
-					List.fold_left (fun map f -> PMap.add f.cf_name f map) PMap.empty cfs
-				else
-					old_map
-			in
-			c.cl_fields <- fields c.cl_ordered_fields c.cl_fields;
-			c.cl_statics <- fields c.cl_ordered_statics c.cl_statics;
-			let meta,path = get_real_path c.cl_meta c.cl_path in
-			c.cl_meta <- meta :: c.cl_meta;
-			c.cl_path <- path;
-		| TEnumDecl e ->
-			let did_change = ref false in
-			let field _ ef = try
-				let meta,name = get_real_name ef.ef_meta ef.ef_name in
-				ef.ef_name <- name;
-				ef.ef_meta <- meta :: ef.ef_meta;
-				did_change := true;
-			with Not_found ->
-				()
-			in
-			PMap.iter field e.e_constrs;
-			if !did_change then begin
-				let names = ref [] in
-				e.e_constrs <- PMap.fold
-					(fun ef map ->
-						names := ef.ef_name :: !names;
-						PMap.add ef.ef_name ef map
-					)
-					e.e_constrs PMap.empty;
-				e.e_names <- !names;
-			end;
-			let meta,path = get_real_path e.e_meta e.e_path in
-			e.e_meta <- meta :: e.e_meta;
-			e.e_path <- path;
-		| _ ->
-			())
-	with Not_found ->
-		()
-
-
 let display_identifier_error com ?prepend_msg msg p =
 	let prepend = match prepend_msg with Some s -> s ^ " " | _ -> "" in
 	Common.display_error com (prepend ^ msg) p

+ 86 - 0
src/core/native.ml

@@ -0,0 +1,86 @@
+open Globals
+open Ast
+open Type
+
+(** retrieve string from @:native metadata or raise Not_found *)
+let get_native_name meta =
+	let rec get_native meta = match meta with
+		| [] -> raise Not_found
+		| (Meta.Native,[v],p as meta) :: _ ->
+			meta
+		| _ :: meta ->
+			get_native meta
+	in
+	let (_,e,mp) = get_native meta in
+	match e with
+	| [Ast.EConst (Ast.String(name,_)),p] ->
+		name,p
+	| [] ->
+		raise Not_found
+	| _ ->
+		Error.raise_typing_error "String expected" mp
+
+(* Rewrites class or enum paths if @:native metadata is set *)
+let apply_native_paths t =
+	let get_real_name meta name =
+		let name',p = get_native_name meta in
+		(Meta.RealPath,[Ast.EConst (Ast.String (name,SDoubleQuotes)), p], p), name'
+	in
+	let get_real_path meta path =
+		let name,p = get_native_name meta in
+		(Meta.RealPath,[Ast.EConst (Ast.String (s_type_path path,SDoubleQuotes)), p], p), parse_path name
+	in
+	try
+		(match t with
+		| TClassDecl c ->
+			let did_change = ref false in
+			let field cf = try
+				let meta,name = get_real_name cf.cf_meta cf.cf_name in
+				cf.cf_name <- name;
+				cf.cf_meta <- meta :: cf.cf_meta;
+				List.iter (fun cf -> cf.cf_name <- name) cf.cf_overloads;
+				did_change := true
+			with Not_found ->
+				()
+			in
+			let fields cfs old_map =
+				did_change := false;
+				List.iter field cfs;
+				if !did_change then
+					List.fold_left (fun map f -> PMap.add f.cf_name f map) PMap.empty cfs
+				else
+					old_map
+			in
+			c.cl_fields <- fields c.cl_ordered_fields c.cl_fields;
+			c.cl_statics <- fields c.cl_ordered_statics c.cl_statics;
+			let meta,path = get_real_path c.cl_meta c.cl_path in
+			c.cl_meta <- meta :: c.cl_meta;
+			c.cl_path <- path;
+		| TEnumDecl e ->
+			let did_change = ref false in
+			let field _ ef = try
+				let meta,name = get_real_name ef.ef_meta ef.ef_name in
+				ef.ef_name <- name;
+				ef.ef_meta <- meta :: ef.ef_meta;
+				did_change := true;
+			with Not_found ->
+				()
+			in
+			PMap.iter field e.e_constrs;
+			if !did_change then begin
+				let names = ref [] in
+				e.e_constrs <- PMap.fold
+					(fun ef map ->
+						names := ef.ef_name :: !names;
+						PMap.add ef.ef_name ef map
+					)
+					e.e_constrs PMap.empty;
+				e.e_names <- !names;
+			end;
+			let meta,path = get_real_path e.e_meta e.e_path in
+			e.e_meta <- meta :: e.e_meta;
+			e.e_path <- path;
+		| _ ->
+			())
+	with Not_found ->
+		()

+ 3 - 1
src/core/path.ml

@@ -317,6 +317,8 @@ let module_name_of_file file =
 	| [] ->
 		Globals.die "" __LOC__
 
+let mkdir_mutex = Mutex.create ()
+
 let rec create_file bin ext acc = function
 	| [] -> Globals.die "" __LOC__
 	| d :: [] ->
@@ -327,7 +329,7 @@ let rec create_file bin ext acc = function
 		ch
 	| d :: l ->
 		let dir = String.concat "/" (List.rev (d :: acc)) in
-		if not (Sys.file_exists (remove_trailing_slash dir)) then Unix.mkdir dir 0o755;
+		Mutex.protect mkdir_mutex (fun () -> if not (Sys.file_exists (remove_trailing_slash dir)) then Unix.mkdir dir 0o755);
 		create_file bin ext (d :: acc) l
 
 let rec mkdir_recursive base dir_list =

Některé soubory nejsou zobrazeny, neboť je v těchto rozdílových datech změněno mnoho souborů