# azure-pipelines.yml — CI/CD pipeline definition
  1. variables:
  2. - group: variables-haxe
  3. - name: AZURE_PIPELINES_REPO_URL
  4. value: $(Build.Repository.Uri)
  5. - name: AZURE_PIPELINES_BRANCH
  6. value: $(Build.SourceBranchName)
  7. stages:
  8. - stage: StageTest
  9. jobs:
  10. - template: extra/azure-pipelines/build-linux.yml
  11. parameters:
  12. name: BuildLinux
  13. - template: extra/azure-pipelines/build-mac.yml
  14. parameters:
  15. name: BuildMac
  16. - template: extra/azure-pipelines/build-windows.yml
  17. parameters:
  18. name: BuildWin64
  19. arch: '64'
  20. - template: extra/azure-pipelines/build-windows.yml
  21. parameters:
  22. name: BuildWin32
  23. arch: '32'
  24. - job: TestLinux
  25. dependsOn: BuildLinux
  26. pool:
  27. vmImage: 'ubuntu-16.04'
  28. strategy:
  29. matrix:
  30. macro:
  31. TEST: macro
  32. neko:
  33. TEST: neko
  34. hl:
  35. TEST: hl
  36. APT_PACKAGES: cmake ninja-build
  37. cpp:
  38. TEST: cpp
  39. HXCPP_COMPILE_CACHE: ~/hxcache
  40. APT_PACKAGES: gcc-multilib g++-multilib
  41. java:
  42. TEST: java,jvm
  43. cs:
  44. TEST: cs
  45. js:
  46. TEST: js
  47. SAUCE: 1
  48. SAUCE_TUNNEL_ID: $(Agent.JobName)
  49. SAUCE_BUILD: $(Build.BuildNumber)
  50. php:
  51. TEST: php
  52. flash:
  53. TEST: flash9,as3
  54. APT_PACKAGES: libglib2.0 libfreetype6 xvfb
  55. DISPLAY: ':99.0'
  56. AUDIODEV: 'null'
  57. python:
  58. TEST: python
  59. lua:
  60. TEST: lua
  61. steps:
  62. - checkout: self
  63. fetchDepth: 20
  64. - template: extra/azure-pipelines/install-neko-snapshot.yaml
  65. parameters:
  66. platform: linux64
  67. - task: DownloadPipelineArtifact@0
  68. inputs:
  69. artifactName: 'linuxBinaries'
  70. targetPath: linuxBinaries
  71. - script: |
  72. set -ex
  73. tar -xf linuxBinaries/*_bin.tar.gz -C linuxBinaries --strip-components=1
  74. sudo mkdir -p /usr/local/bin/
  75. sudo mkdir -p /usr/local/share/haxe/
  76. sudo ln -s `pwd`/linuxBinaries/haxe /usr/local/bin/haxe
  77. sudo ln -s `pwd`/linuxBinaries/haxelib /usr/local/bin/haxelib
  78. sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
  79. displayName: Setup Haxe
  80. - script: haxe -version
  81. displayName: Print Haxe version
  82. - script: |
  83. set -ex
  84. mkdir ~/haxelib
  85. haxelib setup ~/haxelib
  86. displayName: Setup haxelib
  87. - script: |
  88. set -ex
  89. sudo apt update -qqy
  90. sudo apt install -qqy $APT_PACKAGES
  91. condition: and(succeeded(), variables['APT_PACKAGES'])
  92. displayName: Install apt packages
  93. - script: haxe RunCi.hxml
  94. workingDirectory: $(Build.SourcesDirectory)/tests
  95. env:
  96. ${{ if variables['SAUCE_ACCESS_KEY'] }}:
  97. SAUCE_ACCESS_KEY: $(SAUCE_ACCESS_KEY)
  98. displayName: Test
  99. - job: TestMac
  100. dependsOn: BuildMac
  101. pool:
  102. vmImage: 'macOS-10.13'
  103. strategy:
  104. matrix:
  105. macro:
  106. TEST: macro
  107. neko:
  108. TEST: neko
  109. hl:
  110. TEST: hl
  111. BREW_PACKAGES: ninja
  112. cpp:
  113. TEST: cpp
  114. HXCPP_COMPILE_CACHE: ~/hxcache
  115. java:
  116. TEST: java,jvm
  117. cs:
  118. TEST: cs
  119. js:
  120. TEST: js
  121. php:
  122. TEST: php
  123. flash:
  124. TEST: flash9,as3
  125. python:
  126. TEST: python
  127. lua:
  128. TEST: lua
  129. steps:
  130. - checkout: self
  131. fetchDepth: 20
  132. - template: extra/azure-pipelines/install-neko-snapshot.yaml
  133. parameters:
  134. platform: mac
  135. - task: DownloadPipelineArtifact@0
  136. inputs:
  137. artifactName: 'macBinaries'
  138. targetPath: macBinaries
  139. - script: |
  140. set -ex
  141. tar -xf macBinaries/*_bin.tar.gz -C macBinaries --strip-components=1
  142. sudo mkdir -p /usr/local/bin/
  143. sudo mkdir -p /usr/local/share/haxe/
  144. sudo ln -s `pwd`/macBinaries/haxe /usr/local/bin/haxe
  145. sudo ln -s `pwd`/macBinaries/haxelib /usr/local/bin/haxelib
  146. sudo ln -s `pwd`/macBinaries/std /usr/local/share/haxe/std
  147. displayName: Setup Haxe
  148. - script: haxe -version
  149. displayName: Print Haxe version
  150. - script: |
  151. set -ex
  152. mkdir ~/haxelib
  153. haxelib setup ~/haxelib
  154. displayName: Setup haxelib
  155. - script: brew install $BREW_PACKAGES
  156. condition: and(succeeded(), variables['BREW_PACKAGES'])
  157. displayName: Install homebrew packages
  158. - script: haxe RunCi.hxml
  159. workingDirectory: $(Build.SourcesDirectory)/tests
  160. displayName: Test
  161. - template: extra/azure-pipelines/test-windows.yml
  162. parameters:
  163. name: TestWin64
  164. arch: '64'
  165. - template: extra/azure-pipelines/test-windows.yml
  166. parameters:
  167. name: TestWin32
  168. arch: '32'
  # Deployment stage: skipped for pull-request builds; individual jobs
  # additionally gate on the presence of their required credentials.
  - stage: StageDeploy
    condition: and(succeeded(), not(variables['System.PullRequest.PullRequestId']))
    jobs:
      # Upload nightly binaries for all platforms to S3.
      - job: S3
        condition: and(succeeded(), variables['HXBUILDS_AWS_ACCESS_KEY_ID'], variables['HXBUILDS_S3ADDR'])
        pool:
          vmImage: 'ubuntu-16.04'
        steps:
          - checkout: self
            fetchDepth: 20
          - task: DownloadPipelineArtifact@0
            inputs:
              artifactName: 'linuxBinaries'
              targetPath: linuxBinaries
            displayName: Download linuxBinaries
          - task: DownloadPipelineArtifact@0
            inputs:
              artifactName: 'macBinaries'
              targetPath: macBinaries
            displayName: Download macBinaries
          - task: DownloadPipelineArtifact@0
            inputs:
              artifactName: 'win64Binaries'
              targetPath: win64Binaries
            displayName: Download win64Binaries
          - task: DownloadPipelineArtifact@0
            inputs:
              artifactName: 'win32Binaries'
              targetPath: win32Binaries
            displayName: Download win32Binaries
          - template: extra/azure-pipelines/install-neko-snapshot.yaml
            parameters:
              platform: linux64
          - script: |
              set -ex
              tar -xf linuxBinaries/*_bin.tar.gz -C linuxBinaries --strip-components=1
              sudo mkdir -p /usr/local/bin/
              sudo mkdir -p /usr/local/share/haxe/
              sudo ln -s `pwd`/linuxBinaries/haxe /usr/local/bin/haxe
              sudo ln -s `pwd`/linuxBinaries/haxelib /usr/local/bin/haxelib
              sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
            displayName: Setup Haxe
          - script: |
              set -ex
              sudo apt-get update -qqy
              sudo apt-get install -qqy awscli
            displayName: "Install awscli"
          # Upload each platform's artifact under a name built from the
          # commit date, branch, and short hash, e.g.
          # haxe_2019-01-01_development_ab12345.tar.gz.
          - script: |
              set -ex
              COMMIT_HASH=`git rev-parse HEAD`
              COMMIT_HASH_SHORT=${COMMIT_HASH:0:7}
              COMMIT_DATE=`TZ=UTC git show --quiet --date='format-local:%Y-%m-%d' --format="%cd"`
              FILE_NAME=haxe_${COMMIT_DATE}_$(Build.SourceBranchName)_${COMMIT_HASH_SHORT}
              aws s3 cp linuxBinaries/*_bin.tar.gz $(HXBUILDS_S3ADDR)/haxe/linux64/${FILE_NAME}.tar.gz
              aws s3 cp macBinaries/*_bin.tar.gz $(HXBUILDS_S3ADDR)/haxe/mac/${FILE_NAME}.tar.gz
              aws s3 cp win64Binaries/*_bin.zip $(HXBUILDS_S3ADDR)/haxe/windows64/${FILE_NAME}.zip
              aws s3 cp win64Binaries/*_installer.zip $(HXBUILDS_S3ADDR)/haxe/windows64-installer/${FILE_NAME}.zip
              aws s3 cp win64Binaries/*.nupkg $(HXBUILDS_S3ADDR)/haxe/windows64-choco/
              aws s3 cp win32Binaries/*_bin.zip $(HXBUILDS_S3ADDR)/haxe/windows/${FILE_NAME}.zip
              aws s3 cp win32Binaries/*_installer.zip $(HXBUILDS_S3ADDR)/haxe/windows-installer/${FILE_NAME}.zip
              aws s3 cp win32Binaries/*.nupkg $(HXBUILDS_S3ADDR)/haxe/windows-choco/
            env:
              AWS_ACCESS_KEY_ID: $(HXBUILDS_AWS_ACCESS_KEY_ID)
              AWS_SECRET_ACCESS_KEY: $(HXBUILDS_AWS_SECRET_ACCESS_KEY)
            displayName: Upload binaries
          # Refresh the fixed haxe_latest names — only for the development branch.
          - script: |
              set -ex
              aws s3 cp linuxBinaries/*_bin.tar.gz $(HXBUILDS_S3ADDR)/haxe/linux64/haxe_latest.tar.gz
              aws s3 cp macBinaries/*_bin.tar.gz $(HXBUILDS_S3ADDR)/haxe/mac/haxe_latest.tar.gz
              aws s3 cp win64Binaries/*_bin.zip $(HXBUILDS_S3ADDR)/haxe/windows64/haxe_latest.zip
              aws s3 cp win64Binaries/*_installer.zip $(HXBUILDS_S3ADDR)/haxe/windows64-installer/haxe_latest.zip
              aws s3 cp win32Binaries/*_bin.zip $(HXBUILDS_S3ADDR)/haxe/windows/haxe_latest.zip
              aws s3 cp win32Binaries/*_installer.zip $(HXBUILDS_S3ADDR)/haxe/windows-installer/haxe_latest.zip
              # Chocolatey packages have to be named with version number,
              # so let's use web redirection to keep the original file name.
              [[ "$HXBUILDS_S3ADDR" =~ s3://([^/]+)(.*) ]] && HXBUILDS_S3BUCKET="${BASH_REMATCH[1]}" && HXBUILDS_S3PATH="${BASH_REMATCH[2]}"
              [[ `echo win64Binaries/*.nupkg` =~ win64Binaries/(.+) ]] && FILE_NAME="${BASH_REMATCH[1]}"
              aws s3 cp $(HXBUILDS_S3ADDR)/haxe/windows64-choco/${FILE_NAME} $(HXBUILDS_S3ADDR)/haxe/windows64-choco/haxe_latest.nupkg --acl public-read --website-redirect "${HXBUILDS_S3PATH}/haxe/windows64-choco/${FILE_NAME}"
              [[ `echo win32Binaries/*.nupkg` =~ win32Binaries/(.+) ]] && FILE_NAME="${BASH_REMATCH[1]}"
              aws s3 cp $(HXBUILDS_S3ADDR)/haxe/windows-choco/${FILE_NAME} $(HXBUILDS_S3ADDR)/haxe/windows-choco/haxe_latest.nupkg --acl public-read --website-redirect "${HXBUILDS_S3PATH}/haxe/windows-choco/${FILE_NAME}"
            env:
              AWS_ACCESS_KEY_ID: $(HXBUILDS_AWS_ACCESS_KEY_ID)
              AWS_SECRET_ACCESS_KEY: $(HXBUILDS_AWS_SECRET_ACCESS_KEY)
            condition: and(succeeded(), eq(variables['Build.SourceBranchName'], 'development'))
            displayName: Update "latest"
  254. - job: ApiHaxeOrg
  255. condition: and(succeeded(), variables['GHP_USERNAME'], variables['GHP_EMAIL'])
  256. pool:
  257. vmImage: 'ubuntu-16.04'
  258. steps:
  259. - checkout: none
  260. - template: extra/azure-pipelines/install-neko-snapshot.yaml
  261. parameters:
  262. platform: linux64
  263. - task: DownloadPipelineArtifact@0
  264. inputs:
  265. artifactName: 'linuxBinaries'
  266. targetPath: linuxBinaries
  267. displayName: Download linuxBinaries
  268. - script: |
  269. set -ex
  270. tar -xf linuxBinaries/*_bin.tar.gz -C linuxBinaries --strip-components=1
  271. sudo mkdir -p /usr/local/bin/
  272. sudo mkdir -p /usr/local/share/haxe/
  273. sudo ln -s `pwd`/linuxBinaries/haxe /usr/local/bin/haxe
  274. sudo ln -s `pwd`/linuxBinaries/haxelib /usr/local/bin/haxelib
  275. sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
  276. displayName: Setup Haxe
  277. - task: DownloadPipelineArtifact@0
  278. inputs:
  279. artifactName: 'xmldoc'
  280. targetPath: xmldoc
  281. displayName: Download xmldoc
  282. - script: |
  283. set -ex
  284. LOCAL="`pwd`/extra/api.haxe.org"
  285. git clone "${GHP_REMOTE}" "${LOCAL}"
  286. haxe --cwd "${LOCAL}" --run ImportXml "`pwd`/xmldoc"
  287. env:
  288. GHP_REMOTE: $(GHP_REMOTE)
  289. displayName: Deploy to api.haxe.org