azure-pipelines.yml

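# Azure Pipelines configuration for building and testing the Haxe compiler.
#
# StageTest builds Haxe on Linux, macOS, and Windows (32- and 64-bit) via the
# templates under extra/azure-pipelines/, then runs the test suite for each
# target in a matrix of jobs. StageDeploy runs only for non-PR builds and,
# when the required secrets are present, uploads nightly binaries to S3 and
# regenerates the api.haxe.org documentation.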
variables:
- group: variables-haxe
- name: AZURE_PIPELINES_REPO_URL
  value: $(Build.Repository.Uri)
- name: AZURE_PIPELINES_BRANCH
  value: $(Build.SourceBranchName)

stages:
- stage: StageTest
  jobs:
  - template: extra/azure-pipelines/build-linux.yml
    parameters:
      name: BuildLinux
  - template: extra/azure-pipelines/build-mac.yml
    parameters:
      name: BuildMac
  - template: extra/azure-pipelines/build-windows.yml
    parameters:
      name: BuildWin64
      arch: '64'
  - template: extra/azure-pipelines/build-windows.yml
    parameters:
      name: BuildWin32
      arch: '32'
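
  # Each matrix entry below runs the test suite for one target. TEST selects
  # the targets exercised by the RunCi script (run via `haxe RunCi.hxml` in
  # the Test step), and APT_PACKAGES lists extra system packages a target
  # needs on the Ubuntu image. To reproduce a single target locally, something
  # like `cd tests && TEST=neko haxe RunCi.hxml` should work, assuming Haxe
  # and Neko are already installed (local setup is not covered by this file).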
  - job: TestLinux
    dependsOn: BuildLinux
    pool:
      vmImage: 'ubuntu-16.04'
    strategy:
      matrix:
        macro:
          TEST: macro
        neko:
          TEST: neko
        hl:
          TEST: hl
          APT_PACKAGES: cmake ninja-build
        cpp:
          TEST: cpp
          HXCPP_COMPILE_CACHE: ~/hxcache
          APT_PACKAGES: gcc-multilib g++-multilib
        java:
          TEST: java,jvm
        cs:
          TEST: cs
        js:
          TEST: js
          SAUCE: 1
          SAUCE_TUNNEL_ID: $(Agent.JobName)
          SAUCE_BUILD: $(Build.BuildNumber)
        php:
          TEST: php
        flash:
          TEST: flash9,as3
          APT_PACKAGES: libglib2.0 libfreetype6 xvfb
          DISPLAY: ':99.0'
          AUDIODEV: 'null'
        python:
          TEST: python
        lua:
          TEST: lua
          APT_PACKAGES: ncurses-dev
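    # The test jobs do not rebuild the compiler: they download the
    # linuxBinaries artifact published by the build jobs, unpack it, and
    # symlink haxe, haxelib, and the standard library into /usr/local.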
    steps:
    - checkout: self
      fetchDepth: 20
    - template: extra/azure-pipelines/install-neko-snapshot.yaml
      parameters:
        platform: linux64
    - task: DownloadPipelineArtifact@0
      inputs:
        artifactName: 'linuxBinaries'
        targetPath: linuxBinaries
    - script: |
        set -ex
        tar -xf linuxBinaries/*_bin.tar.gz -C linuxBinaries --strip-components=1
        sudo mkdir -p /usr/local/bin/
        sudo mkdir -p /usr/local/share/haxe/
        sudo ln -s `pwd`/linuxBinaries/haxe /usr/local/bin/haxe
        sudo ln -s `pwd`/linuxBinaries/haxelib /usr/local/bin/haxelib
        sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
      displayName: Setup Haxe
    - script: haxe -version
      displayName: Print Haxe version
    - script: |
        set -ex
        mkdir ~/haxelib
        haxelib setup ~/haxelib
      displayName: Setup haxelib
    - script: |
        set -ex
        sudo apt update -qqy
        sudo apt install -qqy $APT_PACKAGES
      condition: and(succeeded(), variables['APT_PACKAGES'])
      displayName: Install apt packages
    - script: haxe RunCi.hxml
      workingDirectory: $(Build.SourcesDirectory)/tests
      env:
        ${{ if variables['SAUCE_ACCESS_KEY'] }}:
          SAUCE_ACCESS_KEY: $(SAUCE_ACCESS_KEY)
      displayName: Test
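
  # TestMac mirrors TestLinux: the same set of targets, with Homebrew
  # (BREW_PACKAGES) taking the place of apt for extra system packages, and
  # no Sauce Labs configuration for the js target.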
  - job: TestMac
    dependsOn: BuildMac
    pool:
      vmImage: 'macOS-10.13'
    strategy:
      matrix:
        macro:
          TEST: macro
        neko:
          TEST: neko
        hl:
          TEST: hl
          BREW_PACKAGES: ninja
        cpp:
          TEST: cpp
          HXCPP_COMPILE_CACHE: ~/hxcache
        java:
          TEST: java,jvm
        cs:
          TEST: cs
        js:
          TEST: js
        php:
          TEST: php
        flash:
          TEST: flash9,as3
        python:
          TEST: python
        lua:
          TEST: lua
    steps:
    - checkout: self
      fetchDepth: 20
    - template: extra/azure-pipelines/install-neko-snapshot.yaml
      parameters:
        platform: mac
    - task: DownloadPipelineArtifact@0
      inputs:
        artifactName: 'macBinaries'
        targetPath: macBinaries
    - script: |
        set -ex
        tar -xf macBinaries/*_bin.tar.gz -C macBinaries --strip-components=1
        sudo mkdir -p /usr/local/bin/
        sudo mkdir -p /usr/local/share/haxe/
        sudo ln -s `pwd`/macBinaries/haxe /usr/local/bin/haxe
        sudo ln -s `pwd`/macBinaries/haxelib /usr/local/bin/haxelib
        sudo ln -s `pwd`/macBinaries/std /usr/local/share/haxe/std
      displayName: Setup Haxe
    - script: haxe -version
      displayName: Print Haxe version
    - script: |
        set -ex
        mkdir ~/haxelib
        haxelib setup ~/haxelib
      displayName: Setup haxelib
    - script: brew install $BREW_PACKAGES
      condition: and(succeeded(), variables['BREW_PACKAGES'])
      displayName: Install homebrew packages
    - script: haxe RunCi.hxml
      workingDirectory: $(Build.SourcesDirectory)/tests
      displayName: Test
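
  # The Windows test jobs live in a shared template, parameterised by
  # architecture to match the BuildWin64/BuildWin32 jobs above.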
  - template: extra/azure-pipelines/test-windows.yml
    parameters:
      name: TestWin64
      arch: '64'
  - template: extra/azure-pipelines/test-windows.yml
    parameters:
      name: TestWin32
      arch: '32'
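
# The deploy stage never runs for pull requests, and each job additionally
# guards on the secret variables it needs (AWS credentials and the S3 address
# for the S3 job, the GHP_* variables for api.haxe.org), so builds of forks
# without those secrets simply skip it.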
- stage: StageDeploy
  condition: and(succeeded(), not(variables['System.PullRequest.PullRequestId']))
  jobs:
  - job: S3
    condition: and(succeeded(), variables['HXBUILDS_AWS_ACCESS_KEY_ID'], variables['HXBUILDS_S3ADDR'])
    pool:
      vmImage: 'ubuntu-16.04'
    steps:
    - checkout: self
      fetchDepth: 20
    - task: DownloadPipelineArtifact@0
      inputs:
        artifactName: 'linuxBinaries'
        targetPath: linuxBinaries
      displayName: Download linuxBinaries
    - task: DownloadPipelineArtifact@0
      inputs:
        artifactName: 'macBinaries'
        targetPath: macBinaries
      displayName: Download macBinaries
    - task: DownloadPipelineArtifact@0
      inputs:
        artifactName: 'win64Binaries'
        targetPath: win64Binaries
      displayName: Download win64Binaries
    - task: DownloadPipelineArtifact@0
      inputs:
        artifactName: 'win32Binaries'
        targetPath: win32Binaries
      displayName: Download win32Binaries
    - template: extra/azure-pipelines/install-neko-snapshot.yaml
      parameters:
        platform: linux64
    - script: |
        set -ex
        tar -xf linuxBinaries/*_bin.tar.gz -C linuxBinaries --strip-components=1
        sudo mkdir -p /usr/local/bin/
        sudo mkdir -p /usr/local/share/haxe/
        sudo ln -s `pwd`/linuxBinaries/haxe /usr/local/bin/haxe
        sudo ln -s `pwd`/linuxBinaries/haxelib /usr/local/bin/haxelib
        sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
      displayName: Setup Haxe
    - script: |
        set -ex
        sudo apt-get update -qqy
        sudo apt-get install -qqy awscli
      displayName: "Install awscli"
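    # Nightly archives are named haxe_<commit date>_<branch>_<short commit hash>
    # and sorted into per-platform folders under $(HXBUILDS_S3ADDR)/haxe/.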
    - script: |
        set -ex
        COMMIT_HASH=`git rev-parse HEAD`
        COMMIT_HASH_SHORT=${COMMIT_HASH:0:7}
        COMMIT_DATE=`TZ=UTC git show --quiet --date='format-local:%Y-%m-%d' --format="%cd"`
        FILE_NAME=haxe_${COMMIT_DATE}_$(Build.SourceBranchName)_${COMMIT_HASH_SHORT}
        aws s3 cp linuxBinaries/*_bin.tar.gz $(HXBUILDS_S3ADDR)/haxe/linux64/${FILE_NAME}.tar.gz
        aws s3 cp macBinaries/*_bin.tar.gz $(HXBUILDS_S3ADDR)/haxe/mac/${FILE_NAME}.tar.gz
        aws s3 cp macBinaries/*_installer.tar.gz $(HXBUILDS_S3ADDR)/haxe/mac-installer/${FILE_NAME}.tar.gz
        aws s3 cp win64Binaries/*_bin.zip $(HXBUILDS_S3ADDR)/haxe/windows64/${FILE_NAME}.zip
        aws s3 cp win64Binaries/*_installer.zip $(HXBUILDS_S3ADDR)/haxe/windows64-installer/${FILE_NAME}.zip
        aws s3 cp win64Binaries/*.nupkg $(HXBUILDS_S3ADDR)/haxe/windows64-choco/
        aws s3 cp win32Binaries/*_bin.zip $(HXBUILDS_S3ADDR)/haxe/windows/${FILE_NAME}.zip
        aws s3 cp win32Binaries/*_installer.zip $(HXBUILDS_S3ADDR)/haxe/windows-installer/${FILE_NAME}.zip
        aws s3 cp win32Binaries/*.nupkg $(HXBUILDS_S3ADDR)/haxe/windows-choco/
      env:
        AWS_ACCESS_KEY_ID: $(HXBUILDS_AWS_ACCESS_KEY_ID)
        AWS_SECRET_ACCESS_KEY: $(HXBUILDS_AWS_SECRET_ACCESS_KEY)
      displayName: Upload binaries
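    # Only builds of the development branch refresh the haxe_latest aliases;
    # see the condition below.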
    - script: |
        set -ex
        aws s3 cp linuxBinaries/*_bin.tar.gz $(HXBUILDS_S3ADDR)/haxe/linux64/haxe_latest.tar.gz
        aws s3 cp macBinaries/*_bin.tar.gz $(HXBUILDS_S3ADDR)/haxe/mac/haxe_latest.tar.gz
        aws s3 cp macBinaries/*_installer.tar.gz $(HXBUILDS_S3ADDR)/haxe/mac-installer/haxe_latest.tar.gz
        aws s3 cp win64Binaries/*_bin.zip $(HXBUILDS_S3ADDR)/haxe/windows64/haxe_latest.zip
        aws s3 cp win64Binaries/*_installer.zip $(HXBUILDS_S3ADDR)/haxe/windows64-installer/haxe_latest.zip
        aws s3 cp win32Binaries/*_bin.zip $(HXBUILDS_S3ADDR)/haxe/windows/haxe_latest.zip
        aws s3 cp win32Binaries/*_installer.zip $(HXBUILDS_S3ADDR)/haxe/windows-installer/haxe_latest.zip
        # Chocolatey packages have to be named with a version number,
        # so use a web redirect to keep the original file name.
        [[ "$HXBUILDS_S3ADDR" =~ s3://([^/]+)(.*) ]] && HXBUILDS_S3BUCKET="${BASH_REMATCH[1]}" && HXBUILDS_S3PATH="${BASH_REMATCH[2]}"
        [[ `echo win64Binaries/*.nupkg` =~ win64Binaries/(.+) ]] && FILE_NAME="${BASH_REMATCH[1]}"
        aws s3 cp $(HXBUILDS_S3ADDR)/haxe/windows64-choco/${FILE_NAME} $(HXBUILDS_S3ADDR)/haxe/windows64-choco/haxe_latest.nupkg --acl public-read --website-redirect "${HXBUILDS_S3PATH}/haxe/windows64-choco/${FILE_NAME}"
        [[ `echo win32Binaries/*.nupkg` =~ win32Binaries/(.+) ]] && FILE_NAME="${BASH_REMATCH[1]}"
        aws s3 cp $(HXBUILDS_S3ADDR)/haxe/windows-choco/${FILE_NAME} $(HXBUILDS_S3ADDR)/haxe/windows-choco/haxe_latest.nupkg --acl public-read --website-redirect "${HXBUILDS_S3PATH}/haxe/windows-choco/${FILE_NAME}"
      env:
        AWS_ACCESS_KEY_ID: $(HXBUILDS_AWS_ACCESS_KEY_ID)
        AWS_SECRET_ACCESS_KEY: $(HXBUILDS_AWS_SECRET_ACCESS_KEY)
      condition: and(succeeded(), eq(variables['Build.SourceBranchName'], 'development'))
      displayName: Update "latest"
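
  # Regenerates the api.haxe.org documentation from the xmldoc artifact: the
  # generated XML is fed to the ImportXml script of the repository cloned from
  # the GHP_REMOTE secret. Publishing the result is presumably handled there;
  # the GHP_USERNAME/GHP_EMAIL guards suggest a GitHub Pages deployment.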
  - job: ApiHaxeOrg
    condition: and(succeeded(), variables['GHP_USERNAME'], variables['GHP_EMAIL'])
    pool:
      vmImage: 'ubuntu-16.04'
    steps:
    - checkout: none
    - template: extra/azure-pipelines/install-neko-snapshot.yaml
      parameters:
        platform: linux64
    - task: DownloadPipelineArtifact@0
      inputs:
        artifactName: 'linuxBinaries'
        targetPath: linuxBinaries
      displayName: Download linuxBinaries
    - script: |
        set -ex
        tar -xf linuxBinaries/*_bin.tar.gz -C linuxBinaries --strip-components=1
        sudo mkdir -p /usr/local/bin/
        sudo mkdir -p /usr/local/share/haxe/
        sudo ln -s `pwd`/linuxBinaries/haxe /usr/local/bin/haxe
        sudo ln -s `pwd`/linuxBinaries/haxelib /usr/local/bin/haxelib
        sudo ln -s `pwd`/linuxBinaries/std /usr/local/share/haxe/std
      displayName: Setup Haxe
    - task: DownloadPipelineArtifact@0
      inputs:
        artifactName: 'xmldoc'
        targetPath: xmldoc
      displayName: Download xmldoc
    - script: |
        set -ex
        LOCAL="`pwd`/extra/api.haxe.org"
        git clone "${GHP_REMOTE}" "${LOCAL}"
        haxe --cwd "${LOCAL}" --run ImportXml "`pwd`/xmldoc"
      env:
        GHP_REMOTE: $(GHP_REMOTE)
      displayName: Deploy to api.haxe.org