# .github/workflows/release.yml
  1. name: Release
  2. on:
  3. workflow_dispatch: # allows manual triggering
  4. inputs:
  5. create_release:
  6. description: 'Create new release'
  7. required: true
  8. type: boolean
  9. push:
  10. branches:
  11. - master
  12. paths: ['.github/workflows/release.yml', '**/CMakeLists.txt', '**/.cmake', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.cuh', '**/*.swift', '**/*.m', '**/*.metal', '**/*.comp']
  13. concurrency:
  14. group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
  15. cancel-in-progress: true
  16. env:
  17. BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
  18. CMAKE_ARGS: "-DLLAMA_BUILD_EXAMPLES=OFF -DLLAMA_BUILD_TESTS=OFF -DLLAMA_BUILD_TOOLS=ON -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON"
  19. jobs:
  # Apple Silicon build with Metal enabled; artifacts zipped from build/bin.
  macOS-arm64:
    runs-on: macos-14

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-arm64
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update
          brew install curl

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build \
            -DCMAKE_INSTALL_RPATH='@loader_path' \
            -DCMAKE_BUILD_WITH_INSTALL_RPATH=ON \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DGGML_RPC=ON \
            ${{ env.CMAKE_ARGS }}
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)

      - name: Determine tag name
        id: tag
        uses: ./.github/actions/get-tag-name

      - name: Pack artifacts
        id: pack_artifacts
        run: |
          cp LICENSE ./build/bin/
          zip -r llama-${{ steps.tag.outputs.name }}-bin-macos-arm64.zip ./build/bin/*

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          path: llama-${{ steps.tag.outputs.name }}-bin-macos-arm64.zip
          name: llama-bin-macos-arm64.zip
  # Intel macOS build; Metal is intentionally disabled (see comment in the Build step).
  macOS-x64:
    runs-on: macos-13

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-x64
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update
          brew install curl

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          # Metal is disabled due to intermittent failures with Github runners not having a GPU:
          # https://github.com/ggml-org/llama.cpp/actions/runs/8635935781/job/23674807267#step:5:2313
          # Fix: pass ${{ env.CMAKE_ARGS }} like every other build job, so the
          # tools/server binaries packaged below are actually built.
          cmake -B build \
            -DCMAKE_INSTALL_RPATH='@loader_path' \
            -DCMAKE_BUILD_WITH_INSTALL_RPATH=ON \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DGGML_METAL=OFF \
            -DGGML_RPC=ON \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=13.3 \
            ${{ env.CMAKE_ARGS }}
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)

      - name: Determine tag name
        id: tag
        uses: ./.github/actions/get-tag-name

      - name: Pack artifacts
        id: pack_artifacts
        run: |
          cp LICENSE ./build/bin/
          zip -r llama-${{ steps.tag.outputs.name }}-bin-macos-x64.zip ./build/bin/*

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          path: llama-${{ steps.tag.outputs.name }}-bin-macos-x64.zip
          name: llama-bin-macos-x64.zip
  # Portable Linux CPU build: all CPU variants compiled, backend loaded dynamically.
  ubuntu-22-cpu:
    strategy:
      matrix:
        include:
          - build: 'x64'
            os: ubuntu-22.04
          # GGML_BACKEND_DL and GGML_CPU_ALL_VARIANTS are not currently supported on arm
          # - build: 'arm64'
          #   os: ubuntu-22.04-arm

    runs-on: ${{ matrix.os }}

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-cpu-cmake
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Build
        id: cmake_build
        run: |
          cmake -B build \
            -DCMAKE_INSTALL_RPATH='$ORIGIN' \
            -DCMAKE_BUILD_WITH_INSTALL_RPATH=ON \
            -DGGML_BACKEND_DL=ON \
            -DGGML_NATIVE=OFF \
            -DGGML_CPU_ALL_VARIANTS=ON \
            -DLLAMA_FATAL_WARNINGS=ON \
            ${{ env.CMAKE_ARGS }}
          cmake --build build --config Release -j $(nproc)

      - name: Determine tag name
        id: tag
        uses: ./.github/actions/get-tag-name

      - name: Pack artifacts
        id: pack_artifacts
        run: |
          cp LICENSE ./build/bin/
          zip -r llama-${{ steps.tag.outputs.name }}-bin-ubuntu-${{ matrix.build }}.zip ./build/bin/*

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          path: llama-${{ steps.tag.outputs.name }}-bin-ubuntu-${{ matrix.build }}.zip
          name: llama-bin-ubuntu-${{ matrix.build }}.zip
  # Linux Vulkan build using the LunarG SDK apt repository.
  ubuntu-22-vulkan:
    runs-on: ubuntu-22.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-vulkan
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | sudo apt-key add -
          sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list
          sudo apt-get update -y
          sudo apt-get install -y build-essential mesa-vulkan-drivers vulkan-sdk libcurl4-openssl-dev

      - name: Build
        id: cmake_build
        run: |
          cmake -B build \
            -DCMAKE_INSTALL_RPATH='$ORIGIN' \
            -DCMAKE_BUILD_WITH_INSTALL_RPATH=ON \
            -DGGML_BACKEND_DL=ON \
            -DGGML_NATIVE=OFF \
            -DGGML_CPU_ALL_VARIANTS=ON \
            -DGGML_VULKAN=ON \
            ${{ env.CMAKE_ARGS }}
          cmake --build build --config Release -j $(nproc)

      - name: Determine tag name
        id: tag
        uses: ./.github/actions/get-tag-name

      - name: Pack artifacts
        id: pack_artifacts
        run: |
          cp LICENSE ./build/bin/
          zip -r llama-${{ steps.tag.outputs.name }}-bin-ubuntu-vulkan-x64.zip ./build/bin/*

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          path: llama-${{ steps.tag.outputs.name }}-bin-ubuntu-vulkan-x64.zip
          name: llama-bin-ubuntu-vulkan-x64.zip
  # Windows CPU builds (x64 + arm64 cross-compile) with LLVM toolchain and libcurl.
  windows-cpu:
    runs-on: windows-2025

    strategy:
      matrix:
        include:
          - arch: 'x64'
          - arch: 'arm64'

    steps:
      - name: Clone
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-latest-cmake-cpu-${{ matrix.arch }}
          variant: ccache
          evict-old-files: 1d

      - name: Install Ninja
        run: |
          choco install ninja

      - name: libCURL
        id: get_libcurl
        uses: ./.github/actions/windows-setup-curl
        with:
          architecture: ${{ matrix.arch == 'x64' && 'win64' || 'win64a' }}

      - name: Build
        shell: cmd
        env:
          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
        run: |
          call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" ${{ matrix.arch == 'x64' && 'x64' || 'amd64_arm64' }}
          cmake -S . -B build -G "Ninja Multi-Config" ^
            -D CMAKE_TOOLCHAIN_FILE=cmake/${{ matrix.arch }}-windows-llvm.cmake ^
            -DGGML_NATIVE=OFF ^
            -DGGML_BACKEND_DL=ON ^
            -DGGML_CPU_ALL_VARIANTS=${{ matrix.arch == 'x64' && 'ON' || 'OFF' }} ^
            -DGGML_OPENMP=ON ^
            -DCURL_LIBRARY="%CURL_PATH%/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="%CURL_PATH%/include" ^
            ${{ env.CMAKE_ARGS }}
          cmake --build build --config Release

      - name: Pack artifacts
        id: pack_artifacts
        env:
          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
        run: |
          Copy-Item $env:CURL_PATH\bin\libcurl-${{ matrix.arch }}.dll .\build\bin\Release\
          # NOTE(review): the MSVC redist version below is pinned (14.44.35112) and will
          # break when the runner image updates — confirm it still exists on windows-2025.
          Copy-Item "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Redist\MSVC\14.44.35112\debug_nonredist\${{ matrix.arch }}\Microsoft.VC143.OpenMP.LLVM\libomp140.${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }}.dll" .\build\bin\Release\
          7z a llama-bin-win-cpu-${{ matrix.arch }}.zip .\build\bin\Release\*

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          path: llama-bin-win-cpu-${{ matrix.arch }}.zip
          name: llama-bin-win-cpu-${{ matrix.arch }}.zip
  # Windows GPU backend DLLs (vulkan x64, opencl-adreno arm64) — builds only the
  # backend target; the CPU zip is merged in later by the release job.
  windows:
    runs-on: windows-2025

    env:
      OPENBLAS_VERSION: 0.3.23
      VULKAN_VERSION: 1.4.313.2

    strategy:
      matrix:
        include:
          - backend: 'vulkan'
            arch: 'x64'
            defines: '-DGGML_VULKAN=ON'
            target: 'ggml-vulkan'
          - backend: 'opencl-adreno'
            arch: 'arm64'
            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/arm64-windows-llvm.cmake -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/opencl-arm64-release" -DGGML_OPENCL=ON -DGGML_OPENCL_USE_ADRENO_KERNELS=ON'
            target: 'ggml-opencl'

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-latest-cmake-${{ matrix.backend }}-${{ matrix.arch }}
          variant: ccache
          evict-old-files: 1d

      - name: Install Vulkan SDK
        id: get_vulkan
        if: ${{ matrix.backend == 'vulkan' }}
        run: |
          curl.exe -o $env:RUNNER_TEMP/VulkanSDK-Installer.exe -L "https://sdk.lunarg.com/sdk/download/${env:VULKAN_VERSION}/windows/vulkansdk-windows-X64-${env:VULKAN_VERSION}.exe"
          & "$env:RUNNER_TEMP\VulkanSDK-Installer.exe" --accept-licenses --default-answer --confirm-command install
          Add-Content $env:GITHUB_ENV "VULKAN_SDK=C:\VulkanSDK\${env:VULKAN_VERSION}"
          Add-Content $env:GITHUB_PATH "C:\VulkanSDK\${env:VULKAN_VERSION}\bin"

      - name: Install Ninja
        id: install_ninja
        run: |
          choco install ninja

      - name: Install OpenCL Headers and Libs
        id: install_opencl
        if: ${{ matrix.backend == 'opencl-adreno' && matrix.arch == 'arm64' }}
        run: |
          git clone https://github.com/KhronosGroup/OpenCL-Headers
          cd OpenCL-Headers
          cmake -B build `
            -DBUILD_TESTING=OFF `
            -DOPENCL_HEADERS_BUILD_TESTING=OFF `
            -DOPENCL_HEADERS_BUILD_CXX_TESTS=OFF `
            -DCMAKE_INSTALL_PREFIX="$env:RUNNER_TEMP/opencl-arm64-release"
          cmake --build build --target install
          git clone https://github.com/KhronosGroup/OpenCL-ICD-Loader
          cd OpenCL-ICD-Loader
          cmake -B build-arm64-release `
            -A arm64 `
            -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/opencl-arm64-release" `
            -DCMAKE_INSTALL_PREFIX="$env:RUNNER_TEMP/opencl-arm64-release"
          cmake --build build-arm64-release --target install --config release

      - name: Build
        id: cmake_build
        run: |
          cmake -S . -B build ${{ matrix.defines }} -DGGML_NATIVE=OFF -DGGML_CPU=OFF -DGGML_BACKEND_DL=ON -DLLAMA_CURL=OFF
          cmake --build build --config Release --target ${{ matrix.target }}

      - name: Pack artifacts
        id: pack_artifacts
        run: |
          7z a llama-bin-win-${{ matrix.backend }}-${{ matrix.arch }}.zip .\build\bin\Release\${{ matrix.target }}.dll

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          path: llama-bin-win-${{ matrix.backend }}-${{ matrix.arch }}.zip
          name: llama-bin-win-${{ matrix.backend }}-${{ matrix.arch }}.zip
  # Windows CUDA backend DLL plus a separate CUDA-runtime redistributable zip.
  windows-cuda:
    runs-on: windows-2022

    strategy:
      matrix:
        cuda: ['12.4']

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Install ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-cuda-${{ matrix.cuda }}
          variant: ccache
          evict-old-files: 1d

      - name: Install Cuda Toolkit
        uses: ./.github/actions/windows-setup-cuda
        with:
          cuda_version: ${{ matrix.cuda }}

      - name: Install Ninja
        id: install_ninja
        run: |
          choco install ninja

      - name: Build
        id: cmake_build
        shell: cmd
        run: |
          call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" x64
          cmake -S . -B build -G "Ninja Multi-Config" ^
            -DGGML_BACKEND_DL=ON ^
            -DGGML_NATIVE=OFF ^
            -DGGML_CPU=OFF ^
            -DGGML_CUDA=ON ^
            -DLLAMA_CURL=OFF
          REM leave one core free so the runner stays responsive
          set /A NINJA_JOBS=%NUMBER_OF_PROCESSORS%-1
          cmake --build build --config Release -j %NINJA_JOBS% --target ggml-cuda

      - name: Pack artifacts
        id: pack_artifacts
        run: |
          7z a llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip .\build\bin\Release\ggml-cuda.dll

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          path: llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip
          name: llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip

      - name: Copy and pack Cuda runtime
        run: |
          echo "Cuda install location: ${{ env.CUDA_PATH }}"
          $dst='.\build\bin\cudart\'
          robocopy "${{env.CUDA_PATH}}\bin" $dst cudart64_*.dll cublas64_*.dll cublasLt64_*.dll
          robocopy "${{env.CUDA_PATH}}\lib" $dst cudart64_*.dll cublas64_*.dll cublasLt64_*.dll
          7z a cudart-llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip $dst\*

      - name: Upload Cuda runtime
        uses: actions/upload-artifact@v4
        with:
          path: cudart-llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip
          name: cudart-llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip
  # Windows SYCL (Intel oneAPI) backend; bundles the oneAPI runtime DLLs.
  windows-sycl:
    runs-on: windows-2022

    defaults:
      run:
        shell: bash

    env:
      WINDOWS_BASEKIT_URL: https://registrationcenter-download.intel.com/akdlm/IRC_NAS/7cd9bba0-7aab-4e30-b3ae-2221006a4a05/intel-oneapi-base-toolkit-2025.1.1.34_offline.exe
      WINDOWS_DPCPP_MKL: intel.oneapi.win.cpp-dpcpp-common:intel.oneapi.win.mkl.devel:intel.oneapi.win.dnnl:intel.oneapi.win.tbb.devel
      ONEAPI_ROOT: "C:/Program Files (x86)/Intel/oneAPI"

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-latest-cmake-sycl
          variant: ccache
          evict-old-files: 1d

      - name: Install
        run: |
          scripts/install-oneapi.bat $WINDOWS_BASEKIT_URL $WINDOWS_DPCPP_MKL

      - name: Build
        id: cmake_build
        shell: cmd
        run: |
          call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force
          cmake -G "Ninja" -B build ^
            -DCMAKE_C_COMPILER=cl -DCMAKE_CXX_COMPILER=icx ^
            -DCMAKE_BUILD_TYPE=Release ^
            -DGGML_BACKEND_DL=ON -DBUILD_SHARED_LIBS=ON ^
            -DGGML_CPU=OFF -DGGML_SYCL=ON ^
            -DLLAMA_CURL=OFF
          cmake --build build --target ggml-sycl -j

      - name: Build the release package
        id: pack_artifacts
        run: |
          echo "cp oneAPI running time dll files in ${{ env.ONEAPI_ROOT }} to ./build/bin"
          cp "${{ env.ONEAPI_ROOT }}/mkl/latest/bin/mkl_sycl_blas.5.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/mkl/latest/bin/mkl_core.2.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/mkl/latest/bin/mkl_tbb_thread.2.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/ur_adapter_level_zero.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/ur_adapter_opencl.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/ur_loader.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/ur_win_proxy_loader.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/sycl8.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/svml_dispmd.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/libmmd.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/compiler/latest/bin/libiomp5md.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/dnnl/latest/bin/dnnl.dll" ./build/bin
          cp "${{ env.ONEAPI_ROOT }}/tbb/latest/bin/tbb12.dll" ./build/bin
          echo "cp oneAPI running time dll files to ./build/bin done"
          7z a llama-bin-win-sycl-x64.zip ./build/bin/*

      - name: Upload the release package
        uses: actions/upload-artifact@v4
        with:
          path: llama-bin-win-sycl-x64.zip
          name: llama-bin-win-sycl-x64.zip
  # Windows ROCm/HIP backend; installs the HIP SDK (cached), builds ggml-hip and
  # bundles the rocBLAS/hipBLASLt runtime libraries.
  windows-hip:
    runs-on: windows-2022

    env:
      # The ROCm version must correspond to the version used in the HIP SDK.
      ROCM_VERSION: "6.4.2"
      HIPSDK_INSTALLER_VERSION: "25.Q3"

    strategy:
      matrix:
        include:
          - name: "radeon"
            gpu_targets: "gfx1200;gfx1201;gfx1100;gfx1101;gfx1102;gfx1030;gfx1031;gfx1032"

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Clone rocWMMA repository
        id: clone_rocwmma
        run: |
          git clone https://github.com/rocm/rocwmma --branch rocm-${{ env.ROCM_VERSION }} --depth 1

      - name: Cache ROCm Installation
        id: cache-rocm
        uses: actions/cache@v4
        with:
          path: C:\Program Files\AMD\ROCm
          key: rocm-${{ env.HIPSDK_INSTALLER_VERSION }}-${{ runner.os }}

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-latest-cmake-hip-${{ env.HIPSDK_INSTALLER_VERSION }}-${{ matrix.name }}-x64
          evict-old-files: 1d

      - name: Install ROCm
        if: steps.cache-rocm.outputs.cache-hit != 'true'
        id: depends
        run: |
          $ErrorActionPreference = "Stop"
          write-host "Downloading AMD HIP SDK Installer"
          Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-${{ env.HIPSDK_INSTALLER_VERSION }}-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe"
          write-host "Installing AMD HIP SDK"
          $proc = Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -PassThru
          $completed = $proc.WaitForExit(600000)
          if (-not $completed) {
            Write-Error "ROCm installation timed out after 10 minutes. Killing the process"
            $proc.Kill()
            exit 1
          }
          if ($proc.ExitCode -ne 0) {
            Write-Error "ROCm installation failed with exit code $($proc.ExitCode)"
            exit 1
          }
          write-host "Completed AMD HIP SDK installation"

      - name: Verify ROCm
        id: verify
        run: |
          # Find and test ROCm installation
          $clangPath = Get-ChildItem 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | Select-Object -First 1
          if (-not $clangPath) {
            Write-Error "ROCm installation not found"
            exit 1
          }
          & $clangPath.FullName --version

      - name: Build
        id: cmake_build
        run: |
          $env:HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path)
          $env:CMAKE_PREFIX_PATH="${env:HIP_PATH}"
          cmake -G "Unix Makefiles" -B build -S . `
            -DCMAKE_C_COMPILER="${env:HIP_PATH}\bin\clang.exe" `
            -DCMAKE_CXX_COMPILER="${env:HIP_PATH}\bin\clang++.exe" `
            -DCMAKE_CXX_FLAGS="-I$($PWD.Path.Replace('\', '/'))/rocwmma/library/include/ -Wno-ignored-attributes -Wno-nested-anon-types" `
            -DCMAKE_BUILD_TYPE=Release `
            -DGGML_BACKEND_DL=ON `
            -DGGML_NATIVE=OFF `
            -DGGML_CPU=OFF `
            -DAMDGPU_TARGETS="${{ matrix.gpu_targets }}" `
            -DGGML_HIP_ROCWMMA_FATTN=ON `
            -DGGML_HIP=ON `
            -DLLAMA_CURL=OFF
          cmake --build build --target ggml-hip -j ${env:NUMBER_OF_PROCESSORS}
          md "build\bin\rocblas\library\"
          md "build\bin\hipblaslt\library"
          cp "${env:HIP_PATH}\bin\hipblas.dll" "build\bin\"
          cp "${env:HIP_PATH}\bin\hipblaslt.dll" "build\bin\"
          cp "${env:HIP_PATH}\bin\rocblas.dll" "build\bin\"
          cp "${env:HIP_PATH}\bin\rocblas\library\*" "build\bin\rocblas\library\"
          cp "${env:HIP_PATH}\bin\hipblaslt\library\*" "build\bin\hipblaslt\library\"

      - name: Pack artifacts
        id: pack_artifacts
        run: |
          7z a llama-bin-win-hip-${{ matrix.name }}-x64.zip .\build\bin\*

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          path: llama-bin-win-hip-${{ matrix.name }}-x64.zip
          name: llama-bin-win-hip-${{ matrix.name }}-x64.zip
  # iOS build + xcframework packaging; also smoke-builds the SwiftUI example.
  ios-xcode-build:
    runs-on: macos-15

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup Xcode
        run: |
          sudo xcode-select -s /Applications/Xcode_16.4.app

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_CURL=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_SYSTEM_NAME=iOS \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \
            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO

      - name: xcodebuild for swift package
        id: xcodebuild
        run: |
          ./build-xcframework.sh

      - name: Build Xcode project
        run: xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' FRAMEWORK_FOLDER_PATH=./build-ios build

      - name: Determine tag name
        id: tag
        uses: ./.github/actions/get-tag-name

      - name: Pack artifacts
        id: pack_artifacts
        run: |
          zip --symlinks -r llama-${{ steps.tag.outputs.name }}-xcframework.zip build-apple/llama.xcframework

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          path: llama-${{ steps.tag.outputs.name }}-xcframework.zip
          name: llama-${{ steps.tag.outputs.name }}-xcframework
  # Collects all build artifacts, merges the Windows CPU backend into each
  # Windows GPU-backend zip, tags the release, and uploads the assets.
  release:
    if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }}

    # Fine-grained permission
    # https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#modifying-the-permissions-for-the-github_token
    permissions:
      contents: write # for creating release

    runs-on: ubuntu-latest

    needs:
      - windows
      - windows-cpu
      - windows-cuda
      - windows-sycl
      - windows-hip
      - ubuntu-22-cpu
      - ubuntu-22-vulkan
      - macOS-arm64
      - macOS-x64
      - ios-xcode-build

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Determine tag name
        id: tag
        uses: ./.github/actions/get-tag-name

      - name: Download artifacts
        id: download-artifact
        uses: actions/download-artifact@v4
        with:
          path: ./artifact
          merge-multiple: true

      - name: Move artifacts
        id: move_artifacts
        run: |
          mkdir -p release

          echo "Adding CPU backend files to existing zips..."
          for arch in x64 arm64; do
            cpu_zip="artifact/llama-bin-win-cpu-${arch}.zip"
            temp_dir=$(mktemp -d)
            echo "Extracting CPU backend for $arch..."
            unzip "$cpu_zip" -d "$temp_dir"

            echo "Adding CPU files to $arch zips..."
            for target_zip in artifact/llama-bin-win-*-${arch}.zip; do
              if [[ "$target_zip" == "$cpu_zip" ]]; then
                continue
              fi
              echo "Adding CPU backend to $(basename "$target_zip")"
              realpath_target_zip=$(realpath "$target_zip")
              (cd "$temp_dir" && zip -r "$realpath_target_zip" .)
            done

            rm -rf "$temp_dir"
          done

          echo "Renaming and moving zips to release..."
          for zip_file in artifact/llama-bin-win-*.zip; do
            base_name=$(basename "$zip_file" .zip)
            zip_name="llama-${{ steps.tag.outputs.name }}-${base_name#llama-}.zip"
            echo "Moving $zip_file to release/$zip_name"
            mv "$zip_file" "release/$zip_name"
          done

          echo "Moving other artifacts..."
          mv -v artifact/*.zip release

      - name: Create release
        id: create_release
        uses: ggml-org/action-create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ steps.tag.outputs.name }}

      - name: Upload release
        id: upload_release
        # v3 runs on the retired Node 12 runtime; v7 (Node 20) moves the Octokit
        # client under `github.rest.*`, hence the call-site change below.
        uses: actions/github-script@v7
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
            const path = require('path');
            const fs = require('fs');
            const release_id = '${{ steps.create_release.outputs.id }}';
            for (let file of fs.readdirSync('./release')) {
              if (path.extname(file) === '.zip') {
                console.log('uploadReleaseAsset', file);
                await github.rest.repos.uploadReleaseAsset({
                  owner: context.repo.owner,
                  repo: context.repo.repo,
                  release_id: release_id,
                  name: file,
                  data: fs.readFileSync(`./release/${file}`)
                });
              }
            }