name: CI

on:
  workflow_dispatch: # allows manual triggering
  push:
    branches:
      - master
    paths: [
      '.github/workflows/build.yml',
      '.github/workflows/build-linux-cross.yml',
      '.github/workflows/build-cmake-pkg.yml',
      '**/CMakeLists.txt',
      '**/*.cmake',
      '**/*.h',
      '**/*.hpp',
      '**/*.c',
      '**/*.cpp',
      '**/*.cu',
      '**/*.cuh',
      '**/*.swift',
      '**/*.m',
      '**/*.metal',
      '**/*.comp'
    ]
  pull_request:
    types: [opened, synchronize, reopened]
    paths: [
      '.github/workflows/build.yml',
      '.github/workflows/build-linux-cross.yml',
      '.github/workflows/build-cmake-pkg.yml',
      '**/CMakeLists.txt',
      '**/*.cmake',
      '**/*.h',
      '**/*.hpp',
      '**/*.c',
      '**/*.cpp',
      '**/*.cu',
      '**/*.cuh',
      '**/*.swift',
      '**/*.m',
      '**/*.metal',
      '**/*.comp'
    ]
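
# Cancel superseded runs: pull request events share a concurrency group keyed on
# github.head_ref, so a new push to the same PR cancels the previous run; other events
# (e.g. pushes to master) fall back to the unique run id and are never cancelled.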
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
  cancel-in-progress: true
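
# Settings shared by all jobs. GGML_NLOOP and GGML_N_THREADS are presumably picked up
# by the ggml test binaries; the LLAMA_LOG_* flags enable colored, prefixed and
# timestamped log output in the CI logs.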
env:
  GGML_NLOOP: 3
  GGML_N_THREADS: 1
  LLAMA_LOG_COLORS: 1
  LLAMA_LOG_PREFIX: 1
  LLAMA_LOG_TIMESTAMPS: 1

jobs:
  macOS-latest-cmake-arm64:
    runs-on: macos-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-arm64
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update
          brew install curl

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build \
            -DCMAKE_BUILD_RPATH="@loader_path" \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=OFF \
            -DGGML_METAL_SHADER_DEBUG=ON \
            -DGGML_RPC=ON
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)
          leaks -atExit -- ./build/bin/test-thread-safety -hf ggml-org/gemma-3-270m-qat-GGUF -ngl 99 -p "$(printf 'hello %.0s' {1..128})" -n 16 -c 512 -ub 32 -np 2 -t 2 -lv 1

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L 'main|curl' --verbose --timeout 900

  macOS-latest-cmake-x64:
    runs-on: macos-13

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-x64
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update
          brew install curl

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          # Metal is disabled due to intermittent failures with GitHub runners not having a GPU:
          # https://github.com/ggml-org/llama.cpp/actions/runs/8635935781/job/23674807267#step:5:2313
          cmake -B build \
            -DCMAKE_BUILD_RPATH="@loader_path" \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DGGML_METAL=OFF \
            -DGGML_RPC=ON \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=13.3
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L main --verbose --timeout 900
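
  # The WebGPU jobs (this one and ubuntu-22-cmake-webgpu below) link against a prebuilt
  # Dawn release downloaded from the reeselevine/dawn GitHub releases rather than
  # building Dawn from source.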
  macOS-latest-cmake-arm64-webgpu:
    runs-on: macos-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-arm64-webgpu
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update
          brew install curl

      - name: Dawn Dependency
        id: dawn-depends
        run: |
          DAWN_VERSION="v1.0.0"
          DAWN_OWNER="reeselevine"
          DAWN_REPO="dawn"
          DAWN_ASSET_NAME="Dawn-a1a6b45cced25a3b7f4fb491e0ae70796cc7f22b-macos-latest-Release.tar.gz"
          echo "Fetching release asset from https://github.com/${DAWN_OWNER}/${DAWN_REPO}/releases/download/${DAWN_VERSION}/${DAWN_ASSET_NAME}"
          curl -L -o artifact.tar.gz \
            "https://github.com/${DAWN_OWNER}/${DAWN_REPO}/releases/download/${DAWN_VERSION}/${DAWN_ASSET_NAME}"
          mkdir dawn
          tar -xvf artifact.tar.gz -C dawn --strip-components=1

      - name: Build
        id: cmake_build
        run: |
          export CMAKE_PREFIX_PATH=dawn
          cmake -B build -DGGML_WEBGPU=ON -DGGML_METAL=OFF -DGGML_BLAS=OFF
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L main --verbose --timeout 900
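
  # ubuntu-cpu-cmake builds and tests on four CPU architectures via the matrix below.
  # On s390x the models under models/ are converted to big-endian first, since GGUF
  # files default to little-endian.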
  ubuntu-cpu-cmake:
    strategy:
      matrix:
        include:
          - build: 'x64'
            os: ubuntu-22.04
          - build: 'arm64'
            os: ubuntu-22.04-arm
          - build: 's390x'
            os: ubuntu-24.04-s390x
          - build: 'ppc64le'
            os: ubuntu-24.04-ppc64le

    runs-on: ${{ matrix.os }}

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-cpu-cmake
          evict-old-files: 1d

      - name: Build Dependencies
        id: build_depends
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends \
            python3 python3-pip python3-dev \
            libjpeg-dev build-essential libcurl4-openssl-dev \
            git-lfs

      - name: Python Dependencies
        id: python_depends
        run: |
          python3 -m pip install --upgrade pip
          pip3 install ./gguf-py

      - name: Swap Endianness
        id: endianness
        if: ${{ matrix.build == 's390x' }}
        run: |
          for f in models/*.gguf; do
            echo YES | python3 gguf-py/gguf/scripts/gguf_convert_endian.py $f big
          done

      - name: Build
        id: cmake_build
        run: |
          cmake -B build \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DGGML_RPC=ON
          cmake --build build --config Release -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L 'main|curl' --verbose --timeout 900

      - name: Test llama2c conversion
        id: llama2c_test
        if: ${{ matrix.build != 's390x' }}
        run: |
          cd build
          echo "Fetch tokenizer"
          wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories260K/tok512.bin
          echo "Fetch llama2c model"
          wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories260K/stories260K.bin
          ./bin/llama-convert-llama2c-to-ggml --copy-vocab-from-model ./tok512.bin --llama2c-model stories260K.bin --llama2c-output-model stories260K.gguf
          ./bin/llama-cli -m stories260K.gguf -p "One day, Lily met a Shoggoth" -n 500 -c 256

      - name: Test llama2c (s390x)
        id: llama2c_test_s390x
        if: ${{ matrix.build == 's390x' }}
        run: |
          cd build
          echo "Fetch llama2c big-endian model"
          wget https://huggingface.co/ggml-org/models/resolve/main/tinyllamas/stories260K-be.gguf
          ./bin/llama-cli -m stories260K-be.gguf -p "One day, Lily met a Shoggoth" -n 500 -c 256
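
  # One Debug build per sanitizer; the THREAD variant additionally disables OpenMP,
  # presumably to avoid false positives from the OpenMP runtime under TSan.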
  ubuntu-latest-cmake-sanitizer:
    runs-on: ubuntu-latest

    continue-on-error: true

    strategy:
      matrix:
        sanitizer: [ADDRESS, THREAD, UNDEFINED]
        build_type: [Debug]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-latest-cmake-sanitizer-${{ matrix.sanitizer }}
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Build
        id: cmake_build
        if: ${{ matrix.sanitizer != 'THREAD' }}
        run: |
          cmake -B build \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON \
            -DCMAKE_BUILD_TYPE=${{ matrix.build_type }}
          cmake --build build --config ${{ matrix.build_type }} -j $(nproc)

      - name: Build (no OpenMP)
        id: cmake_build_no_openmp
        if: ${{ matrix.sanitizer == 'THREAD' }}
        run: |
          cmake -B build \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON \
            -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} \
            -DGGML_OPENMP=OFF
          cmake --build build --config ${{ matrix.build_type }} -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L main --verbose --timeout 900

  ubuntu-latest-llguidance:
    runs-on: ubuntu-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Build
        id: cmake_build
        run: |
          mkdir build
          cd build
          cmake .. \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DLLAMA_LLGUIDANCE=ON
          cmake --build . --config Release -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L main --verbose --timeout 900

  ubuntu-latest-cmake-rpc:
    runs-on: ubuntu-latest

    continue-on-error: true

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-latest-cmake-rpc
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Build
        id: cmake_build
        run: |
          cmake -B build \
            -DGGML_RPC=ON
          cmake --build build --config Release -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L main --verbose

  ubuntu-22-cmake-vulkan:
    runs-on: ubuntu-22.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-vulkan
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | sudo apt-key add -
          sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list
          sudo apt-get update -y
          sudo apt-get install -y build-essential mesa-vulkan-drivers vulkan-sdk libcurl4-openssl-dev

      - name: Build
        id: cmake_build
        run: |
          cmake -B build \
            -DGGML_VULKAN=ON
          cmake --build build --config Release -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          export GGML_VK_VISIBLE_DEVICES=0
          # This is using llvmpipe and runs slower than other backends
          ctest -L main --verbose --timeout 4200

  ubuntu-22-cmake-webgpu:
    runs-on: ubuntu-22.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-webgpu
          evict-old-files: 1d

      - name: Vulkan SDK Dependencies
        id: vulkan-depends
        run: |
          wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | sudo apt-key add -
          sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list
          sudo apt-get update -y
          sudo apt-get install -y build-essential mesa-vulkan-drivers vulkan-sdk libcurl4-openssl-dev

      - name: Dawn Dependency
        id: dawn-depends
        run: |
          sudo apt-get install -y libxrandr-dev libxinerama-dev libxcursor-dev mesa-common-dev libx11-xcb-dev libxi-dev
          DAWN_VERSION="v1.0.0"
          DAWN_OWNER="reeselevine"
          DAWN_REPO="dawn"
          DAWN_ASSET_NAME="Dawn-a1a6b45cced25a3b7f4fb491e0ae70796cc7f22b-ubuntu-latest-Release.tar.gz"
          echo "Fetching release asset from https://github.com/${DAWN_OWNER}/${DAWN_REPO}/releases/download/${DAWN_VERSION}/${DAWN_ASSET_NAME}"
          curl -L -o artifact.tar.gz \
            "https://github.com/${DAWN_OWNER}/${DAWN_REPO}/releases/download/${DAWN_VERSION}/${DAWN_ASSET_NAME}"
          mkdir dawn
          tar -xvf artifact.tar.gz -C dawn --strip-components=1

      - name: Build
        id: cmake_build
        run: |
          export Dawn_DIR=dawn/lib64/cmake/Dawn
          cmake -B build -DGGML_WEBGPU=ON
          cmake --build build --config Release -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          # This is using llvmpipe and runs slower than other backends
          ctest -L main --verbose --timeout 3600

  ubuntu-22-cmake-hip:
    runs-on: ubuntu-22.04
    container: rocm/dev-ubuntu-22.04:6.1.2

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install -y build-essential git cmake rocblas-dev hipblas-dev libcurl4-openssl-dev

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-hip
          evict-old-files: 1d

      - name: Build with native CMake HIP support
        id: cmake_build
        run: |
          cmake -B build -S . \
            -DCMAKE_HIP_COMPILER="$(hipconfig -l)/clang" \
            -DGGML_HIP_ROCWMMA_FATTN=ON \
            -DGGML_HIP=ON
          cmake --build build --config Release -j $(nproc)

  ubuntu-22-cmake-musa:
    runs-on: ubuntu-22.04
    container: mthreads/musa:rc4.3.0-devel-ubuntu22.04-amd64

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Dependencies
        id: depends
        run: |
          apt-get update
          apt-get install -y build-essential git cmake libcurl4-openssl-dev

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-musa
          evict-old-files: 1d

      - name: Build with native CMake MUSA support
        id: cmake_build
        run: |
          cmake -B build -S . \
            -DGGML_MUSA=ON
          cmake --build build --config Release -j $(nproc)
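
  # The two SYCL jobs install the Intel oneAPI DPC++ compiler and MKL from Intel's apt
  # repository and build with icx/icpx; the -fp16 variant additionally enables
  # GGML_SYCL_F16.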
  ubuntu-22-cmake-sycl:
    runs-on: ubuntu-22.04

    continue-on-error: true

    steps:
      - uses: actions/checkout@v4

      - name: add oneAPI to apt
        shell: bash
        run: |
          cd /tmp
          wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"

      - name: install oneAPI dpcpp compiler
        shell: bash
        run: |
          sudo apt update
          sudo apt install intel-oneapi-compiler-dpcpp-cpp libcurl4-openssl-dev

      - name: install oneAPI MKL library
        shell: bash
        run: |
          sudo apt install intel-oneapi-mkl-devel

      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-sycl
          evict-old-files: 1d

      - name: Build
        id: cmake_build
        run: |
          source /opt/intel/oneapi/setvars.sh
          cmake -B build \
            -DGGML_SYCL=ON \
            -DCMAKE_C_COMPILER=icx \
            -DCMAKE_CXX_COMPILER=icpx
          cmake --build build --config Release -j $(nproc)

  ubuntu-22-cmake-sycl-fp16:
    runs-on: ubuntu-22.04

    continue-on-error: true

    steps:
      - uses: actions/checkout@v4

      - name: add oneAPI to apt
        shell: bash
        run: |
          cd /tmp
          wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"

      - name: install oneAPI dpcpp compiler
        shell: bash
        run: |
          sudo apt update
          sudo apt install intel-oneapi-compiler-dpcpp-cpp libcurl4-openssl-dev

      - name: install oneAPI MKL library
        shell: bash
        run: |
          sudo apt install intel-oneapi-mkl-devel

      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-sycl-fp16
          evict-old-files: 1d

      - name: Build
        id: cmake_build
        run: |
          source /opt/intel/oneapi/setvars.sh
          cmake -B build \
            -DGGML_SYCL=ON \
            -DCMAKE_C_COMPILER=icx \
            -DCMAKE_CXX_COMPILER=icpx \
            -DGGML_SYCL_F16=ON
          cmake --build build --config Release -j $(nproc)
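
  # These two jobs delegate to reusable workflows kept in separate files.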
  build-linux-cross:
    uses: ./.github/workflows/build-linux-cross.yml

  build-cmake-pkg:
    uses: ./.github/workflows/build-cmake-pkg.yml
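
  # The iOS/tvOS/visionOS jobs are Xcode-generator cross-builds of the libraries only
  # (examples, tools, tests and the server are disabled), with code signing turned off
  # via CODE_SIGNING_ALLOWED=NO.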
  macOS-latest-cmake-ios:
    runs-on: macos-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-ios
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_BUILD_COMMON=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_SYSTEM_NAME=iOS \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \
            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO

  macOS-latest-cmake-tvos:
    runs-on: macos-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-tvos
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_BUILD_COMMON=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_SYSTEM_NAME=tvOS \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \
            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO

  macOS-latest-cmake-visionos:
    runs-on: macos-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_BUILD_COMMON=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_SYSTEM_NAME=visionOS \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=1.0 \
            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO
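
  # Requires the llama.xcframework artifact uploaded by the ios-xcode-build job defined
  # further down.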
  macOS-latest-swift:
    runs-on: macos-latest
    needs: ios-xcode-build

    strategy:
      matrix:
        destination: ['generic/platform=macOS', 'generic/platform=iOS', 'generic/platform=tvOS']

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-swift
          evict-old-files: 1d

      - name: Download xcframework artifact
        uses: actions/download-artifact@v4
        with:
          name: llama-xcframework
          path: build-apple/llama.xcframework/

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update

      - name: Build llama.cpp with CMake
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_CURL=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64"
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)

  windows-msys2:
    runs-on: windows-2025

    strategy:
      fail-fast: false
      matrix:
        include:
          - { sys: UCRT64,  env: ucrt-x86_64,  build: Release }
          - { sys: CLANG64, env: clang-x86_64, build: Release }

    steps:
      - name: Clone
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-msys2
          variant: ccache
          evict-old-files: 1d

      - name: Setup ${{ matrix.sys }}
        uses: msys2/setup-msys2@v2
        with:
          update: true
          msystem: ${{matrix.sys}}
          install: >-
            base-devel
            git
            mingw-w64-${{matrix.env}}-toolchain
            mingw-w64-${{matrix.env}}-cmake
            mingw-w64-${{matrix.env}}-openblas

      - name: Build using CMake
        shell: msys2 {0}
        run: |
          cmake -B build
          cmake --build build --config ${{ matrix.build }} -j $(nproc)

      - name: Clean after building using CMake
        shell: msys2 {0}
        run: |
          rm -rf build

      - name: Build using CMake w/ OpenBLAS
        shell: msys2 {0}
        run: |
          cmake -B build -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS
          cmake --build build --config ${{ matrix.build }} -j $(nproc)
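
  # One Windows build per backend configuration; each matrix entry supplies the extra
  # CMake defines for its variant (static CPU, OpenBLAS, Vulkan, ARM64 LLVM,
  # ARM64 OpenCL/Adreno). Tests only run on the x64 variants.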
  windows-latest-cmake:
    runs-on: windows-2025

    env:
      OPENBLAS_VERSION: 0.3.23
      SDE_VERSION: 9.33.0-2024-01-07
      VULKAN_VERSION: 1.4.313.2

    strategy:
      matrix:
        include:
          - build: 'cpu-x64 (static)'
            arch: 'x64'
            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DBUILD_SHARED_LIBS=OFF'
          - build: 'openblas-x64'
            arch: 'x64'
            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS -DBLAS_INCLUDE_DIRS="$env:RUNNER_TEMP/openblas/include" -DBLAS_LIBRARIES="$env:RUNNER_TEMP/openblas/lib/openblas.lib"'
          - build: 'vulkan-x64'
            arch: 'x64'
            defines: '-DCMAKE_BUILD_TYPE=Release -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_VULKAN=ON'
          - build: 'llvm-arm64'
            arch: 'arm64'
            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/arm64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON'
          - build: 'llvm-arm64-opencl-adreno'
            arch: 'arm64'
            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/arm64-windows-llvm.cmake -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/opencl-arm64-release" -DGGML_OPENCL=ON -DGGML_OPENCL_USE_ADRENO_KERNELS=ON'

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-latest-cmake-${{ matrix.build }}
          variant: ccache
          evict-old-files: 1d

      - name: Download OpenBLAS
        id: get_openblas
        if: ${{ matrix.build == 'openblas-x64' }}
        run: |
          curl.exe -o $env:RUNNER_TEMP/openblas.zip -L "https://github.com/xianyi/OpenBLAS/releases/download/v${env:OPENBLAS_VERSION}/OpenBLAS-${env:OPENBLAS_VERSION}-x64.zip"
          curl.exe -o $env:RUNNER_TEMP/OpenBLAS.LICENSE.txt -L "https://github.com/xianyi/OpenBLAS/raw/v${env:OPENBLAS_VERSION}/LICENSE"
          mkdir $env:RUNNER_TEMP/openblas
          tar.exe -xvf $env:RUNNER_TEMP/openblas.zip -C $env:RUNNER_TEMP/openblas
          $vcdir = $(vswhere -latest -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath)
          $msvc = $(join-path $vcdir $('VC\Tools\MSVC\'+$(gc -raw $(join-path $vcdir 'VC\Auxiliary\Build\Microsoft.VCToolsVersion.default.txt')).Trim()))
          $lib = $(join-path $msvc 'bin\Hostx64\x64\lib.exe')
          & $lib /machine:x64 "/def:${env:RUNNER_TEMP}/openblas/lib/libopenblas.def" "/out:${env:RUNNER_TEMP}/openblas/lib/openblas.lib" /name:openblas.dll

      - name: Install Vulkan SDK
        id: get_vulkan
        if: ${{ matrix.build == 'vulkan-x64' }}
        run: |
          curl.exe -o $env:RUNNER_TEMP/VulkanSDK-Installer.exe -L "https://sdk.lunarg.com/sdk/download/${env:VULKAN_VERSION}/windows/vulkansdk-windows-X64-${env:VULKAN_VERSION}.exe"
          & "$env:RUNNER_TEMP\VulkanSDK-Installer.exe" --accept-licenses --default-answer --confirm-command install
          Add-Content $env:GITHUB_ENV "VULKAN_SDK=C:\VulkanSDK\${env:VULKAN_VERSION}"
          Add-Content $env:GITHUB_PATH "C:\VulkanSDK\${env:VULKAN_VERSION}\bin"

      - name: Install Ninja
        id: install_ninja
        run: |
          choco install ninja

      - name: Install OpenCL Headers and Libs
        id: install_opencl
        if: ${{ matrix.build == 'llvm-arm64-opencl-adreno' }}
        run: |
          git clone https://github.com/KhronosGroup/OpenCL-Headers
          cd OpenCL-Headers
          cmake -B build `
            -DBUILD_TESTING=OFF `
            -DOPENCL_HEADERS_BUILD_TESTING=OFF `
            -DOPENCL_HEADERS_BUILD_CXX_TESTS=OFF `
            -DCMAKE_INSTALL_PREFIX="$env:RUNNER_TEMP/opencl-arm64-release"
          cmake --build build --target install
          git clone https://github.com/KhronosGroup/OpenCL-ICD-Loader
          cd OpenCL-ICD-Loader
          cmake -B build-arm64-release `
            -A arm64 `
            -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/opencl-arm64-release" `
            -DCMAKE_INSTALL_PREFIX="$env:RUNNER_TEMP/opencl-arm64-release"
          cmake --build build-arm64-release --target install --config release

      - name: libCURL
        id: get_libcurl
        uses: ./.github/actions/windows-setup-curl
        with:
          architecture: ${{ matrix.arch == 'x64' && 'win64' || 'win64a' }}

      - name: Build
        id: cmake_build
        env:
          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
        run: |
          cmake -S . -B build ${{ matrix.defines }} `
            -DCURL_LIBRARY="$env:CURL_PATH/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="$env:CURL_PATH/include"
          cmake --build build --config Release -j ${env:NUMBER_OF_PROCESSORS}
          cp $env:CURL_PATH/bin/libcurl-*.dll build/bin/Release

      - name: Add libopenblas.dll
        id: add_libopenblas_dll
        if: ${{ matrix.build == 'openblas-x64' }}
        run: |
          cp $env:RUNNER_TEMP/openblas/bin/libopenblas.dll ./build/bin/Release/openblas.dll
          cp $env:RUNNER_TEMP/OpenBLAS.LICENSE.txt ./build/bin/Release/OpenBLAS-${env:OPENBLAS_VERSION}.txt

      - name: Test
        id: cmake_test
        if: ${{ matrix.arch == 'x64' }}
        run: |
          cd build
          ctest -L main -C Release --verbose --timeout 900

      # TODO: disabled for now, consider adding tests for all CPU variants instead
      # - name: Test (Intel SDE)
      #   id: cmake_test_sde
      #   if: ${{ matrix.build == 'avx512-x64' && env.HAS_AVX512F == '0' }} # use Intel SDE for AVX-512 emulation
      #   run: |
      #     curl.exe -o $env:RUNNER_TEMP/sde.tar.xz -L "https://downloadmirror.intel.com/813591/sde-external-${env:SDE_VERSION}-win.tar.xz"
      #     # for some weird reason windows tar doesn't like sde tar.xz
      #     7z x "-o${env:RUNNER_TEMP}" $env:RUNNER_TEMP/sde.tar.xz
      #     7z x "-o${env:RUNNER_TEMP}" $env:RUNNER_TEMP/sde.tar
      #     $sde = $(join-path $env:RUNNER_TEMP sde-external-${env:SDE_VERSION}-win/sde.exe)
      #     cd build
      #     $env:LLAMA_SKIP_TESTS_SLOW_ON_EMULATOR = 1
      #     & $sde -future -- ctest -L main -C Release --verbose --timeout 900
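
  # Linux CUDA build inside NVIDIA's CUDA devel container; compile-only (there is no
  # test step), targeting a single CUDA architecture (89-real).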
  ubuntu-latest-cmake-cuda:
    runs-on: ubuntu-latest
    container: nvidia/cuda:12.6.2-devel-ubuntu24.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Install dependencies
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          apt update
          apt install -y cmake build-essential ninja-build libgomp1 git libcurl4-openssl-dev

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-latest-cmake-cuda
          evict-old-files: 1d

      - name: Build with CMake
        run: |
          cmake -S . -B build -G Ninja \
            -DCMAKE_BUILD_TYPE=Release \
            -DCMAKE_CUDA_ARCHITECTURES=89-real \
            -DCMAKE_EXE_LINKER_FLAGS=-Wl,--allow-shlib-undefined \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DGGML_NATIVE=OFF \
            -DGGML_CUDA=ON
          cmake --build build

  windows-2022-cmake-cuda:
    runs-on: windows-2022

    strategy:
      matrix:
        cuda: ['12.4']

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Install ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-cuda-${{ matrix.cuda }}
          variant: ccache
          evict-old-files: 1d

      - name: Install Cuda Toolkit
        uses: ./.github/actions/windows-setup-cuda
        with:
          cuda_version: ${{ matrix.cuda }}

      - name: Install Ninja
        id: install_ninja
        run: |
          choco install ninja

      - name: libCURL
        id: get_libcurl
        uses: ./.github/actions/windows-setup-curl

      - name: Build
        id: cmake_build
        shell: cmd
        env:
          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
        run: |
          call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" x64
          cmake -S . -B build -G "Ninja Multi-Config" ^
            -DLLAMA_BUILD_SERVER=ON ^
            -DGGML_NATIVE=OFF ^
            -DGGML_BACKEND_DL=ON ^
            -DGGML_CPU_ALL_VARIANTS=ON ^
            -DGGML_CUDA=ON ^
            -DGGML_RPC=ON ^
            -DCURL_LIBRARY="%CURL_PATH%/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="%CURL_PATH%/include"
          set /A NINJA_JOBS=%NUMBER_OF_PROCESSORS%-1
          cmake --build build --config Release -j %NINJA_JOBS% -t ggml
          cmake --build build --config Release

  windows-latest-cmake-sycl:
    runs-on: windows-2022

    defaults:
      run:
        shell: bash

    env:
      WINDOWS_BASEKIT_URL: https://registrationcenter-download.intel.com/akdlm/IRC_NAS/7cd9bba0-7aab-4e30-b3ae-2221006a4a05/intel-oneapi-base-toolkit-2025.1.1.34_offline.exe
      WINDOWS_DPCPP_MKL: intel.oneapi.win.cpp-dpcpp-common:intel.oneapi.win.mkl.devel:intel.oneapi.win.dnnl:intel.oneapi.win.tbb.devel
      ONEAPI_ROOT: "C:/Program Files (x86)/Intel/oneAPI"

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-latest-cmake-sycl
          variant: ccache
          evict-old-files: 1d

      - name: Install
        run: |
          scripts/install-oneapi.bat $WINDOWS_BASEKIT_URL $WINDOWS_DPCPP_MKL

      # TODO: add libcurl support ; we will also need to modify win-build-sycl.bat to accept user-specified args
      - name: Build
        id: cmake_build
        run: examples/sycl/win-build-sycl.bat
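
  # The Windows HIP job installs the AMD HIP SDK (cached between runs), clones the
  # rocWMMA headers used by GGML_HIP_ROCWMMA_FATTN, and builds with the SDK's
  # clang/clang++.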
  windows-latest-cmake-hip:
    runs-on: windows-2022

    env:
      # The ROCm version must correspond to the version used in the HIP SDK.
      ROCM_VERSION: "6.4.2"
      HIPSDK_INSTALLER_VERSION: "25.Q3"

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Clone rocWMMA repository
        id: clone_rocwmma
        run: |
          git clone https://github.com/rocm/rocwmma --branch rocm-${{ env.ROCM_VERSION }} --depth 1

      - name: Cache ROCm Installation
        id: cache-rocm
        uses: actions/cache@v4
        with:
          path: C:\Program Files\AMD\ROCm
          key: rocm-${{ env.HIPSDK_INSTALLER_VERSION }}-${{ runner.os }}

      - name: Install ROCm
        if: steps.cache-rocm.outputs.cache-hit != 'true'
        id: depends
        run: |
          $ErrorActionPreference = "Stop"
          write-host "Downloading AMD HIP SDK Installer"
          Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-${{ env.HIPSDK_INSTALLER_VERSION }}-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe"
          write-host "Installing AMD HIP SDK"
          $proc = Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -PassThru
          $completed = $proc.WaitForExit(600000)
          if (-not $completed) {
            Write-Error "ROCm installation timed out after 10 minutes. Killing the process"
            $proc.Kill()
            exit 1
          }
          if ($proc.ExitCode -ne 0) {
            Write-Error "ROCm installation failed with exit code $($proc.ExitCode)"
            exit 1
          }
          write-host "Completed AMD HIP SDK installation"

      - name: Verify ROCm
        id: verify
        run: |
          # Find and test ROCm installation
          $clangPath = Get-ChildItem 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | Select-Object -First 1
          if (-not $clangPath) {
            Write-Error "ROCm installation not found"
            exit 1
          }
          & $clangPath.FullName --version

      - name: Install ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ${{ github.job }}
          evict-old-files: 1d

      - name: libCURL
        id: get_libcurl
        uses: ./.github/actions/windows-setup-curl

      - name: Build
        id: cmake_build
        env:
          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
        run: |
          $env:HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path)
          $env:CMAKE_PREFIX_PATH="${env:HIP_PATH}"
          cmake -G "Unix Makefiles" -B build -S . `
            -DCMAKE_C_COMPILER="${env:HIP_PATH}\bin\clang.exe" `
            -DCMAKE_CXX_COMPILER="${env:HIP_PATH}\bin\clang++.exe" `
            -DCMAKE_CXX_FLAGS="-I$($PWD.Path.Replace('\', '/'))/rocwmma/library/include/" `
            -DCMAKE_BUILD_TYPE=Release `
            -DGGML_HIP=ON `
            -DGGML_HIP_ROCWMMA_FATTN=ON `
            -DGGML_RPC=ON `
            -DCURL_LIBRARY="$env:CURL_PATH/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="$env:CURL_PATH/include"
          cmake --build build -j ${env:NUMBER_OF_PROCESSORS}
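
  # Builds the llama.xcframework via build-xcframework.sh and uploads it as the
  # artifact consumed by the macOS-latest-swift job above.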
  ios-xcode-build:
    runs-on: macos-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Xcode
        uses: maxim-lobanov/setup-xcode@v1
        with:
          xcode-version: latest-stable

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_CURL=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_SYSTEM_NAME=iOS \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \
            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO

      - name: xcodebuild for swift package
        id: xcodebuild
        run: |
          ./build-xcframework.sh

      - name: Upload xcframework artifact
        uses: actions/upload-artifact@v4
        with:
          name: llama-xcframework
          path: build-apple/llama.xcframework/
          retention-days: 1

      - name: Build Xcode project
        run: |
          xcodebuild -downloadPlatform iOS
          xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' FRAMEWORK_FOLDER_PATH=./build-ios build

  android-build:
    runs-on: ubuntu-latest

    steps:
      - name: Clone
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: android-build
          evict-old-files: 1d

      - name: Set up JDK
        uses: actions/setup-java@v3
        with:
          java-version: 17
          distribution: zulu

      - name: Setup Android SDK
        uses: android-actions/setup-android@v3
        with:
          log-accepted-android-sdk-licenses: false

      - name: Build
        run: |
          cd examples/llama.android
          ./gradlew build --no-daemon
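
  # Ascend CANN build; skipped on pull requests unless the PR carries the
  # 'Ascend NPU' label.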
  openEuler-latest-cmake-cann:
    if: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'Ascend NPU') }}

    defaults:
      run:
        shell: bash -el {0}

    strategy:
      matrix:
        arch: [x86, aarch64]
        cann:
          - '8.1.RC1.alpha001-910b-openeuler22.03-py3.10'
        device:
          - 'ascend910b3'
        build:
          - 'Release'

    runs-on: ${{ matrix.arch == 'aarch64' && 'ubuntu-24.04-arm' || 'ubuntu-24.04' }}
    container: ascendai/cann:${{ matrix.cann }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Dependencies
        run: |
          yum update -y
          yum install -y git gcc gcc-c++ make cmake libcurl-devel

      - name: Build
        run: |
          export LD_LIBRARY_PATH=${ASCEND_TOOLKIT_HOME}/lib64:${ASCEND_TOOLKIT_HOME}/$(uname -m)-linux/devlib/:${LD_LIBRARY_PATH}
          cmake -S . -B build \
            -DCMAKE_BUILD_TYPE=${{ matrix.build }} \
            -DGGML_CANN=on \
            -DSOC_TYPE=${{ matrix.device }}
          cmake --build build -j $(nproc)

  # TODO: simplify the following workflows using a matrix
  # TODO: run lighter CI on PRs and the full CI only on master (if needed)
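  # The ggml-ci-* jobs below all drive the same ci/run.sh script; the GG_BUILD_*
  # variables select the backend and feature set, and the self-hosted variants target
  # runners with the matching hardware labels.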
  ggml-ci-x64-cpu-low-perf:
    runs-on: ubuntu-22.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ggml-ci-x64-cpu-low-perf
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Test
        id: ggml-ci
        run: |
          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_LOW_PERF=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

  ggml-ci-arm64-cpu-low-perf:
    runs-on: ubuntu-22.04-arm

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ggml-ci-arm64-cpu-low-perf
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Test
        id: ggml-ci
        run: |
          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_LOW_PERF=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

  ggml-ci-x64-cpu-high-perf:
    runs-on: ubuntu-22.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ggml-ci-x64-cpu-high-perf
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Test
        id: ggml-ci
        run: |
          LLAMA_ARG_THREADS=$(nproc) bash ./ci/run.sh ./tmp/results ./tmp/mnt

  ggml-ci-arm64-cpu-high-perf:
    runs-on: ubuntu-22.04-arm

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ggml-ci-arm64-cpu-high-perf
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Test
        id: ggml-ci
        run: |
          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_NO_SVE=1 GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

  ggml-ci-arm64-cpu-high-perf-sve:
    runs-on: ubuntu-22.04-arm

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ggml-ci-arm64-cpu-high-perf-sve
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Test
        id: ggml-ci
        run: |
          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

  ggml-ci-x64-nvidia-cuda:
    runs-on: [self-hosted, Linux, X64, NVIDIA]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          nvidia-smi
          GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

  ggml-ci-x64-nvidia-vulkan-cm:
    runs-on: [self-hosted, Linux, X64, NVIDIA]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          vulkaninfo --summary
          GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

  ggml-ci-x64-nvidia-vulkan-cm2:
    runs-on: [self-hosted, Linux, X64, NVIDIA, COOPMAT2]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          vulkaninfo --summary
          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

  ggml-ci-x64-cpu-amx:
    runs-on: [self-hosted, Linux, X64, CPU, AMX]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

  # ggml-ci-x64-amd-vulkan:
  #   runs-on: [self-hosted, Linux, X64, AMD]
  #
  #   steps:
  #     - name: Clone
  #       id: checkout
  #       uses: actions/checkout@v4
  #
  #     - name: Test
  #       id: ggml-ci
  #       run: |
  #         vulkaninfo --summary
  #         GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
  #
  # ggml-ci-x64-amd-rocm:
  #   runs-on: [self-hosted, Linux, X64, AMD]
  #
  #   steps:
  #     - name: Clone
  #       id: checkout
  #       uses: actions/checkout@v4
  #
  #     - name: Test
  #       id: ggml-ci
  #       run: |
  #         amd-smi static
  #         GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

  ggml-ci-mac-metal:
    runs-on: [self-hosted, macOS, ARM64]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          GG_BUILD_METAL=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp

  ggml-ci-mac-vulkan:
    runs-on: [self-hosted, macOS, ARM64]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          vulkaninfo --summary
          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp