name: CI

on:
  workflow_dispatch: # allows manual triggering
  push:
    branches:
      - master
    paths: [
      '.github/workflows/build.yml',
      '.github/workflows/build-linux-cross.yml',
      '.github/workflows/build-cmake-pkg.yml',
      '**/CMakeLists.txt',
      '**/*.cmake',
      '**/*.h',
      '**/*.hpp',
      '**/*.c',
      '**/*.cpp',
      '**/*.cu',
      '**/*.cuh',
      '**/*.swift',
      '**/*.m',
      '**/*.metal',
      '**/*.comp'
      ]
  pull_request:
    types: [opened, synchronize, reopened]
    paths: [
      '.github/workflows/build.yml',
      '.github/workflows/build-linux-cross.yml',
      '.github/workflows/build-cmake-pkg.yml',
      '**/CMakeLists.txt',
      '**/*.cmake',
      '**/*.h',
      '**/*.hpp',
      '**/*.c',
      '**/*.cpp',
      '**/*.cu',
      '**/*.cuh',
      '**/*.swift',
      '**/*.m',
      '**/*.metal',
      '**/*.comp'
      ]

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
  cancel-in-progress: true

env:
  GGML_NLOOP: 3
  GGML_N_THREADS: 1
  LLAMA_LOG_COLORS: 1
  LLAMA_LOG_PREFIX: 1
  LLAMA_LOG_TIMESTAMPS: 1

jobs:
  macOS-latest-cmake-arm64:
    runs-on: macos-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-arm64
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update
          brew install curl

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build \
            -DCMAKE_BUILD_RPATH="@loader_path" \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=OFF \
            -DGGML_METAL_SHADER_DEBUG=ON \
            -DGGML_RPC=ON
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)
          leaks -atExit -- ./build/bin/test-thread-safety -hf ggml-org/gemma-3-270m-qat-GGUF -ngl 99 -p "$(printf 'hello %.0s' {1..128})" -n 16 -c 512 -ub 32 -np 2 -t 2 -lv 1

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L 'main|curl' --verbose --timeout 900

  macOS-latest-cmake-x64:
    runs-on: macos-13

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-x64
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update
          brew install curl

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          # Metal is disabled due to intermittent failures with Github runners not having a GPU:
          # https://github.com/ggml-org/llama.cpp/actions/runs/8635935781/job/23674807267#step:5:2313
          cmake -B build \
            -DCMAKE_BUILD_RPATH="@loader_path" \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DGGML_METAL=OFF \
            -DGGML_RPC=ON \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=13.3
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L main --verbose --timeout 900

  macOS-latest-cmake-arm64-webgpu:
    runs-on: macos-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-arm64-webgpu
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update
          brew install curl

      - name: Dawn Dependency
        id: dawn-depends
        run: |
          DAWN_VERSION="v1.0.0"
          DAWN_OWNER="reeselevine"
          DAWN_REPO="dawn"
          DAWN_ASSET_NAME="Dawn-a1a6b45cced25a3b7f4fb491e0ae70796cc7f22b-macos-latest-Release.tar.gz"
          echo "Fetching release asset from https://github.com/${DAWN_OWNER}/${DAWN_REPO}/releases/download/${DAWN_VERSION}/${DAWN_ASSET_NAME}"
          curl -L -o artifact.tar.gz \
            "https://github.com/${DAWN_OWNER}/${DAWN_REPO}/releases/download/${DAWN_VERSION}/${DAWN_ASSET_NAME}"
          mkdir dawn
          tar -xvf artifact.tar.gz -C dawn --strip-components=1

      - name: Build
        id: cmake_build
        run: |
          export CMAKE_PREFIX_PATH=dawn
          cmake -B build -DGGML_WEBGPU=ON -DGGML_METAL=OFF -DGGML_BLAS=OFF
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L main --verbose --timeout 900

  ubuntu-cpu-cmake:
    strategy:
      matrix:
        include:
          - build: 'x64'
            os: ubuntu-22.04
          - build: 'arm64'
            os: ubuntu-22.04-arm
          - build: 's390x'
            os: ubuntu-24.04-s390x
          - build: 'ppc64le'
            os: ubuntu-24.04-ppc64le

    runs-on: ${{ matrix.os }}

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-cpu-cmake
          evict-old-files: 1d

      - name: Build Dependencies
        id: build_depends
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends \
            python3 python3-pip python3-dev \
            libjpeg-dev build-essential libcurl4-openssl-dev \
            git-lfs

      - name: Python Dependencies
        id: python_depends
        run: |
          python3 -m pip install --upgrade pip
          pip3 install ./gguf-py

      - name: Swap Endianness
        id: endianness
        if: ${{ matrix.build == 's390x' }}
        run: |
          for f in models/*.gguf; do
            echo YES | python3 gguf-py/gguf/scripts/gguf_convert_endian.py $f big
          done

      - name: Build
        id: cmake_build
        run: |
          cmake -B build \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DGGML_RPC=ON
          cmake --build build --config Release -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L 'main|curl' --verbose --timeout 900

      - name: Test llama2c conversion
        id: llama2c_test
        if: ${{ matrix.build != 's390x' }}
        run: |
          cd build
          echo "Fetch tokenizer"
          wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories260K/tok512.bin
          echo "Fetch llama2c model"
          wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories260K/stories260K.bin
          ./bin/llama-convert-llama2c-to-ggml --copy-vocab-from-model ./tok512.bin --llama2c-model stories260K.bin --llama2c-output-model stories260K.gguf
          ./bin/llama-cli -m stories260K.gguf -p "One day, Lily met a Shoggoth" -n 500 -c 256

      - name: Test llama2c (s390x)
        id: llama2c_test_s390x
        if: ${{ matrix.build == 's390x' }}
        run: |
          cd build
          echo "Fetch llama2c big-endian model"
          wget https://huggingface.co/ggml-org/models/resolve/main/tinyllamas/stories260K-be.gguf
          ./bin/llama-cli -m stories260K-be.gguf -p "One day, Lily met a Shoggoth" -n 500 -c 256

  ubuntu-latest-cmake-sanitizer:
    runs-on: ubuntu-latest
    continue-on-error: true

    strategy:
      matrix:
        sanitizer: [ADDRESS, THREAD, UNDEFINED]
        build_type: [Debug]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-latest-cmake-sanitizer-${{ matrix.sanitizer }}
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Build
        id: cmake_build
        if: ${{ matrix.sanitizer != 'THREAD' }}
        run: |
          cmake -B build \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON \
            -DCMAKE_BUILD_TYPE=${{ matrix.build_type }}
          cmake --build build --config ${{ matrix.build_type }} -j $(nproc)

      - name: Build (no OpenMP)
        id: cmake_build_no_openmp
        if: ${{ matrix.sanitizer == 'THREAD' }}
        run: |
          cmake -B build \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON \
            -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} \
            -DGGML_OPENMP=OFF
          cmake --build build --config ${{ matrix.build_type }} -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L main --verbose --timeout 900

  ubuntu-latest-llguidance:
    runs-on: ubuntu-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Build
        id: cmake_build
        run: |
          mkdir build
          cd build
          cmake .. \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DLLAMA_LLGUIDANCE=ON
          cmake --build . --config Release -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L main --verbose --timeout 900

  ubuntu-latest-cmake-rpc:
    runs-on: ubuntu-latest
    continue-on-error: true

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-latest-cmake-rpc
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Build
        id: cmake_build
        run: |
          cmake -B build \
            -DGGML_RPC=ON
          cmake --build build --config Release -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          ctest -L main --verbose

  ubuntu-22-cmake-vulkan:
    runs-on: ubuntu-22.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-vulkan
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | sudo apt-key add -
          sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list
          sudo apt-get update -y
          sudo apt-get install -y build-essential mesa-vulkan-drivers vulkan-sdk libcurl4-openssl-dev

      - name: Build
        id: cmake_build
        run: |
          cmake -B build \
            -DGGML_VULKAN=ON
          cmake --build build --config Release -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          export GGML_VK_VISIBLE_DEVICES=0
          # This is using llvmpipe and runs slower than other backends
          ctest -L main --verbose --timeout 4200

  ubuntu-22-cmake-webgpu:
    runs-on: ubuntu-22.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-webgpu
          evict-old-files: 1d

      - name: Vulkan SDK Dependencies
        id: vulkan-depends
        run: |
          wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | sudo apt-key add -
          sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list
          sudo apt-get update -y
          sudo apt-get install -y build-essential mesa-vulkan-drivers vulkan-sdk libcurl4-openssl-dev

      - name: Dawn Dependency
        id: dawn-depends
        run: |
          sudo apt-get install -y libxrandr-dev libxinerama-dev libxcursor-dev mesa-common-dev libx11-xcb-dev libxi-dev
          DAWN_VERSION="v1.0.0"
          DAWN_OWNER="reeselevine"
          DAWN_REPO="dawn"
          DAWN_ASSET_NAME="Dawn-a1a6b45cced25a3b7f4fb491e0ae70796cc7f22b-ubuntu-latest-Release.tar.gz"
          echo "Fetching release asset from https://github.com/${DAWN_OWNER}/${DAWN_REPO}/releases/download/${DAWN_VERSION}/${DAWN_ASSET_NAME}"
          curl -L -o artifact.tar.gz \
            "https://github.com/${DAWN_OWNER}/${DAWN_REPO}/releases/download/${DAWN_VERSION}/${DAWN_ASSET_NAME}"
          mkdir dawn
          tar -xvf artifact.tar.gz -C dawn --strip-components=1

      - name: Build
        id: cmake_build
        run: |
          export Dawn_DIR=dawn/lib64/cmake/Dawn
          cmake -B build -DGGML_WEBGPU=ON
          cmake --build build --config Release -j $(nproc)

      - name: Test
        id: cmake_test
        run: |
          cd build
          # This is using llvmpipe and runs slower than other backends
          ctest -L main --verbose --timeout 3600

  ubuntu-22-cmake-hip:
    runs-on: ubuntu-22.04
    container: rocm/dev-ubuntu-22.04:6.1.2

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install -y build-essential git cmake rocblas-dev hipblas-dev libcurl4-openssl-dev

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-hip
          evict-old-files: 1d

      - name: Build with native CMake HIP support
        id: cmake_build
        run: |
          cmake -B build -S . \
            -DCMAKE_HIP_COMPILER="$(hipconfig -l)/clang" \
            -DGGML_HIP_ROCWMMA_FATTN=ON \
            -DGGML_HIP=ON
          cmake --build build --config Release -j $(nproc)

  ubuntu-22-cmake-musa:
    runs-on: ubuntu-22.04
    container: mthreads/musa:rc4.3.0-devel-ubuntu22.04-amd64

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Dependencies
        id: depends
        run: |
          apt-get update
          apt-get install -y build-essential git cmake libcurl4-openssl-dev

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-musa
          evict-old-files: 1d

      - name: Build with native CMake MUSA support
        id: cmake_build
        run: |
          cmake -B build -S . \
            -DGGML_MUSA=ON
          cmake --build build --config Release -j $(nproc)

  ubuntu-22-cmake-sycl:
    runs-on: ubuntu-22.04
    continue-on-error: true

    steps:
      - uses: actions/checkout@v4

      - name: add oneAPI to apt
        shell: bash
        run: |
          cd /tmp
          wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"

      - name: install oneAPI dpcpp compiler
        shell: bash
        run: |
          sudo apt update
          sudo apt install intel-oneapi-compiler-dpcpp-cpp libcurl4-openssl-dev

      - name: install oneAPI MKL library
        shell: bash
        run: |
          sudo apt install intel-oneapi-mkl-devel

      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-sycl
          evict-old-files: 1d

      - name: Build
        id: cmake_build
        run: |
          source /opt/intel/oneapi/setvars.sh
          cmake -B build \
            -DGGML_SYCL=ON \
            -DCMAKE_C_COMPILER=icx \
            -DCMAKE_CXX_COMPILER=icpx
          cmake --build build --config Release -j $(nproc)

  ubuntu-22-cmake-sycl-fp16:
    runs-on: ubuntu-22.04
    continue-on-error: true

    steps:
      - uses: actions/checkout@v4

      - name: add oneAPI to apt
        shell: bash
        run: |
          cd /tmp
          wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"

      - name: install oneAPI dpcpp compiler
        shell: bash
        run: |
          sudo apt update
          sudo apt install intel-oneapi-compiler-dpcpp-cpp libcurl4-openssl-dev

      - name: install oneAPI MKL library
        shell: bash
        run: |
          sudo apt install intel-oneapi-mkl-devel

      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-22-cmake-sycl-fp16
          evict-old-files: 1d

      - name: Build
        id: cmake_build
        run: |
          source /opt/intel/oneapi/setvars.sh
          cmake -B build \
            -DGGML_SYCL=ON \
            -DCMAKE_C_COMPILER=icx \
            -DCMAKE_CXX_COMPILER=icpx \
            -DGGML_SYCL_F16=ON
          cmake --build build --config Release -j $(nproc)

  build-linux-cross:
    uses: ./.github/workflows/build-linux-cross.yml

  build-cmake-pkg:
    uses: ./.github/workflows/build-cmake-pkg.yml

  macOS-latest-cmake-ios:
    runs-on: macos-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-ios
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_BUILD_COMMON=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_SYSTEM_NAME=iOS \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \
            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO

  macOS-latest-cmake-tvos:
    runs-on: macos-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-cmake-tvos
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_BUILD_COMMON=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_SYSTEM_NAME=tvOS \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \
            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO

  macOS-latest-cmake-visionos:
    runs-on: macos-latest

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_BUILD_COMMON=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_SYSTEM_NAME=visionOS \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=1.0 \
            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO

  macOS-latest-swift:
    runs-on: macos-latest
    needs: ios-xcode-build

    strategy:
      matrix:
        destination: ['generic/platform=macOS', 'generic/platform=iOS', 'generic/platform=tvOS']

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: macOS-latest-swift
          evict-old-files: 1d

      - name: Download xcframework artifact
        uses: actions/download-artifact@v4
        with:
          name: llama-xcframework
          path: build-apple/llama.xcframework/

      - name: Dependencies
        id: depends
        continue-on-error: true
        run: |
          brew update

      - name: Build llama.cpp with CMake
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_CURL=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64"
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)

  windows-msys2:
    runs-on: windows-2025

    strategy:
      fail-fast: false
      matrix:
        include:
          - { sys: UCRT64, env: ucrt-x86_64, build: Release }
          - { sys: CLANG64, env: clang-x86_64, build: Release }

    steps:
      - name: Clone
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-msys2
          variant: ccache
          evict-old-files: 1d

      - name: Setup ${{ matrix.sys }}
        uses: msys2/setup-msys2@v2
        with:
          update: true
          msystem: ${{matrix.sys}}
          install: >-
            base-devel
            git
            mingw-w64-${{matrix.env}}-toolchain
            mingw-w64-${{matrix.env}}-cmake
            mingw-w64-${{matrix.env}}-openblas

      - name: Build using CMake
        shell: msys2 {0}
        run: |
          cmake -B build
          cmake --build build --config ${{ matrix.build }} -j $(nproc)

      - name: Clean after building using CMake
        shell: msys2 {0}
        run: |
          rm -rf build

      - name: Build using CMake w/ OpenBLAS
        shell: msys2 {0}
        run: |
          cmake -B build -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS
          cmake --build build --config ${{ matrix.build }} -j $(nproc)

  windows-latest-cmake:
    runs-on: windows-2025

    env:
      OPENBLAS_VERSION: 0.3.23
      SDE_VERSION: 9.33.0-2024-01-07
      VULKAN_VERSION: 1.4.313.2

    strategy:
      matrix:
        include:
          - build: 'cpu-x64 (static)'
            arch: 'x64'
            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DBUILD_SHARED_LIBS=OFF'
          - build: 'openblas-x64'
            arch: 'x64'
            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS -DBLAS_INCLUDE_DIRS="$env:RUNNER_TEMP/openblas/include" -DBLAS_LIBRARIES="$env:RUNNER_TEMP/openblas/lib/openblas.lib"'
          - build: 'vulkan-x64'
            arch: 'x64'
            defines: '-DCMAKE_BUILD_TYPE=Release -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_VULKAN=ON'
          - build: 'llvm-arm64'
            arch: 'arm64'
            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/arm64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON'
          - build: 'llvm-arm64-opencl-adreno'
            arch: 'arm64'
            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/arm64-windows-llvm.cmake -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/opencl-arm64-release" -DGGML_OPENCL=ON -DGGML_OPENCL_USE_ADRENO_KERNELS=ON'

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-latest-cmake-${{ matrix.build }}
          variant: ccache
          evict-old-files: 1d

      - name: Download OpenBLAS
        id: get_openblas
        if: ${{ matrix.build == 'openblas-x64' }}
        run: |
          curl.exe -o $env:RUNNER_TEMP/openblas.zip -L "https://github.com/xianyi/OpenBLAS/releases/download/v${env:OPENBLAS_VERSION}/OpenBLAS-${env:OPENBLAS_VERSION}-x64.zip"
          curl.exe -o $env:RUNNER_TEMP/OpenBLAS.LICENSE.txt -L "https://github.com/xianyi/OpenBLAS/raw/v${env:OPENBLAS_VERSION}/LICENSE"
          mkdir $env:RUNNER_TEMP/openblas
          tar.exe -xvf $env:RUNNER_TEMP/openblas.zip -C $env:RUNNER_TEMP/openblas
          $vcdir = $(vswhere -latest -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath)
          $msvc = $(join-path $vcdir $('VC\Tools\MSVC\'+$(gc -raw $(join-path $vcdir 'VC\Auxiliary\Build\Microsoft.VCToolsVersion.default.txt')).Trim()))
          $lib = $(join-path $msvc 'bin\Hostx64\x64\lib.exe')
          & $lib /machine:x64 "/def:${env:RUNNER_TEMP}/openblas/lib/libopenblas.def" "/out:${env:RUNNER_TEMP}/openblas/lib/openblas.lib" /name:openblas.dll

      - name: Install Vulkan SDK
        id: get_vulkan
        if: ${{ matrix.build == 'vulkan-x64' }}
        run: |
          curl.exe -o $env:RUNNER_TEMP/VulkanSDK-Installer.exe -L "https://sdk.lunarg.com/sdk/download/${env:VULKAN_VERSION}/windows/vulkansdk-windows-X64-${env:VULKAN_VERSION}.exe"
          & "$env:RUNNER_TEMP\VulkanSDK-Installer.exe" --accept-licenses --default-answer --confirm-command install
          Add-Content $env:GITHUB_ENV "VULKAN_SDK=C:\VulkanSDK\${env:VULKAN_VERSION}"
          Add-Content $env:GITHUB_PATH "C:\VulkanSDK\${env:VULKAN_VERSION}\bin"

      - name: Install Ninja
        id: install_ninja
        run: |
          choco install ninja

      - name: Install OpenCL Headers and Libs
        id: install_opencl
        if: ${{ matrix.build == 'llvm-arm64-opencl-adreno' }}
        run: |
          git clone https://github.com/KhronosGroup/OpenCL-Headers
          cd OpenCL-Headers
          cmake -B build `
            -DBUILD_TESTING=OFF `
            -DOPENCL_HEADERS_BUILD_TESTING=OFF `
            -DOPENCL_HEADERS_BUILD_CXX_TESTS=OFF `
            -DCMAKE_INSTALL_PREFIX="$env:RUNNER_TEMP/opencl-arm64-release"
          cmake --build build --target install
          git clone https://github.com/KhronosGroup/OpenCL-ICD-Loader
          cd OpenCL-ICD-Loader
          cmake -B build-arm64-release `
            -A arm64 `
            -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/opencl-arm64-release" `
            -DCMAKE_INSTALL_PREFIX="$env:RUNNER_TEMP/opencl-arm64-release"
          cmake --build build-arm64-release --target install --config release

      - name: libCURL
        id: get_libcurl
        uses: ./.github/actions/windows-setup-curl
        with:
          architecture: ${{ matrix.arch == 'x64' && 'win64' || 'win64a' }}

      - name: Build
        id: cmake_build
        env:
          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
        run: |
          cmake -S . -B build ${{ matrix.defines }} `
            -DCURL_LIBRARY="$env:CURL_PATH/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="$env:CURL_PATH/include"
          cmake --build build --config Release -j ${env:NUMBER_OF_PROCESSORS}
          cp $env:CURL_PATH/bin/libcurl-*.dll build/bin/Release

      - name: Add libopenblas.dll
        id: add_libopenblas_dll
        if: ${{ matrix.build == 'openblas-x64' }}
        run: |
          cp $env:RUNNER_TEMP/openblas/bin/libopenblas.dll ./build/bin/Release/openblas.dll
          cp $env:RUNNER_TEMP/OpenBLAS.LICENSE.txt ./build/bin/Release/OpenBLAS-${env:OPENBLAS_VERSION}.txt

      - name: Test
        id: cmake_test
        if: ${{ matrix.arch == 'x64' }}
        run: |
          cd build
          ctest -L main -C Release --verbose --timeout 900

      # TODO: disabled for now, consider adding tests for all CPU variants instead
      # - name: Test (Intel SDE)
      #   id: cmake_test_sde
      #   if: ${{ matrix.build == 'avx512-x64' && env.HAS_AVX512F == '0' }} # use Intel SDE for AVX-512 emulation
      #   run: |
      #     curl.exe -o $env:RUNNER_TEMP/sde.tar.xz -L "https://downloadmirror.intel.com/813591/sde-external-${env:SDE_VERSION}-win.tar.xz"
      #     # for some weird reason windows tar doesn't like sde tar.xz
      #     7z x "-o${env:RUNNER_TEMP}" $env:RUNNER_TEMP/sde.tar.xz
      #     7z x "-o${env:RUNNER_TEMP}" $env:RUNNER_TEMP/sde.tar
      #     $sde = $(join-path $env:RUNNER_TEMP sde-external-${env:SDE_VERSION}-win/sde.exe)
      #     cd build
      #     $env:LLAMA_SKIP_TESTS_SLOW_ON_EMULATOR = 1
      #     & $sde -future -- ctest -L main -C Release --verbose --timeout 900

  ubuntu-latest-cmake-cuda:
    runs-on: ubuntu-latest
    container: nvidia/cuda:12.6.2-devel-ubuntu24.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Install dependencies
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          apt update
          apt install -y cmake build-essential ninja-build libgomp1 git libcurl4-openssl-dev

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ubuntu-latest-cmake-cuda
          evict-old-files: 1d

      - name: Build with CMake
        run: |
          cmake -S . -B build -G Ninja \
            -DCMAKE_BUILD_TYPE=Release \
            -DCMAKE_CUDA_ARCHITECTURES=89-real \
            -DCMAKE_EXE_LINKER_FLAGS=-Wl,--allow-shlib-undefined \
            -DLLAMA_FATAL_WARNINGS=ON \
            -DGGML_NATIVE=OFF \
            -DGGML_CUDA=ON
          cmake --build build

  windows-2022-cmake-cuda:
    runs-on: windows-2022

    strategy:
      matrix:
        cuda: ['12.4']

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Install ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-cuda-${{ matrix.cuda }}
          variant: ccache
          evict-old-files: 1d

      - name: Install Cuda Toolkit
        uses: ./.github/actions/windows-setup-cuda
        with:
          cuda_version: ${{ matrix.cuda }}

      - name: Install Ninja
        id: install_ninja
        run: |
          choco install ninja

      - name: libCURL
        id: get_libcurl
        uses: ./.github/actions/windows-setup-curl

      - name: Build
        id: cmake_build
        shell: cmd
        env:
          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
        run: |
          call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" x64
          cmake -S . -B build -G "Ninja Multi-Config" ^
            -DLLAMA_BUILD_SERVER=ON ^
            -DGGML_NATIVE=OFF ^
            -DGGML_BACKEND_DL=ON ^
            -DGGML_CPU_ALL_VARIANTS=ON ^
            -DGGML_CUDA=ON ^
            -DGGML_RPC=ON ^
            -DCURL_LIBRARY="%CURL_PATH%/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="%CURL_PATH%/include"
          set /A NINJA_JOBS=%NUMBER_OF_PROCESSORS%-1
          cmake --build build --config Release -j %NINJA_JOBS% -t ggml
          cmake --build build --config Release

  windows-latest-cmake-sycl:
    runs-on: windows-2022

    defaults:
      run:
        shell: bash

    env:
      WINDOWS_BASEKIT_URL: https://registrationcenter-download.intel.com/akdlm/IRC_NAS/7cd9bba0-7aab-4e30-b3ae-2221006a4a05/intel-oneapi-base-toolkit-2025.1.1.34_offline.exe
      WINDOWS_DPCPP_MKL: intel.oneapi.win.cpp-dpcpp-common:intel.oneapi.win.mkl.devel:intel.oneapi.win.dnnl:intel.oneapi.win.tbb.devel
      ONEAPI_ROOT: "C:/Program Files (x86)/Intel/oneAPI"

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: windows-latest-cmake-sycl
          variant: ccache
          evict-old-files: 1d

      - name: Install
        run: |
          scripts/install-oneapi.bat $WINDOWS_BASEKIT_URL $WINDOWS_DPCPP_MKL

      # TODO: add libcurl support ; we will also need to modify win-build-sycl.bat to accept user-specified args
      - name: Build
        id: cmake_build
        run: examples/sycl/win-build-sycl.bat

  windows-latest-cmake-hip:
    runs-on: windows-2022

    env:
      # The ROCm version must correspond to the version used in the HIP SDK.
      ROCM_VERSION: "6.4.2"
      HIPSDK_INSTALLER_VERSION: "25.Q3"

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Clone rocWMMA repository
        id: clone_rocwmma
        run: |
          git clone https://github.com/rocm/rocwmma --branch rocm-${{ env.ROCM_VERSION }} --depth 1

      - name: Cache ROCm Installation
        id: cache-rocm
        uses: actions/cache@v4
        with:
          path: C:\Program Files\AMD\ROCm
          key: rocm-${{ env.HIPSDK_INSTALLER_VERSION }}-${{ runner.os }}

      - name: Install ROCm
        if: steps.cache-rocm.outputs.cache-hit != 'true'
        id: depends
        run: |
          $ErrorActionPreference = "Stop"
          write-host "Downloading AMD HIP SDK Installer"
          Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-${{ env.HIPSDK_INSTALLER_VERSION }}-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe"
          write-host "Installing AMD HIP SDK"
          $proc = Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -PassThru
          $completed = $proc.WaitForExit(600000)
          if (-not $completed) {
            Write-Error "ROCm installation timed out after 10 minutes. Killing the process"
            $proc.Kill()
            exit 1
          }
          if ($proc.ExitCode -ne 0) {
            Write-Error "ROCm installation failed with exit code $($proc.ExitCode)"
            exit 1
          }
          write-host "Completed AMD HIP SDK installation"

      - name: Verify ROCm
        id: verify
        run: |
          # Find and test ROCm installation
          $clangPath = Get-ChildItem 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | Select-Object -First 1
          if (-not $clangPath) {
            Write-Error "ROCm installation not found"
            exit 1
          }
          & $clangPath.FullName --version

      - name: Install ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ${{ github.job }}
          evict-old-files: 1d

      - name: libCURL
        id: get_libcurl
        uses: ./.github/actions/windows-setup-curl

      - name: Build
        id: cmake_build
        env:
          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
        run: |
          $env:HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path)
          $env:CMAKE_PREFIX_PATH="${env:HIP_PATH}"
          cmake -G "Unix Makefiles" -B build -S . `
            -DCMAKE_C_COMPILER="${env:HIP_PATH}\bin\clang.exe" `
            -DCMAKE_CXX_COMPILER="${env:HIP_PATH}\bin\clang++.exe" `
            -DCMAKE_CXX_FLAGS="-I$($PWD.Path.Replace('\', '/'))/rocwmma/library/include/" `
            -DCMAKE_BUILD_TYPE=Release `
            -DGGML_HIP=ON `
            -DGGML_HIP_ROCWMMA_FATTN=ON `
            -DGGML_RPC=ON `
            -DCURL_LIBRARY="$env:CURL_PATH/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="$env:CURL_PATH/include"
          cmake --build build -j ${env:NUMBER_OF_PROCESSORS}

  ios-xcode-build:
    runs-on: macos-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Xcode
        uses: maxim-lobanov/setup-xcode@v1
        with:
          xcode-version: latest-stable

      - name: Build
        id: cmake_build
        run: |
          sysctl -a
          cmake -B build -G Xcode \
            -DGGML_METAL_USE_BF16=ON \
            -DGGML_METAL_EMBED_LIBRARY=ON \
            -DLLAMA_CURL=OFF \
            -DLLAMA_BUILD_EXAMPLES=OFF \
            -DLLAMA_BUILD_TOOLS=OFF \
            -DLLAMA_BUILD_TESTS=OFF \
            -DLLAMA_BUILD_SERVER=OFF \
            -DCMAKE_SYSTEM_NAME=iOS \
            -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \
            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO

      - name: xcodebuild for swift package
        id: xcodebuild
        run: |
          ./build-xcframework.sh

      - name: Upload xcframework artifact
        uses: actions/upload-artifact@v4
        with:
          name: llama-xcframework
          path: build-apple/llama.xcframework/
          retention-days: 1

      - name: Build Xcode project
        run: |
          xcodebuild -downloadPlatform iOS
          xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' FRAMEWORK_FOLDER_PATH=./build-ios build

  android-build:
    runs-on: ubuntu-latest

    steps:
      - name: Clone
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: android-build
          evict-old-files: 1d

      - name: Set up JDK
        uses: actions/setup-java@v3
        with:
          java-version: 17
          distribution: zulu

      - name: Setup Android SDK
        uses: android-actions/setup-android@v3
        with:
          log-accepted-android-sdk-licenses: false

      - name: Build
        run: |
          cd examples/llama.android
          ./gradlew build --no-daemon

  openEuler-latest-cmake-cann:
    if: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'Ascend NPU') }}

    defaults:
      run:
        shell: bash -el {0}

    strategy:
      matrix:
        arch: [x86, aarch64]
        cann:
          - '8.1.RC1.alpha001-910b-openeuler22.03-py3.10'
        device:
          - 'ascend910b3'
        build:
          - 'Release'

    runs-on: ${{ matrix.arch == 'aarch64' && 'ubuntu-24.04-arm' || 'ubuntu-24.04' }}
    container: ascendai/cann:${{ matrix.cann }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Dependencies
        run: |
          yum update -y
          yum install -y git gcc gcc-c++ make cmake libcurl-devel

      - name: Build
        run: |
          export LD_LIBRARY_PATH=${ASCEND_TOOLKIT_HOME}/lib64:${ASCEND_TOOLKIT_HOME}/$(uname -m)-linux/devlib/:${LD_LIBRARY_PATH}
          cmake -S . -B build \
            -DCMAKE_BUILD_TYPE=${{ matrix.build }} \
            -DGGML_CANN=on \
            -DSOC_TYPE=${{ matrix.device }}
          cmake --build build -j $(nproc)

  # TODO: simplify the following workflows using a matrix
  # TODO: run lighter CI on PRs and the full CI only on master (if needed)
  ggml-ci-x64-cpu-low-perf:
    runs-on: ubuntu-22.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ggml-ci-x64-cpu-low-perf
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Test
        id: ggml-ci
        run: |
          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_LOW_PERF=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

  ggml-ci-arm64-cpu-low-perf:
    runs-on: ubuntu-22.04-arm

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ggml-ci-arm64-cpu-low-perf
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Test
        id: ggml-ci
        run: |
          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_LOW_PERF=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

  ggml-ci-x64-cpu-high-perf:
    runs-on: ubuntu-22.04

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ggml-ci-x64-cpu-high-perf
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Test
        id: ggml-ci
        run: |
          LLAMA_ARG_THREADS=$(nproc) bash ./ci/run.sh ./tmp/results ./tmp/mnt

  ggml-ci-arm64-cpu-high-perf:
    runs-on: ubuntu-22.04-arm

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ggml-ci-arm64-cpu-high-perf
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Test
        id: ggml-ci
        run: |
          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_NO_SVE=1 GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

  ggml-ci-arm64-cpu-high-perf-sve:
    runs-on: ubuntu-22.04-arm

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: ccache
        uses: ggml-org/ccache-action@v1.2.16
        with:
          key: ggml-ci-arm64-cpu-high-perf-sve
          evict-old-files: 1d

      - name: Dependencies
        id: depends
        run: |
          sudo apt-get update
          sudo apt-get install build-essential libcurl4-openssl-dev

      - name: Test
        id: ggml-ci
        run: |
          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

  ggml-ci-x64-nvidia-cuda:
    runs-on: [self-hosted, Linux, X64, NVIDIA]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          nvidia-smi
          GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

  ggml-ci-x64-nvidia-vulkan-cm:
    runs-on: [self-hosted, Linux, X64, NVIDIA]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          vulkaninfo --summary
          GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

  ggml-ci-x64-nvidia-vulkan-cm2:
    runs-on: [self-hosted, Linux, X64, NVIDIA, COOPMAT2]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          vulkaninfo --summary
          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

  ggml-ci-x64-cpu-amx:
    runs-on: [self-hosted, Linux, X64, CPU, AMX]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

  # ggml-ci-x64-amd-vulkan:
  #   runs-on: [self-hosted, Linux, X64, AMD]
  #
  #   steps:
  #     - name: Clone
  #       id: checkout
  #       uses: actions/checkout@v4
  #
  #     - name: Test
  #       id: ggml-ci
  #       run: |
  #         vulkaninfo --summary
  #         GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
  #
  # ggml-ci-x64-amd-rocm:
  #   runs-on: [self-hosted, Linux, X64, AMD]
  #
  #   steps:
  #     - name: Clone
  #       id: checkout
  #       uses: actions/checkout@v4
  #
  #     - name: Test
  #       id: ggml-ci
  #       run: |
  #         amd-smi static
  #         GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

  ggml-ci-mac-metal:
    runs-on: [self-hosted, macOS, ARM64]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          GG_BUILD_METAL=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp

  ggml-ci-mac-vulkan:
    runs-on: [self-hosted, macOS, ARM64]

    steps:
      - name: Clone
        id: checkout
        uses: actions/checkout@v4

      - name: Test
        id: ggml-ci
        run: |
          vulkaninfo --summary
          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp
|