diff --git a/.devops/cloud-v-pipeline b/.devops/cloud-v-pipeline deleted file mode 100644 index af8c0cea61..0000000000 --- a/.devops/cloud-v-pipeline +++ /dev/null @@ -1,22 +0,0 @@ -node('x86_runner1'){ // Running on x86 runner containing latest vector qemu, latest vector gcc and all the necessary libraries - stage('Cleanup'){ - cleanWs() // Cleaning previous CI build in workspace - } - stage('checkout repo'){ - retry(5){ // Retry if the cloning fails due to some reason - checkout scm // Clone the repo on Runner - } - } - stage('Compiling llama.cpp'){ - sh'''#!/bin/bash - make RISCV=1 RISCV_CROSS_COMPILE=1 # Compiling llama for RISC-V - ''' - } - stage('Running llama.cpp'){ - sh'''#!/bin/bash - module load gnu-bin2/0.1 # loading latest versions of vector qemu and vector gcc - qemu-riscv64 -L /softwares/gnu-bin2/sysroot -cpu rv64,v=true,vlen=256,elen=64,vext_spec=v1.0 ./llama-cli -m /home/alitariq/codellama-7b.Q4_K_M.gguf -p "Anything" -n 9 > llama_log.txt # Running llama.cpp on vector qemu-riscv64 - cat llama_log.txt # Printing results - ''' - } -} diff --git a/.devops/cpu.Dockerfile b/.devops/cpu.Dockerfile index 9459f08c10..e1bb7d4675 100644 --- a/.devops/cpu.Dockerfile +++ b/.devops/cpu.Dockerfile @@ -4,8 +4,6 @@ FROM ubuntu:$UBUNTU_VERSION AS build ARG TARGETARCH -ARG GGML_CPU_ARM_ARCH=armv8-a - RUN apt-get update && \ apt-get install -y build-essential git cmake libcurl4-openssl-dev @@ -13,10 +11,8 @@ WORKDIR /app COPY . . -RUN if [ "$TARGETARCH" = "amd64" ]; then \ +RUN if [ "$TARGETARCH" = "amd64" ] || [ "$TARGETARCH" = "arm64" ]; then \ cmake -S . -B build -DCMAKE_BUILD_TYPE=Release -DGGML_NATIVE=OFF -DLLAMA_BUILD_TESTS=OFF -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON; \ - elif [ "$TARGETARCH" = "arm64" ]; then \ - cmake -S . -B build -DCMAKE_BUILD_TYPE=Release -DGGML_NATIVE=OFF -DLLAMA_BUILD_TESTS=OFF -DGGML_CPU_ARM_ARCH=${GGML_CPU_ARM_ARCH}; \ else \ echo "Unsupported architecture"; \ exit 1; \ diff --git a/.devops/cuda.Dockerfile b/.devops/cuda.Dockerfile index 94f1433972..4b708ae278 100644 --- a/.devops/cuda.Dockerfile +++ b/.devops/cuda.Dockerfile @@ -61,7 +61,7 @@ RUN apt-get update \ python3 \ python3-pip \ && pip install --upgrade pip setuptools wheel \ - && pip install -r requirements.txt \ + && pip install --break-system-packages -r requirements.txt \ && apt autoremove -y \ && apt clean -y \ && rm -rf /tmp/* /var/tmp/* \ diff --git a/.github/ISSUE_TEMPLATE/010-bug-compilation.yml b/.github/ISSUE_TEMPLATE/010-bug-compilation.yml index 95a0b5cc75..feb0d51205 100644 --- a/.github/ISSUE_TEMPLATE/010-bug-compilation.yml +++ b/.github/ISSUE_TEMPLATE/010-bug-compilation.yml @@ -40,7 +40,7 @@ body: attributes: label: GGML backends description: Which GGML backends do you know to be affected? - options: [AMX, BLAS, CPU, CUDA, HIP, Metal, Musa, RPC, SYCL, Vulkan, OpenCL] + options: [AMX, BLAS, CPU, CUDA, HIP, Metal, Musa, RPC, SYCL, Vulkan, OpenCL, zDNN] multiple: true validations: required: true diff --git a/.github/ISSUE_TEMPLATE/011-bug-results.yml b/.github/ISSUE_TEMPLATE/011-bug-results.yml index d1034bbb69..c42a14ff83 100644 --- a/.github/ISSUE_TEMPLATE/011-bug-results.yml +++ b/.github/ISSUE_TEMPLATE/011-bug-results.yml @@ -42,7 +42,7 @@ body: attributes: label: GGML backends description: Which GGML backends do you know to be affected? 
- options: [AMX, BLAS, CPU, CUDA, HIP, Metal, Musa, RPC, SYCL, Vulkan, OpenCL] + options: [AMX, BLAS, CPU, CUDA, HIP, Metal, Musa, RPC, SYCL, Vulkan, OpenCL, zDNN] multiple: true validations: required: true diff --git a/.github/labeler.yml b/.github/labeler.yml index df6a7a40ed..c4da4ab4e1 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -22,6 +22,11 @@ Vulkan: - any-glob-to-any-file: - ggml/include/ggml-vulkan.h - ggml/src/ggml-vulkan/** +IBM zDNN: + - changed-files: + - any-glob-to-any-file: + - ggml/include/ggml-zdnn.h + - ggml/src/ggml-zdnn/** documentation: - changed-files: - any-glob-to-any-file: diff --git a/.github/workflows/build-riscv-native.yml b/.github/workflows/build-riscv-native.yml new file mode 100644 index 0000000000..a968f6d519 --- /dev/null +++ b/.github/workflows/build-riscv-native.yml @@ -0,0 +1,43 @@ +name: Build on RISCV Linux Machine by Cloud-V +on: + workflow_dispatch: + workflow_call: + +jobs: + bianbu-riscv64-native: # Bianbu 2.2 + runs-on: self-hosted + + steps: + - name: Install prerequisites + run: | + sudo apt-get update || true + sudo apt-get install -y libatomic1 + - uses: actions/checkout@v4 + - name: Setup Riscv + run: | + sudo apt-get update || true + sudo apt-get install -y --no-install-recommends \ + build-essential \ + gcc-14-riscv64-linux-gnu \ + g++-14-riscv64-linux-gnu \ + cmake + + - name: Build + run: | + cmake -B build -DLLAMA_CURL=OFF \ + -DCMAKE_BUILD_TYPE=Release \ + -DGGML_OPENMP=OFF \ + -DLLAMA_BUILD_EXAMPLES=ON \ + -DLLAMA_BUILD_TOOLS=ON \ + -DLLAMA_BUILD_TESTS=OFF \ + -DCMAKE_SYSTEM_NAME=Linux \ + -DCMAKE_SYSTEM_PROCESSOR=riscv64 \ + -DCMAKE_C_COMPILER=riscv64-linux-gnu-gcc-14 \ + -DCMAKE_CXX_COMPILER=riscv64-linux-gnu-g++-14 \ + -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + -DCMAKE_FIND_ROOT_PATH=/usr/lib/riscv64-linux-gnu \ + -DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=NEVER \ + -DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY \ + -DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=BOTH + + cmake --build build --config Release -j $(nproc) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3d4f837e24..43553ac13b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -64,7 +64,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: macOS-latest-cmake-arm64 evict-old-files: 1d @@ -104,7 +104,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: macOS-latest-cmake-x64 evict-old-files: 1d @@ -144,7 +144,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: macOS-latest-cmake-arm64-webgpu evict-old-files: 1d @@ -199,7 +199,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-cpu-cmake evict-old-files: 1d @@ -251,7 +251,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-latest-cmake-sanitizer-${{ matrix.sanitizer }} evict-old-files: 1d @@ -330,7 +330,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-latest-cmake-rpc evict-old-files: 1d @@ -363,7 +363,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: 
ggml-org/ccache-action@v1.2.16 with: key: ubuntu-22-cmake-vulkan evict-old-files: 1d @@ -400,7 +400,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-22-cmake-webgpu evict-old-files: 1d @@ -443,7 +443,7 @@ jobs: ubuntu-22-cmake-hip: runs-on: ubuntu-22.04 - container: rocm/dev-ubuntu-22.04:6.0.2 + container: rocm/dev-ubuntu-22.04:6.1.2 steps: - name: Clone @@ -457,7 +457,7 @@ jobs: sudo apt-get install -y build-essential git cmake rocblas-dev hipblas-dev libcurl4-openssl-dev - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-22-cmake-hip evict-old-files: 1d @@ -471,16 +471,6 @@ jobs: -DGGML_HIP=ON cmake --build build --config Release -j $(nproc) - - name: Build with legacy HIP support - id: cmake_build_legacy_hip - run: | - cmake -B build2 -S . \ - -DCMAKE_C_COMPILER=hipcc \ - -DCMAKE_CXX_COMPILER=hipcc \ - -DGGML_HIP_ROCWMMA_FATTN=ON \ - -DGGML_HIP=ON - cmake --build build2 --config Release -j $(nproc) - ubuntu-22-cmake-musa: runs-on: ubuntu-22.04 container: mthreads/musa:rc4.2.0-devel-ubuntu22.04-amd64 @@ -497,7 +487,7 @@ jobs: apt-get install -y build-essential git cmake libcurl4-openssl-dev - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-22-cmake-musa evict-old-files: 1d @@ -542,7 +532,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-22-cmake-sycl evict-old-files: 1d @@ -590,7 +580,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-22-cmake-sycl-fp16 evict-old-files: 1d @@ -621,7 +611,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: macOS-latest-cmake-ios evict-old-files: 1d @@ -658,7 +648,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: macOS-latest-cmake-tvos evict-old-files: 1d @@ -730,7 +720,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: macOS-latest-swift evict-old-files: 1d @@ -776,7 +766,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: windows-msys2 variant: ccache @@ -844,7 +834,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: windows-latest-cmake-${{ matrix.build }} variant: ccache @@ -958,7 +948,7 @@ jobs: apt install -y cmake build-essential ninja-build libgomp1 git libcurl4-openssl-dev - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-latest-cmake-cuda evict-old-files: 1d @@ -987,7 +977,7 @@ jobs: uses: actions/checkout@v4 - name: Install ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: windows-cuda-${{ matrix.cuda }} variant: ccache @@ -1043,7 +1033,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: windows-latest-cmake-sycl variant: ccache @@ -1080,7 +1070,8 @@ jobs: write-host 
"Downloading AMD HIP SDK Installer" Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q3-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe" write-host "Installing AMD HIP SDK" - Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -Wait + $proc = Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -PassThru + $proc.WaitForExit(600000) write-host "Completed AMD HIP SDK installation" - name: Verify ROCm @@ -1089,7 +1080,7 @@ jobs: & 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' --version - name: Install ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ${{ github.job }} evict-old-files: 1d @@ -1123,6 +1114,11 @@ jobs: - name: Checkout code uses: actions/checkout@v4 + - name: Setup Xcode + uses: maxim-lobanov/setup-xcode@v1 + with: + xcode-version: latest-stable + - name: Build id: cmake_build run: | @@ -1156,7 +1152,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: android-build evict-old-files: 1d diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml new file mode 100644 index 0000000000..2d61ce3035 --- /dev/null +++ b/.github/workflows/copilot-setup-steps.yml @@ -0,0 +1,53 @@ +name: "Copilot Setup Steps" + +# Automatically run the setup steps when they are changed to allow for easy validation, and +# allow manual testing through the repository's "Actions" tab +on: + workflow_dispatch: + push: + paths: + - .github/workflows/copilot-setup-steps.yml + pull_request: + paths: + - .github/workflows/copilot-setup-steps.yml + +jobs: + # The job MUST be called `copilot-setup-steps` or it will not be picked up by Copilot. + copilot-setup-steps: + runs-on: ubuntu-latest + + # Set the permissions to the lowest permissions possible needed for your steps. + # Copilot will be given its own token for its operations. + permissions: + # If you want to clone the repository as part of your setup steps, for example to install dependencies, you'll need the `contents: read` permission. If you don't clone the repository in your setup steps, Copilot will do this for you automatically after the steps complete. + contents: read + + # You can define any steps you want, and they will run before the agent starts. + # If you do not check out your code, Copilot will do this for you. 
+ steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: ccache + uses: ggml-org/ccache-action@v1.2.16 + with: + key: copilot-setup-steps + evict-old-files: 1d + + - name: Dependencies + id: depends + run: | + sudo apt-get update + sudo apt-get install build-essential libcurl4-openssl-dev + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install Python dependencies + run: | + python3 -m venv .venv + .venv/bin/activate + pip install -r requirements/requirements-all.txt -r tools/server/tests/requirements.txt + pip install flake8 pyright diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4ed6126f48..5367637e42 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -32,7 +32,7 @@ jobs: fetch-depth: 0 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: macOS-latest-cmake-arm64 evict-old-files: 1d @@ -85,7 +85,7 @@ jobs: fetch-depth: 0 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: macOS-latest-cmake-x64 evict-old-files: 1d @@ -147,7 +147,7 @@ jobs: fetch-depth: 0 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-cpu-cmake evict-old-files: 1d @@ -198,7 +198,7 @@ jobs: fetch-depth: 0 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: ubuntu-22-cmake-vulkan evict-old-files: 1d @@ -256,7 +256,7 @@ jobs: fetch-depth: 0 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: windows-latest-cmake-cpu-${{ matrix.arch }} variant: ccache @@ -328,7 +328,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: windows-latest-cmake-${{ matrix.backend }}-${{ matrix.arch }} variant: ccache @@ -398,7 +398,7 @@ jobs: uses: actions/checkout@v4 - name: Install ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: windows-cuda-${{ matrix.cuda }} variant: ccache @@ -471,7 +471,7 @@ jobs: uses: actions/checkout@v4 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: windows-latest-cmake-sycl variant: ccache @@ -545,7 +545,7 @@ jobs: git clone https://github.com/rocm/rocwmma --branch rocm-6.2.4 --depth 1 - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.16 + uses: ggml-org/ccache-action@v1.2.16 with: key: windows-latest-cmake-hip-${{ matrix.name }}-x64 evict-old-files: 1d @@ -557,7 +557,8 @@ jobs: write-host "Downloading AMD HIP SDK Installer" Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q3-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe" write-host "Installing AMD HIP SDK" - Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -Wait + $proc = Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -PassThru + $proc.WaitForExit(600000) write-host "Completed AMD HIP SDK installation" - name: Verify ROCm @@ -600,7 +601,7 @@ jobs: name: llama-bin-win-hip-${{ matrix.name }}-x64.zip ios-xcode-build: - runs-on: macos-latest + runs-on: macos-15 steps: - name: Checkout code @@ -608,6 +609,10 @@ jobs: with: fetch-depth: 0 + - name: Setup Xcode + run: | + sudo xcode-select -s 
/Applications/Xcode_16.4.app + - name: Build id: cmake_build run: | diff --git a/CMakeLists.txt b/CMakeLists.txt index c79ccd09e0..36a2078e4c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -12,6 +12,8 @@ if (NOT XCODE AND NOT MSVC AND NOT CMAKE_BUILD_TYPE) set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" "MinSizeRel" "RelWithDebInfo") endif() +message("CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}") + # Add path to modules list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/") diff --git a/CODEOWNERS b/CODEOWNERS index 4c0dd4b725..18564a08b1 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -5,8 +5,8 @@ /tools/server/ @ngxson /ggml/src/ggml-cuda/fattn* @JohannesGaessler /ggml/src/ggml-cuda/mmq.* @JohannesGaessler -/ggml/src/ggml-cuda/mmv.* @JohannesGaessler /ggml/src/ggml-cuda/mmvq.* @JohannesGaessler /ggml/src/ggml-opt.cpp @JohannesGaessler /ggml/src/gguf.cpp @JohannesGaessler /ggml/src/ggml-vulkan/ @0cc4m +/ggml/src/ggml-zdnn/ @taronaeo diff --git a/README.md b/README.md index 954fff83da..8446756384 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,8 @@ LLM inference in C/C++ ## Hot topics +- **[guide : running gpt-oss with llama.cpp](https://github.com/ggml-org/llama.cpp/discussions/15396)** +- **[[FEEDBACK] Better packaging for llama.cpp to support downstream consumers 🤗](https://github.com/ggml-org/llama.cpp/discussions/15313)** - Support for the `gpt-oss` model with native MXFP4 format has been added | [PR](https://github.com/ggml-org/llama.cpp/pull/15091) | [Collaboration with NVIDIA](https://blogs.nvidia.com/blog/rtx-ai-garage-openai-oss) | [Comment](https://github.com/ggml-org/llama.cpp/discussions/15095) - Hot PRs: [All](https://github.com/ggml-org/llama.cpp/pulls?q=is%3Apr+label%3Ahot+) | [Open](https://github.com/ggml-org/llama.cpp/pulls?q=is%3Apr+label%3Ahot+is%3Aopen) - Multimodal support arrived in `llama-server`: [#12898](https://github.com/ggml-org/llama.cpp/pull/12898) | [documentation](./docs/multimodal.md) @@ -240,7 +242,7 @@ Instructions for adding support for new models: [HOWTO-add-model.md](docs/develo
Infrastructure -- [Paddler](https://github.com/distantmagic/paddler) - Stateful load balancer custom-tailored for llama.cpp +- [Paddler](https://github.com/intentee/paddler) - Open-source LLMOps platform for hosting and scaling AI in your own infrastructure - [GPUStack](https://github.com/gpustack/gpustack) - Manage GPU clusters for running LLMs - [llama_cpp_canister](https://github.com/onicai/llama_cpp_canister) - llama.cpp as a smart contract on the Internet Computer, using WebAssembly - [llama-swap](https://github.com/mostlygeek/llama-swap) - transparent proxy that adds automatic model switching with llama-server diff --git a/common/arg.cpp b/common/arg.cpp index 0f01bb3145..d9f6ec96d0 100644 --- a/common/arg.cpp +++ b/common/arg.cpp @@ -749,6 +749,39 @@ std::pair> common_remote_get_content(const std::string & // utils // +// Helper function to parse tensor buffer override strings +static void parse_tensor_buffer_overrides(const std::string & value, std::vector & overrides) { + std::map buft_list; + for (size_t i = 0; i < ggml_backend_dev_count(); ++i) { + auto * dev = ggml_backend_dev_get(i); + auto * buft = ggml_backend_dev_buffer_type(dev); + if (buft) { + buft_list[ggml_backend_buft_name(buft)] = buft; + } + } + + for (const auto & override : string_split(value, ',')) { + std::string::size_type pos = override.find('='); + if (pos == std::string::npos) { + throw std::invalid_argument("invalid value"); + } + std::string tensor_name = override.substr(0, pos); + std::string buffer_type = override.substr(pos + 1); + + if (buft_list.find(buffer_type) == buft_list.end()) { + printf("Available buffer types:\n"); + for (const auto & it : buft_list) { + printf(" %s\n", ggml_backend_buft_name(it.second)); + } + throw std::invalid_argument("unknown buffer type"); + } + // keep strings alive and avoid leaking memory by storing them in a static vector + static std::list buft_overrides; + buft_overrides.push_back(tensor_name); + overrides.push_back({buft_overrides.back().c_str(), buft_list.at(buffer_type)}); + } +} + struct handle_model_result { bool found_mmproj = false; common_params_model mmproj; @@ -993,6 +1026,10 @@ static bool common_params_parse_ex(int argc, char ** argv, common_params_context params.tensor_buft_overrides.push_back({nullptr, nullptr}); } + if (!params.speculative.tensor_buft_overrides.empty()) { + params.speculative.tensor_buft_overrides.push_back({nullptr, nullptr}); + } + if (!params.chat_template.empty() && !common_chat_verify_template(params.chat_template, params.use_jinja)) { throw std::runtime_error(string_format( "error: the supplied chat template is not supported: %s%s\n", @@ -1201,6 +1238,7 @@ bool common_params_parse(int argc, char ** argv, common_params & params, llama_e common_params_print_completion(ctx_arg); exit(0); } + params.lr.init(); } catch (const std::invalid_argument & ex) { fprintf(stderr, "%s\n", ex.what()); ctx_arg.params = params_org; @@ -1469,6 +1507,14 @@ common_params_context common_params_parser_init(common_params & params, llama_ex params.swa_full = true; } ).set_env("LLAMA_ARG_SWA_FULL")); + add_opt(common_arg( + {"--swa-checkpoints"}, "N", + string_format("max number of SWA checkpoints per slot to create (default: %d)\n" + "[(more info)](https://github.com/ggml-org/llama.cpp/pull/15293)", params.n_swa_checkpoints), + [](common_params & params, int value) { + params.n_swa_checkpoints = value; + } + ).set_env("LLAMA_ARG_SWA_CHECKPOINTS").set_examples({LLAMA_EXAMPLE_SERVER})); add_opt(common_arg( {"--kv-unified", "-kvu"}, string_format("use 
single unified KV buffer for the KV cache of all sequences (default: %s)\n" @@ -1484,6 +1530,13 @@ common_params_context common_params_parser_init(common_params & params, llama_ex params.ctx_shift = false; } ).set_examples({LLAMA_EXAMPLE_MAIN, LLAMA_EXAMPLE_SERVER, LLAMA_EXAMPLE_IMATRIX, LLAMA_EXAMPLE_PERPLEXITY}).set_env("LLAMA_ARG_NO_CONTEXT_SHIFT")); + add_opt(common_arg( + {"--context-shift"}, + string_format("enables context shift on infinite text generation (default: %s)", params.ctx_shift ? "disabled" : "enabled"), + [](common_params & params) { + params.ctx_shift = true; + } + ).set_examples({LLAMA_EXAMPLE_MAIN, LLAMA_EXAMPLE_SERVER, LLAMA_EXAMPLE_IMATRIX, LLAMA_EXAMPLE_PERPLEXITY}).set_env("LLAMA_ARG_CONTEXT_SHIFT")); add_opt(common_arg( {"--chunks"}, "N", string_format("max number of chunks to process (default: %d, -1 = all)", params.n_chunks), @@ -1777,7 +1830,7 @@ common_params_context common_params_parser_init(common_params & params, llama_ex [](common_params & params, const std::string & value) { params.sampling.top_n_sigma = std::stof(value); } - ).set_examples({LLAMA_EXAMPLE_MAIN}).set_sparam()); + ).set_sparam()); add_opt(common_arg( {"--xtc-probability"}, "N", string_format("xtc probability (default: %.1f, 0.0 = disabled)", (double)params.sampling.xtc_probability), @@ -2349,40 +2402,15 @@ common_params_context common_params_parser_init(common_params & params, llama_ex add_opt(common_arg( {"--override-tensor", "-ot"}, "=,...", "override tensor buffer type", [](common_params & params, const std::string & value) { - /* static */ std::map buft_list; - if (buft_list.empty()) { - // enumerate all the devices and add their buffer types to the list - for (size_t i = 0; i < ggml_backend_dev_count(); ++i) { - auto * dev = ggml_backend_dev_get(i); - auto * buft = ggml_backend_dev_buffer_type(dev); - if (buft) { - buft_list[ggml_backend_buft_name(buft)] = buft; - } - } - } - - for (const auto & override : string_split(value, ',')) { - std::string::size_type pos = override.find('='); - if (pos == std::string::npos) { - throw std::invalid_argument("invalid value"); - } - std::string tensor_name = override.substr(0, pos); - std::string buffer_type = override.substr(pos + 1); - - if (buft_list.find(buffer_type) == buft_list.end()) { - printf("Available buffer types:\n"); - for (const auto & it : buft_list) { - printf(" %s\n", ggml_backend_buft_name(it.second)); - } - throw std::invalid_argument("unknown buffer type"); - } - // keep strings alive and avoid leaking memory by storing them in a static vector - static std::list buft_overrides; - buft_overrides.push_back(tensor_name); - params.tensor_buft_overrides.push_back({buft_overrides.back().c_str(), buft_list.at(buffer_type)}); - } + parse_tensor_buffer_overrides(value, params.tensor_buft_overrides); } )); + add_opt(common_arg( + {"--override-tensor-draft", "-otd"}, "=,...", + "override tensor buffer type for draft model", [](common_params & params, const std::string & value) { + parse_tensor_buffer_overrides(value, params.speculative.tensor_buft_overrides); + } + ).set_examples({LLAMA_EXAMPLE_SPECULATIVE, LLAMA_EXAMPLE_SERVER})); add_opt(common_arg( {"--cpu-moe", "-cmoe"}, "keep all Mixture of Experts (MoE) weights in the CPU", @@ -2405,6 +2433,27 @@ common_params_context common_params_parser_init(common_params & params, llama_ex } } ).set_env("LLAMA_ARG_N_CPU_MOE")); + add_opt(common_arg( + {"--cpu-moe-draft", "-cmoed"}, + "keep all Mixture of Experts (MoE) weights in the CPU for the draft model", + [](common_params & params) { + 
params.speculative.tensor_buft_overrides.push_back({"\\.ffn_(up|down|gate)_exps", ggml_backend_cpu_buffer_type()}); + } + ).set_examples({LLAMA_EXAMPLE_SPECULATIVE, LLAMA_EXAMPLE_SERVER}).set_env("LLAMA_ARG_CPU_MOE_DRAFT")); + add_opt(common_arg( + {"--n-cpu-moe-draft", "-ncmoed"}, "N", + "keep the Mixture of Experts (MoE) weights of the first N layers in the CPU for the draft model", + [](common_params & params, int value) { + if (value < 0) { + throw std::invalid_argument("invalid value"); + } + for (int i = 0; i < value; ++i) { + static std::list buft_overrides_draft; + buft_overrides_draft.push_back(string_format("blk\\.%d\\.ffn_(up|down|gate)_exps", i)); + params.speculative.tensor_buft_overrides.push_back({buft_overrides_draft.back().c_str(), ggml_backend_cpu_buffer_type()}); + } + } + ).set_examples({LLAMA_EXAMPLE_SPECULATIVE, LLAMA_EXAMPLE_SERVER}).set_env("LLAMA_ARG_N_CPU_MOE_DRAFT")); add_opt(common_arg( {"-ngl", "--gpu-layers", "--n-gpu-layers"}, "N", "number of layers to store in VRAM", @@ -2655,7 +2704,7 @@ common_params_context common_params_parser_init(common_params & params, llama_ex [](common_params & params, const std::string & value) { params.out_file = value; } - ).set_examples({LLAMA_EXAMPLE_IMATRIX, LLAMA_EXAMPLE_CVECTOR_GENERATOR, LLAMA_EXAMPLE_EXPORT_LORA, LLAMA_EXAMPLE_TTS})); + ).set_examples({LLAMA_EXAMPLE_IMATRIX, LLAMA_EXAMPLE_CVECTOR_GENERATOR, LLAMA_EXAMPLE_EXPORT_LORA, LLAMA_EXAMPLE_TTS, LLAMA_EXAMPLE_FINETUNE})); add_opt(common_arg( {"-ofreq", "--output-frequency"}, "N", string_format("output the imatrix every N iterations (default: %d)", params.n_out_freq), @@ -2949,11 +2998,7 @@ common_params_context common_params_parser_init(common_params & params, llama_ex "- deepseek: puts thoughts in `message.reasoning_content` (except in streaming mode, which behaves as `none`)\n" "(default: auto)", [](common_params & params, const std::string & value) { - /**/ if (value == "deepseek") { params.reasoning_format = COMMON_REASONING_FORMAT_DEEPSEEK; } - else if (value == "deepseek-legacy") { params.reasoning_format = COMMON_REASONING_FORMAT_DEEPSEEK_LEGACY; } - else if (value == "none") { params.reasoning_format = COMMON_REASONING_FORMAT_NONE; } - else if (value == "auto") { params.reasoning_format = COMMON_REASONING_FORMAT_AUTO; } - else { throw std::invalid_argument("invalid value"); } + params.reasoning_format = common_reasoning_format_from_name(value); } ).set_examples({LLAMA_EXAMPLE_SERVER, LLAMA_EXAMPLE_MAIN}).set_env("LLAMA_ARG_THINK")); add_opt(common_arg( @@ -3134,7 +3179,7 @@ common_params_context common_params_parser_init(common_params & params, llama_ex params.speculative.cpuparams.n_threads = std::thread::hardware_concurrency(); } } - ).set_examples({LLAMA_EXAMPLE_SPECULATIVE})); + ).set_examples({LLAMA_EXAMPLE_SPECULATIVE, LLAMA_EXAMPLE_SERVER})); add_opt(common_arg( {"-tbd", "--threads-batch-draft"}, "N", "number of threads to use during batch and prompt processing (default: same as --threads-draft)", @@ -3144,7 +3189,7 @@ common_params_context common_params_parser_init(common_params & params, llama_ex params.speculative.cpuparams_batch.n_threads = std::thread::hardware_concurrency(); } } - ).set_examples({LLAMA_EXAMPLE_SPECULATIVE})); + ).set_examples({LLAMA_EXAMPLE_SPECULATIVE, LLAMA_EXAMPLE_SERVER})); add_opt(common_arg( {"-Cd", "--cpu-mask-draft"}, "M", "Draft model CPU affinity mask. 
Complements cpu-range-draft (default: same as --cpu-mask)", @@ -3537,5 +3582,51 @@ common_params_context common_params_parser_init(common_params & params, llama_ex ).set_examples({ LLAMA_EXAMPLE_DIFFUSION })); + add_opt( + common_arg({ "-lr", "--learning-rate" }, "ALPHA", + string_format( + "adamw or sgd optimizer alpha (default: %.2g); note: sgd alpha recommended ~10x (no momentum)", + (double) params.lr.lr0), + [](common_params & params, const std::string & value) { params.lr.lr0 = std::stof(value); }) + .set_examples({ LLAMA_EXAMPLE_FINETUNE })); + add_opt( + common_arg({ "-lr-min", "--learning-rate-min" }, "ALPHA", + string_format( + "(if >0) final learning rate after decay (if -decay-epochs is set, default=%.2g)", + (double) params.lr.lr_min), + [](common_params & params, const std::string & value) { params.lr.lr_min = std::stof(value); }) + .set_examples({ LLAMA_EXAMPLE_FINETUNE })); + add_opt( + common_arg({ "-decay-epochs", "--learning-rate-decay-epochs" }, "ALPHA", + string_format( + "(if >0) decay learning rate to -lr-min after this many epochs (exponential decay, default=%.2g)", + (double) params.lr.decay_epochs), + [](common_params & params, const std::string & value) { params.lr.decay_epochs = std::stof(value); }) + .set_examples({ LLAMA_EXAMPLE_FINETUNE })); + add_opt(common_arg( + { "-wd", "--weight-decay" }, "WD", + string_format( + "adamw or sgd optimizer weight decay (0 is off; recommend very small e.g. 1e-9) (default: %.2g).", + (double) params.lr.wd), + [](common_params & params, const std::string & value) { params.lr.wd = std::stof(value); }) + .set_examples({ LLAMA_EXAMPLE_FINETUNE })); + add_opt(common_arg({ "-val-split", "--val-split" }, "FRACTION", + string_format("fraction of data to use as validation set for training (default: %.2g).", + (double) params.val_split), + [](common_params & params, const std::string & value) { params.val_split = std::stof(value); }) + .set_examples({ LLAMA_EXAMPLE_FINETUNE })); + add_opt(common_arg({ "-epochs", "--epochs" }, "N", + string_format("optimizer max # of epochs (default: %d)", params.lr.epochs), + [](common_params & params, int epochs) { params.lr.epochs = epochs; }) + .set_examples({ LLAMA_EXAMPLE_FINETUNE })); + add_opt(common_arg({ "-opt", "--optimizer" }, "sgd|adamw", "adamw or sgd", + [](common_params & params, const std::string & name) { + params.optimizer = common_opt_get_optimizer(name.c_str()); + if (params.optimizer == GGML_OPT_OPTIMIZER_TYPE_COUNT) { + throw std::invalid_argument("invalid --optimizer, valid options: adamw, sgd"); + } + }) + .set_examples({ LLAMA_EXAMPLE_FINETUNE })); + return ctx_arg; } diff --git a/common/chat.cpp b/common/chat.cpp index 316bd24170..5fe5643d38 100644 --- a/common/chat.cpp +++ b/common/chat.cpp @@ -296,6 +296,7 @@ json common_chat_msgs_to_json_oaicompat(const std::vector & msg } if (!msg.reasoning_content.empty()) { jmsg["reasoning_content"] = msg.reasoning_content; + jmsg["thinking"] = msg.reasoning_content; // gpt-oss } if (!msg.tool_name.empty()) { jmsg["name"] = msg.tool_name; @@ -472,11 +473,12 @@ std::string common_chat_format_single( return ss.str(); } -std::string common_chat_format_example(const struct common_chat_templates * tmpls, bool use_jinja) { +std::string common_chat_format_example(const struct common_chat_templates * tmpls, bool use_jinja, const std::map & chat_template_kwargs) { common_chat_templates_inputs inputs; inputs.use_jinja = use_jinja; inputs.add_bos = tmpls->add_bos; inputs.add_eos = tmpls->add_eos; + inputs.chat_template_kwargs = 
chat_template_kwargs; auto add_simple_msg = [&](auto role, auto content) { common_chat_msg msg; msg.role = role; @@ -552,6 +554,17 @@ common_chat_templates_ptr common_chat_templates_init( default_template_src = CHATML_TEMPLATE_SRC; } } + + // TODO @ngxson : this is a temporary hack to prevent chat template from throwing an error + // Ref: https://github.com/ggml-org/llama.cpp/pull/15230#issuecomment-3173959633 + if (default_template_src.find("<|channel|>") != std::string::npos + // search for the error message and patch it + && default_template_src.find("in message.content or") != std::string::npos) { + string_replace_all(default_template_src, + "{%- if \"<|channel|>analysis<|message|>\" in message.content or \"<|channel|>final<|message|>\" in message.content %}", + "{%- if false %}"); + } + std::string token_bos = bos_token_override; std::string token_eos = eos_token_override; bool add_bos = false; @@ -619,12 +632,24 @@ const char * common_reasoning_format_name(common_reasoning_format format) { case COMMON_REASONING_FORMAT_AUTO: return "auto"; case COMMON_REASONING_FORMAT_DEEPSEEK: return "deepseek"; case COMMON_REASONING_FORMAT_DEEPSEEK_LEGACY: return "deepseek-legacy"; - case COMMON_REASONING_FORMAT_GRANITE: return "granite"; default: throw std::runtime_error("Unknown reasoning format"); } } +common_reasoning_format common_reasoning_format_from_name(const std::string & format) { + if (format == "none") { + return COMMON_REASONING_FORMAT_NONE; + } else if (format == "auto") { + return COMMON_REASONING_FORMAT_AUTO; + } else if (format == "deepseek") { + return COMMON_REASONING_FORMAT_DEEPSEEK; + } else if (format == "deepseek-legacy") { + return COMMON_REASONING_FORMAT_DEEPSEEK_LEGACY; + } + throw std::runtime_error("Unknown reasoning format: " + format); +} + static std::string wrap_code_as_arguments(common_chat_msg_parser & builder, const std::string & code) { std::string arguments; if (builder.is_partial()) { @@ -1314,16 +1339,164 @@ static common_chat_params common_chat_params_init_gpt_oss(const common_chat_temp data.prompt = prompt; data.format = COMMON_CHAT_FORMAT_GPT_OSS; - // TODO: support tool calls in GPT-OSS? + // These special tokens are required to parse properly, so we include them + // even if parse_tool_calls is false. + data.preserved_tokens = { + "<|channel|>", + "<|constrain|>", + "<|message|>", + "<|start|>", + "<|end|>", + }; + + if (inputs.tools.is_array() && !inputs.tools.empty()) { + data.grammar_lazy = inputs.tool_choice != COMMON_CHAT_TOOL_CHOICE_REQUIRED; + data.grammar = build_grammar([&](const common_grammar_builder & builder) { + // tool calls can appear in commentary or analysis channels + auto channel = builder.add_rule("channel", "\"<|channel|>\" ( \"commentary\" | \"analysis\" )"); + + std::vector tool_rules_recipient_in_role; + std::vector tool_rules_recipient_in_channel; + foreach_function(inputs.tools, [&](const json & tool) { + const auto & function = tool.at("function"); + std::string name = function.at("name"); + auto parameters = function.at("parameters"); + builder.resolve_refs(parameters); + + tool_rules_recipient_in_role.push_back( + builder.add_rule(name + "-call", + "\"" + name + "\"" + channel + " \" <|constrain|>json\"? \"<|message|>\" " + + builder.add_schema(name + "-args", parameters) + ) + ); + + tool_rules_recipient_in_channel.push_back( + builder.add_rule(name + "-call", + "\"" + name + "\"" + " \" <|constrain|>json\"? 
\"<|message|>\" " + + builder.add_schema(name + "-args", parameters) + ) + ); + }); + + auto recipient_in_role = builder.add_rule("recipient_in_role", + "\"<|start|>assistant\"? \" to=functions.\" ( " + + string_join(tool_rules_recipient_in_role, " | ") + " )" + ); + + auto recipient_in_channel = builder.add_rule("recipient_in_channel", + channel + " \" to=functions.\" ( " + + string_join(tool_rules_recipient_in_channel, " | ") + " )" + ); + + builder.add_rule("root", recipient_in_role + " | " + recipient_in_channel); + + // Trigger on tool calls that appear in the commentary channel + data.grammar_triggers.push_back({ + COMMON_GRAMMAR_TRIGGER_TYPE_PATTERN, + "<\\|channel\\|>(commentary|analysis) to" + }); + + // Trigger tool calls that appear in the role section, either at the + // start or in the middle. + data.grammar_triggers.push_back({ + COMMON_GRAMMAR_TRIGGER_TYPE_PATTERN_FULL, + "^ to" + }); + + data.grammar_triggers.push_back({ + COMMON_GRAMMAR_TRIGGER_TYPE_PATTERN, + "<\\|start\\|>assistant to" + }); + }); + } return data; } static void common_chat_parse_gpt_oss(common_chat_msg_parser & builder) { - // TODO @ngxson : this won't work with --special enabled, we should fix that - builder.try_parse_reasoning("<|channel|>analysis<|message|>", "<|start|>assistant<|channel|>final<|message|>"); - if (!builder.syntax().parse_tool_calls) { - builder.add_content(builder.consume_rest()); - return; + static const std::string constraint = "(?: (<\\|constrain\\|>)?([a-zA-Z0-9_-]+))"; + static const std::string recipient("(?: to=functions\\.([^<\\s]+))"); + + static const common_regex start_regex("<\\|start\\|>assistant"); + static const common_regex analysis_regex("<\\|channel\\|>analysis"); + static const common_regex final_regex("<\\|channel\\|>final" + constraint + "?"); + static const common_regex preamble_regex("<\\|channel\\|>commentary"); + static const common_regex tool_call1_regex(recipient + "<\\|channel\\|>(analysis|commentary)" + constraint + "?"); + static const common_regex tool_call2_regex("<\\|channel\\|>(analysis|commentary)" + recipient + constraint + "?"); + + auto consume_end = [&](bool include_end = false) { + if (auto res = builder.try_find_literal("<|end|>")) { + return res->prelude + (include_end ? 
builder.str(res->groups[0]) : ""); + } + return builder.consume_rest(); + }; + + auto handle_tool_call = [&](const std::string & name) { + if (auto args = builder.try_consume_json_with_dumped_args({{}})) { + if (builder.syntax().parse_tool_calls) { + if (!builder.add_tool_call(name, "", args->value) || args->is_partial) { + throw common_chat_msg_partial_exception("incomplete tool call"); + } + } else if (args->is_partial) { + throw common_chat_msg_partial_exception("incomplete tool call"); + } + } + }; + + auto regex_match = [](const common_regex & regex, const std::string & input) -> std::optional { + auto match = regex.search(input, 0, true); + if (match.type == COMMON_REGEX_MATCH_TYPE_FULL) { + return match; + } + return std::nullopt; + }; + + do { + auto header_start_pos = builder.pos(); + auto content_start = builder.try_find_literal("<|message|>"); + if (!content_start) { + throw common_chat_msg_partial_exception("incomplete header"); + } + + auto header = content_start->prelude; + + if (auto match = regex_match(tool_call1_regex, header)) { + auto group = match->groups[1]; + auto name = header.substr(group.begin, group.end - group.begin); + handle_tool_call(name); + continue; + } + + if (auto match = regex_match(tool_call2_regex, header)) { + auto group = match->groups[2]; + auto name = header.substr(group.begin, group.end - group.begin); + handle_tool_call(name); + continue; + } + + if (regex_match(analysis_regex, header)) { + builder.move_to(header_start_pos); + if (builder.syntax().reasoning_format == COMMON_REASONING_FORMAT_NONE || builder.syntax().reasoning_in_content) { + builder.add_content(consume_end(true)); + } else { + builder.try_parse_reasoning("<|channel|>analysis<|message|>", "<|end|>"); + } + continue; + } + + if(regex_match(final_regex, header) || regex_match(preamble_regex, header)) { + builder.add_content(consume_end()); + continue; + } + + // Possibly a malformed message, attempt to recover by rolling + // back to pick up the next <|start|> + LOG_DBG("%s: unknown header from message: %s\n", __func__, header.c_str()); + builder.move_to(header_start_pos); + } while (builder.try_find_regex(start_regex, std::string::npos, false)); + + auto remaining = builder.consume_rest(); + if (!remaining.empty()) { + LOG_DBG("%s: content after last message: %s\n", __func__, remaining.c_str()); } } @@ -1887,8 +2060,8 @@ static common_chat_params common_chat_templates_apply_jinja( params.enable_thinking = inputs.enable_thinking; params.grammar = inputs.grammar; params.now = inputs.now; - params.add_bos = inputs.add_bos; - params.add_eos = inputs.add_eos; + params.add_bos = tmpls->add_bos; + params.add_eos = tmpls->add_eos; params.extra_context = json::object(); for (auto el : inputs.chat_template_kwargs) { diff --git a/common/chat.h b/common/chat.h index eb628d8bc2..d1e480c918 100644 --- a/common/chat.h +++ b/common/chat.h @@ -187,10 +187,12 @@ std::string common_chat_format_single( // Returns an example of formatted chat std::string common_chat_format_example( const struct common_chat_templates * tmpls, - bool use_jinja); + bool use_jinja, + const std::map & chat_template_kwargs); const char* common_chat_format_name(common_chat_format format); const char* common_reasoning_format_name(common_reasoning_format format); +common_reasoning_format common_reasoning_format_from_name(const std::string & format); common_chat_msg common_chat_parse(const std::string & input, bool is_partial, const common_chat_syntax & syntax); common_chat_tool_choice 
common_chat_tool_choice_parse_oaicompat(const std::string & tool_choice); diff --git a/common/common.cpp b/common/common.cpp index c6962d1d19..67dd5404ff 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -41,6 +41,7 @@ #endif #include #include +#include #include #include #else @@ -1565,3 +1566,56 @@ ggml_opt_dataset_t common_opt_dataset_init(struct llama_context * ctx, const std return result; } + +ggml_opt_optimizer_params common_opt_lr_pars(void * userdata) { + ggml_opt_optimizer_params result = ggml_opt_get_default_optimizer_params(nullptr); + const lr_opt & d = *(lr_opt *) userdata; + result.adamw.alpha = result.sgd.alpha = d.get_lr(d.epoch); + result.sgd.wd = result.adamw.wd = d.wd; + return result; +} + +// TODO make all command line args case-insensitive +static inline bool eq_case_insensitive(char const* a, char const* b) { + return ! +#if defined(_MSC_VER) + _stricmp +#else + strcasecmp +#endif // defined(_MSC_VER) + (a, b); +} + +enum ggml_opt_optimizer_type common_opt_get_optimizer(const char * n) { + if (eq_case_insensitive("adamw", n)) { + return GGML_OPT_OPTIMIZER_TYPE_ADAMW; + } + if (eq_case_insensitive("sgd", n)) { + return GGML_OPT_OPTIMIZER_TYPE_SGD; + } + return GGML_OPT_OPTIMIZER_TYPE_COUNT; +} + +// TODO simplify to use just log and exp +static float const k_log_2 = std::log(2.f); + +void lr_opt::init() { + if (lr_min > 0 && lr_min < lr0) { + float nhalf = std::log(lr0 / lr_min) / k_log_2; + float e = epochs; + if (decay_epochs > 0 && decay_epochs < e) { + e = decay_epochs; + } else { + decay_epochs = e; + } + scale_epoch = nhalf / e; + } +} + +float lr_opt::get_lr(float epoch) const { + float r = lr_min <= 0 ? lr0 : + epoch >= decay_epochs ? lr_min : + lr0 * std::pow(0.5f, epoch * scale_epoch); + LOG_INF("epoch %.2g lr=%.2g\n", epoch, r); + return r; +} diff --git a/common/common.h b/common/common.h index 5eab199af5..920de7b50a 100644 --- a/common/common.h +++ b/common/common.h @@ -2,14 +2,17 @@ #pragma once -#include "llama-cpp.h" - #include +#include #include #include #include #include #include +#include + +#include "ggml-opt.h" +#include "llama-cpp.h" #ifdef _WIN32 #define DIRECTORY_SEPARATOR '\\' @@ -82,6 +85,7 @@ enum llama_example { LLAMA_EXAMPLE_PARALLEL, LLAMA_EXAMPLE_TTS, LLAMA_EXAMPLE_DIFFUSION, + LLAMA_EXAMPLE_FINETUNE, LLAMA_EXAMPLE_COUNT, }; @@ -202,6 +206,7 @@ struct common_params_speculative { float p_split = 0.1f; // speculative decoding split probability float p_min = 0.75f; // minimum speculative decoding probability (greedy) std::vector> replacements; // main to speculative model replacements + std::vector tensor_buft_overrides; ggml_type cache_type_k = GGML_TYPE_F16; // KV cache data type for the K ggml_type cache_type_v = GGML_TYPE_F16; // KV cache data type for the V @@ -234,14 +239,36 @@ struct common_params_diffusion { bool add_gumbel_noise = false; // add gumbel noise to the logits if temp > 0.0 }; +// reasoning API response format (not to be confused as chat template's reasoning format) enum common_reasoning_format { COMMON_REASONING_FORMAT_NONE, - COMMON_REASONING_FORMAT_AUTO, + COMMON_REASONING_FORMAT_AUTO, // Same as deepseek, using `message.reasoning_content` COMMON_REASONING_FORMAT_DEEPSEEK_LEGACY, // Extract thinking tag contents and return as `message.reasoning_content`, or leave inline in tags in stream mode COMMON_REASONING_FORMAT_DEEPSEEK, // Extract thinking tag contents and return as `message.reasoning_content`, including in streaming deltas. 
- COMMON_REASONING_FORMAT_GRANITE, // Extract thinking tag contents and return as `message.reasoning_content`, including in streaming deltas. + // do not extend this enum unless you absolutely have to + // in most cases, use COMMON_REASONING_FORMAT_AUTO + // see: https://github.com/ggml-org/llama.cpp/pull/15408 }; + +struct lr_opt { + float lr0 = 1e-5; // learning rate at first epoch + float lr_min = -1; + float decay_epochs = -1; // if >0, the learning rate starts at lr0 and decays to lr_min after this many epochs + float scale_epoch = 0; + float wd = 0; + unsigned epochs = 2; + + unsigned epoch; // set by optimizer outer (epochs) loop + // learning rate decay - constant LR per epoch only for now + float get_lr(float e) const; + float get_lr() const { return get_lr(epoch); } + // must call after arg parse, before get_lr + void init(); +}; + +struct ggml_opt_optimizer_params common_opt_lr_pars(void * userdata); + struct common_params { int32_t n_predict = -1; // new tokens to predict int32_t n_ctx = 4096; // context size @@ -348,7 +375,7 @@ struct common_params { bool cont_batching = true; // insert new sequences for decoding on-the-fly bool flash_attn = false; // flash attention bool no_perf = false; // disable performance metrics - bool ctx_shift = true; // context shift on inifinite text generation + bool ctx_shift = false; // context shift on inifinite text generation bool swa_full = false; // use full-size SWA cache (https://github.com/ggml-org/llama.cpp/pull/13194#issuecomment-2868343055) bool kv_unified = false; // enable unified KV cache @@ -376,6 +403,11 @@ struct common_params { bool no_mmproj = false; // explicitly disable multimodal model std::vector image; // path to image file(s) + // finetune + struct lr_opt lr; + enum ggml_opt_optimizer_type optimizer = GGML_OPT_OPTIMIZER_TYPE_ADAMW; + float val_split = 0.05f; // fraction of the data used for the validation set + // embedding bool embedding = false; // get only sentence embedding int32_t embd_normalize = 2; // normalisation for embeddings (-1=none, 0=max absolute int16, 1=taxicab, 2=euclidean, >2=p-norm) @@ -384,11 +416,12 @@ struct common_params { std::string cls_sep = "\t"; // separator of classification sequences // server params - int32_t port = 8080; // server listens on this network port - int32_t timeout_read = 600; // http read timeout in seconds - int32_t timeout_write = timeout_read; // http write timeout in seconds - int32_t n_threads_http = -1; // number of threads to process HTTP requests (TODO: support threadpool) - int32_t n_cache_reuse = 0; // min chunk size to reuse from the cache via KV shifting + int32_t port = 8080; // server listens on this network port + int32_t timeout_read = 600; // http read timeout in seconds + int32_t timeout_write = timeout_read; // http write timeout in seconds + int32_t n_threads_http = -1; // number of threads to process HTTP requests (TODO: support threadpool) + int32_t n_cache_reuse = 0; // min chunk size to reuse from the cache via KV shifting + int32_t n_swa_checkpoints = 3; // max number of SWA checkpoints per slot std::string hostname = "127.0.0.1"; std::string public_path = ""; // NOLINT @@ -703,3 +736,6 @@ const char * const LLM_KV_SPLIT_TENSORS_COUNT = "split.tensors.count"; // ggml_opt_dataset_t common_opt_dataset_init(struct llama_context * ctx, const std::vector & tokens, int64_t stride); + +// "adamw" or "sgd" (case insensitive) +enum ggml_opt_optimizer_type common_opt_get_optimizer(const char *); diff --git a/convert_hf_to_gguf.py b/convert_hf_to_gguf.py index 
b8c7d97a78..b45c8f1d7f 100755 --- a/convert_hf_to_gguf.py +++ b/convert_hf_to_gguf.py @@ -28,6 +28,14 @@ if TYPE_CHECKING: if 'NO_LOCAL_GGUF' not in os.environ: sys.path.insert(1, str(Path(__file__).parent / 'gguf-py')) import gguf +from gguf.vocab import MistralTokenizerType, MistralVocab +from mistral_common.tokens.tokenizers.base import TokenizerVersion +from mistral_common.tokens.tokenizers.multimodal import DATASET_MEAN, DATASET_STD +from mistral_common.tokens.tokenizers.tekken import Tekkenizer +from mistral_common.tokens.tokenizers.sentencepiece import ( + SentencePieceTokenizer, +) + logger = logging.getLogger("hf-to-gguf") @@ -81,6 +89,8 @@ class ModelBase: block_count: int tensor_map: gguf.TensorNameMap + is_mistral_format: bool = False + def __init__(self, dir_model: Path, ftype: gguf.LlamaFileType, fname_out: Path, *, is_big_endian: bool = False, use_temp_file: bool = False, eager: bool = False, metadata_override: Path | None = None, model_name: str | None = None, @@ -106,16 +116,17 @@ class ModelBase: logger.info(f"Using remote model with HuggingFace id: {remote_hf_model_id}") remote_tensors = gguf.utility.SafetensorRemote.get_list_tensors_hf_model(remote_hf_model_id) self.tensor_names = set(name for name in remote_tensors.keys()) - for name, remote_tensor in gguf.utility.SafetensorRemote.get_list_tensors_hf_model(remote_hf_model_id).items(): + for name, remote_tensor in remote_tensors.items(): yield (name, LazyTorchTensor.from_remote_tensor(remote_tensor)) self.get_tensors = get_remote_tensors else: - self.part_names = ModelBase.get_model_part_names(self.dir_model, "model", ".safetensors") + prefix = "model" if not self.is_mistral_format else "consolidated" + self.part_names = ModelBase.get_model_part_names(self.dir_model, prefix, ".safetensors") self.is_safetensors = len(self.part_names) > 0 if not self.is_safetensors: self.part_names = ModelBase.get_model_part_names(self.dir_model, "pytorch_model", ".bin") - self.hparams = ModelBase.load_hparams(self.dir_model) if hparams is None else hparams + self.hparams = ModelBase.load_hparams(self.dir_model, self.is_mistral_format) if hparams is None else hparams self.tensor_names = None self.metadata_override = metadata_override self.model_name = model_name @@ -153,19 +164,23 @@ class ModelBase: def get_tensors(self) -> Iterator[tuple[str, Tensor]]: tensor_names_from_parts: set[str] = set() - index_name = "model.safetensors" if self.is_safetensors else "pytorch_model.bin" - index_name += ".index.json" - index_file = self.dir_model / index_name + if not self.is_mistral_format: + index_name = "model.safetensors" if self.is_safetensors else "pytorch_model.bin" + index_name += ".index.json" + index_file = self.dir_model / index_name - if index_file.is_file(): - self.tensor_names = set() - logger.info(f"gguf: loading model weight map from '{index_name}'") - with open(index_file, "r", encoding="utf-8") as f: - index: dict[str, Any] = json.load(f) - weight_map = index.get("weight_map") - if weight_map is None or not isinstance(weight_map, dict): - raise ValueError(f"Can't load 'weight_map' from {index_name!r}") - self.tensor_names.update(weight_map.keys()) + if index_file.is_file(): + self.tensor_names = set() + logger.info(f"gguf: loading model weight map from '{index_name}'") + with open(index_file, "r", encoding="utf-8") as f: + index: dict[str, Any] = json.load(f) + weight_map = index.get("weight_map") + if weight_map is None or not isinstance(weight_map, dict): + raise ValueError(f"Can't load 'weight_map' from {index_name!r}") + 
self.tensor_names.update(weight_map.keys()) + else: + self.tensor_names = tensor_names_from_parts + weight_map = {} else: self.tensor_names = tensor_names_from_parts weight_map = {} @@ -426,7 +441,12 @@ class ModelBase: return part_names @staticmethod - def load_hparams(dir_model: Path): + def load_hparams(dir_model: Path, is_mistral_format: bool): + if is_mistral_format: + with open(dir_model / "params.json", "r", encoding="utf-8") as f: + config = json.load(f) + return config + try: # for security reason, we don't allow loading remote code by default # if a model need remote code, we will fallback to config.json @@ -476,7 +496,10 @@ class TextModel(ModelBase): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.hf_arch = get_model_architecture(self.hparams, self.model_type) + if not self.is_mistral_format: + self.hf_arch = get_model_architecture(self.hparams, self.model_type) + else: + self.hf_arch = "" if "text_config" in self.hparams: # move the text_config to the root level @@ -542,14 +565,14 @@ class TextModel(ModelBase): self.gguf_writer.add_head_count(n_head) logger.info(f"gguf: head count = {n_head}") - if (n_head_kv := self.hparams.get("num_key_value_heads")) is not None: + if (n_head_kv := self.find_hparam(["num_key_value_heads", "n_kv_heads"], optional=True)) is not None: self.gguf_writer.add_head_count_kv(n_head_kv) logger.info(f"gguf: key-value head count = {n_head_kv}") if (rope_theta := self.hparams.get("rope_theta")) is not None: self.gguf_writer.add_rope_freq_base(rope_theta) logger.info(f"gguf: rope theta = {rope_theta}") - if (f_rms_eps := self.hparams.get("rms_norm_eps")) is not None: + if (f_rms_eps := self.find_hparam(["rms_norm_eps", "norm_eps"], optional=True)) is not None: self.gguf_writer.add_layer_norm_rms_eps(f_rms_eps) logger.info(f"gguf: rms norm epsilon = {f_rms_eps}") if (f_norm_eps := self.find_hparam(["layer_norm_eps", "layer_norm_epsilon", "norm_epsilon"], optional=True)) is not None: @@ -1210,12 +1233,19 @@ class MmprojModel(ModelBase): raise TypeError("MmprojModel must be subclassed with model_arch = gguf.MODEL_ARCH.MMPROJ") # get n_embd of the text model - if "text_config" not in self.hparams: - self.hparams["text_config"] = {} - if "audio_config" not in self.hparams: - self.hparams["audio_config"] = {} - text_config = {**self.hparams, **self.hparams["text_config"]} - self.n_embd_text = text_config.get("hidden_size", text_config.get("n_embd", 0)) + if not self.is_mistral_format: + if "text_config" not in self.hparams: + self.hparams["text_config"] = {} + if "audio_config" not in self.hparams: + self.hparams["audio_config"] = {} + text_config = {**self.hparams, **self.hparams["text_config"]} + self.n_embd_text = text_config.get("hidden_size", text_config.get("n_embd", 0)) + else: + text_config = { + k: v for k, v in self.hparams.items() if k not in ["vision_encoder", "audio_encoder"] + } + self.n_embd_text = text_config.get("hidden_dim", 0) + assert self.n_embd_text > 0, "n_embd not found in hparams" # move vision config to the top level, while preserving the original hparams in global_config @@ -1236,11 +1266,13 @@ class MmprojModel(ModelBase): self.tensor_map = gguf.get_tensor_name_map(gguf.MODEL_ARCH.MMPROJ, self.block_count) # load preprocessor config - with open(self.dir_model / "preprocessor_config.json", "r", encoding="utf-8") as f: - self.preprocessor_config = json.load(f) + if not self.is_mistral_format: + with open(self.dir_model / "preprocessor_config.json", "r", encoding="utf-8") as f: + self.preprocessor_config = 
json.load(f) def get_vision_config(self) -> dict[str, Any] | None: - return self.global_config.get("vision_config") + config_name = "vision_config" if not self.is_mistral_format else "vision_encoder" + return self.global_config.get(config_name) def get_audio_config(self) -> dict[str, Any] | None: return self.global_config.get("audio_config") @@ -1264,8 +1296,11 @@ class MmprojModel(ModelBase): self.gguf_writer.add_vision_head_count(self.find_vparam(["num_attention_heads"])) # preprocessor config - self.gguf_writer.add_vision_image_mean(self.preprocessor_config["image_mean"]) - self.gguf_writer.add_vision_image_std(self.preprocessor_config["image_std"]) + image_mean = DATASET_MEAN if self.is_mistral_format else self.preprocessor_config["image_mean"] + image_std = DATASET_STD if self.is_mistral_format else self.preprocessor_config["image_std"] + + self.gguf_writer.add_vision_image_mean(image_mean) + self.gguf_writer.add_vision_image_std(image_std) if self.has_audio_encoder: self.gguf_writer.add_clip_has_audio_encoder(True) @@ -1299,6 +1334,12 @@ class MmprojModel(ModelBase): return None raise KeyError(f"could not find any of: {keys}") + def tensor_force_quant(self, name, new_name, bid, n_dims): + del bid, name, n_dims # unused + if ".patch_embd.weight" in new_name: + return gguf.GGMLQuantizationType.F16 if self.ftype == gguf.LlamaFileType.MOSTLY_F16 else gguf.GGMLQuantizationType.F32 + return False + @ModelBase.register("GPTNeoXForCausalLM") class GPTNeoXModel(TextModel): @@ -1924,11 +1965,63 @@ class LlamaModel(TextModel): if self.hf_arch == "VLlama3ForCausalLM": self.hparams["num_attention_heads"] = self.hparams.get("num_attention_heads", 32) + def _set_vocab_mistral(self): + vocab = MistralVocab(self.dir_model) + logger.info( + f"Converting tokenizer {vocab.tokenizer_type} of size {vocab.vocab_size}." + ) + + self.gguf_writer.add_tokenizer_model(vocab.gguf_tokenizer_model) + + tokens = [] + scores = [] + toktypes = [] + + for text, score, toktype in vocab.all_tokens(): + tokens.append(text) + scores.append(score) + toktypes.append(toktype) + + assert len(tokens) == vocab.vocab_size, ( + f"token count ({len(tokens)}) != vocab size ({vocab.vocab_size})" + ) + + if vocab.tokenizer_type == MistralTokenizerType.tekken: + self.gguf_writer.add_tokenizer_pre("tekken") + self.gguf_writer.add_token_merges( + vocab.extract_vocab_merges_from_model() + ) + + logger.info( + f"Setting bos, eos, unk and pad token IDs to {vocab.bos_id}, {vocab.eos_id}, {vocab.unk_id}, {vocab.pad_id}." 
+ ) + + self.gguf_writer.add_bos_token_id(vocab.bos_id) + self.gguf_writer.add_eos_token_id(vocab.eos_id) + self.gguf_writer.add_unk_token_id(vocab.unk_id) + self.gguf_writer.add_pad_token_id(vocab.pad_id) + + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_scores(scores) + self.gguf_writer.add_token_types(toktypes) + self.gguf_writer.add_vocab_size(vocab.vocab_size) + + self.gguf_writer.add_add_bos_token(True) + self.gguf_writer.add_add_eos_token(False) + + template_dir = Path(__file__).parent / "models/templates/" + + template = MistralModel.get_community_chat_template(vocab, template_dir) + self.gguf_writer.add_chat_template(template) + def set_vocab(self): + if self.is_mistral_format: + return self._set_vocab_mistral() + path_tekken_json = self.dir_model / "tekken.json" path_tokenizer_json = self.dir_model / "tokenizer.json" if path_tekken_json.is_file() and not path_tokenizer_json.is_file(): - return self.set_vocab_tekken() + self._set_vocab_mistral() try: self._set_vocab_sentencepiece() @@ -1962,56 +2055,12 @@ class LlamaModel(TextModel): if self.hparams.get("vocab_size", 32000) == 49152: self.gguf_writer.add_add_bos_token(False) - def set_vocab_tekken(self): - vocab = gguf.vocab.MistralVocab(self.dir_model) - self.gguf_writer.add_tokenizer_model(vocab.gguf_tokenizer_model) - - tokens = [] - scores = [] - toktypes = [] - - for text, score, toktype in vocab.all_tokens(): - tokens.append(text) - scores.append(score) - toktypes.append(toktype) - - assert len(tokens) == vocab.vocab_size, ( - f"token count ({len(tokens)}) != vocab size ({vocab.vocab_size})" - ) - - if vocab.tokenizer_type == gguf.vocab.MistralTokenizerType.tekken: - self.gguf_writer.add_tokenizer_pre("tekken") - self.gguf_writer.add_token_merges( - vocab.extract_vocab_merges_from_model() - ) - - logger.info( - f"Setting bos, eos, unk and pad token IDs to {vocab.bos_id}, {vocab.eos_id}, {vocab.unk_id}, {vocab.pad_id}." 
- ) - - self.gguf_writer.add_bos_token_id(vocab.bos_id) - self.gguf_writer.add_eos_token_id(vocab.eos_id) - self.gguf_writer.add_unk_token_id(vocab.unk_id) - self.gguf_writer.add_pad_token_id(vocab.pad_id) - - self.gguf_writer.add_token_list(tokens) - self.gguf_writer.add_token_scores(scores) - self.gguf_writer.add_token_types(toktypes) - self.gguf_writer.add_vocab_size(vocab.vocab_size) - - self.gguf_writer.add_add_bos_token(True) - self.gguf_writer.add_add_eos_token(False) - - script_dir = Path(__file__).parent - template_path = script_dir / "models/templates/unsloth-mistral-Devstral-Small-2507.jinja" - with open(template_path, "r", encoding="utf-8") as f: - template = f.read() - self.gguf_writer.add_chat_template(template) - def set_gguf_parameters(self): super().set_gguf_parameters() hparams = self.hparams - self.gguf_writer.add_vocab_size(hparams["vocab_size"]) + + if not self.is_mistral_format: + self.gguf_writer.add_vocab_size(hparams["vocab_size"]) if (rope_dim := hparams.get("head_dim")) is None: rope_dim = hparams["hidden_size"] // hparams["num_attention_heads"] @@ -2033,13 +2082,25 @@ class LlamaModel(TextModel): _experts: list[dict[str, Tensor]] | None = None def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]: - n_head = self.hparams["num_attention_heads"] - n_kv_head = self.hparams.get("num_key_value_heads") + n_head = self.find_hparam(["n_heads", "num_attention_heads"]) + n_kv_head = self.find_hparam(["n_kv_heads", "num_key_value_heads"]) + + vision_prefixes = [ + "vision_encoder.", + "vision_language_adapter.", + "patch_merger.", + "pre_mm_projector_norm", + ] + is_multimodal_tensor = "vision_tower" in name \ or "vision_model" in name \ or "audio_tower" in name \ or "model.connector" in name \ - or "multi_modal_projector" in name + or "multi_modal_projector" in name \ + or any( + name.startswith(prefix) + for prefix in vision_prefixes + ) if is_multimodal_tensor: return [] # skip vision tensors @@ -2155,13 +2216,18 @@ class LlavaVisionModel(MmprojModel): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - if self.hparams["model_type"] == "pixtral": + if self.hparams.get("model_type") == "pixtral": # layer_norm_eps is not in config.json, it is hard-coded in modeling_pixtral.py self.hparams["layer_norm_eps"] = self.hparams.get("layer_norm_eps", 1e-5) self.img_break_tok_id = self.get_token_id("[IMG_BREAK]") - logger.info(f"Image break token id: {self.img_break_tok_id}") + elif self.is_mistral_format: + # hparams is already vision config here so norm_eps is only defined in global_config. 
+ self.hparams["norm_eps"] = self.global_config.get("norm_eps", None) + assert self.hparams["norm_eps"] is not None, "norm_eps not found in params.json" + self.img_break_tok_id = self.find_vparam(["image_break_token_id"]) else: raise ValueError(f"Unsupported model type: {self.hparams['model_type']}") + logger.info(f"Image break token id: {self.img_break_tok_id}") def get_token_id(self, token: str) -> int: tokenizer_config_file = self.dir_model / 'tokenizer_config.json' @@ -2175,7 +2241,7 @@ class LlavaVisionModel(MmprojModel): def set_gguf_parameters(self): super().set_gguf_parameters() hparams = self.hparams - if hparams["model_type"] == "pixtral": + if hparams.get("model_type") == "pixtral": self.gguf_writer.add_clip_projector_type(gguf.VisionProjectorType.PIXTRAL) self.gguf_writer.add_vision_attention_layernorm_eps(hparams["layer_norm_eps"]) @@ -2193,18 +2259,30 @@ class LlavaVisionModel(MmprojModel): def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]: del bid # unused - n_head = self.hparams["num_attention_heads"] + n_head = ( + self.hparams["num_attention_heads"] if not self.is_mistral_format else self.find_vparam(["num_attention_heads"]) + ) n_kv_head = n_head - if name.startswith("multi_modal_projector.") or name.startswith("vision_tower."): + valid_prefixes = ( + "multi_modal_projector.", + "vision_tower.", + "vision_encoder.", + "vision_language_adapter.", + "patch_merger.", + "pre_mm_projector_norm", + ) + + if any(name.startswith(prefix) for prefix in valid_prefixes): # process vision tensors - if name.endswith(("q_proj.weight", "q_proj.bias")): + if name.endswith(("q_proj.weight", "q_proj.bias")) and not self.is_mistral_format: data_torch = LlamaModel.permute(data_torch, n_head, n_head) - if name.endswith(("k_proj.weight", "k_proj.bias")): + if name.endswith(("k_proj.weight", "k_proj.bias")) and not self.is_mistral_format: data_torch = LlamaModel.permute(data_torch, n_head, n_kv_head) return [(self.map_tensor_name(name), data_torch)] - if self.img_break_tok_id > 0 and "embed_tokens.weight" in name: + embed_key = "embed_tokens.weight" if not self.is_mistral_format else "tok_embeddings.weight" + if self.img_break_tok_id > 0 and embed_key in name: logger.info(f"Extracting [IMG_BREAK] token embedding from {name}") # for pixtral model, we need to extract the [IMG_BREAK] token embedding img_break_embd = data_torch[self.img_break_tok_id] @@ -2233,10 +2311,9 @@ class SmolVLMModel(MmprojModel): self.gguf_writer.add_vision_use_gelu(True) def tensor_force_quant(self, name, new_name, bid, n_dims): - del bid, new_name, n_dims # unused if ".embeddings." in name: return gguf.GGMLQuantizationType.F32 - return False + return super().tensor_force_quant(name, new_name, bid, n_dims) def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]: del bid # unused @@ -3224,12 +3301,9 @@ class Qwen2VLVisionModel(MmprojModel): self.gguf_writer.add_vision_attention_layernorm_eps(self.global_config.get("rms_norm_eps", 1e-6)) def tensor_force_quant(self, name, new_name, bid, n_dims): - del bid, name, n_dims # unused - if ".patch_embd." in new_name: - return gguf.GGMLQuantizationType.F16 if ".position_embd." 
in new_name: return gguf.GGMLQuantizationType.F32 - return False + return super().tensor_force_quant(name, new_name, bid, n_dims) def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]: del bid # unused @@ -3302,10 +3376,9 @@ class Qwen25OmniModel(Qwen2VLVisionModel): yield ("audio_tower.embed_positions.weight", pos_embd) def tensor_force_quant(self, name, new_name, bid, n_dims): - del bid, new_name, n_dims # unused if ".conv" in name and ".weight" in name: return gguf.GGMLQuantizationType.F16 - return False + return super().tensor_force_quant(name, new_name, bid, n_dims) def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]: if name.startswith("thinker."): @@ -3351,12 +3424,9 @@ class InternVisionModel(MmprojModel): self.gguf_writer.add_vision_projector_scale_factor(int(1.0 / downsample_ratio)) def tensor_force_quant(self, name, new_name, bid, n_dims): - del bid, name, n_dims # unused - if ".patch_embd." in new_name: - return gguf.GGMLQuantizationType.F16 if ".position_embd." in new_name: return gguf.GGMLQuantizationType.F32 - return False + return super().tensor_force_quant(name, new_name, bid, n_dims) def _mapping_interns1_name(self, name): names_map = { @@ -3526,7 +3596,7 @@ class Qwen3MoeModel(Qwen2MoeModel): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - hparams = ModelBase.load_hparams(self.dir_model) + hparams = ModelBase.load_hparams(self.dir_model, False) self.origin_hf_arch = hparams.get('architectures', [None])[0] def set_vocab(self): @@ -4683,7 +4753,7 @@ class NomicBertModel(BertModel): def __init__(self, dir_model: Path, ftype: gguf.LlamaFileType, fname_out: Path, **kwargs: Any): hparams = kwargs.pop("hparams", None) if hparams is None: - hparams = ModelBase.load_hparams(dir_model) + hparams = ModelBase.load_hparams(dir_model, False) self.is_moe = bool(hparams.get("moe_every_n_layers")) self.model_arch = gguf.MODEL_ARCH.NOMIC_BERT_MOE if self.is_moe else gguf.MODEL_ARCH.NOMIC_BERT @@ -4990,13 +5060,12 @@ class Gemma3VisionModel(MmprojModel): self.gguf_writer.add_vision_projector_scale_factor(proj_scale_factor) def tensor_force_quant(self, name, new_name, bid, n_dims): - del bid, new_name, n_dims # unused # related to https://github.com/ggml-org/llama.cpp/issues/13025 if "input_projection" in name: return gguf.GGMLQuantizationType.F16 if ".embeddings." 
in name: return gguf.GGMLQuantizationType.F32 - return False + return super().tensor_force_quant(name, new_name, bid, n_dims) def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]: del bid # unused @@ -7655,10 +7724,9 @@ class WhisperEncoderModel(MmprojModel): self.gguf_writer.add_audio_attention_layernorm_eps(self.hparams.get("layer_norm_eps", 1e-5)) def tensor_force_quant(self, name, new_name, bid, n_dims): - del bid, new_name, n_dims # unused if ".conv" in name and ".weight" in name: return gguf.GGMLQuantizationType.F16 - return False + return super().tensor_force_quant(name, new_name, bid, n_dims) def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]: del bid # unused @@ -8179,8 +8247,7 @@ class GptOssModel(TextModel): self.gguf_writer.add_rope_scaling_orig_ctx_len(rope_scaling.get("original_max_position_embeddings", 4096)) -@ModelBase.register("Lfm2ForCausalLM") -@ModelBase.register("LFM2ForCausalLM") +@ModelBase.register("Lfm2ForCausalLM", "LFM2ForCausalLM") class LFM2Model(TextModel): model_arch = gguf.MODEL_ARCH.LFM2 @@ -8215,6 +8282,13 @@ class LFM2Model(TextModel): self._add_feed_forward_length() def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]: + is_vision_tensor = "vision_tower" in name or "multi_modal_projector" in name + if is_vision_tensor: + # skip vision tensors + return [] + + name = name.replace("language_model.", "") + # conv op requires 2d tensor if 'conv.conv' in name: data_torch = data_torch.squeeze(1) @@ -8222,6 +8296,41 @@ class LFM2Model(TextModel): return [(self.map_tensor_name(name), data_torch)] +@ModelBase.register("Lfm2VlForConditionalGeneration") +class LFM2VLModel(MmprojModel): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + assert self.hparams_vision is not None + # TODO(tarek): for dynamic resolution image_size is not specified, setting here for compatibility + self.hparams_vision["image_size"] = 256 + + def set_gguf_parameters(self): + super().set_gguf_parameters() + self.gguf_writer.add_clip_projector_type(gguf.VisionProjectorType.LFM2) + self.gguf_writer.add_vision_attention_layernorm_eps(self.find_vparam(["layer_norm_eps"])) + self.gguf_writer.add_vision_projector_scale_factor(self.global_config.get("downsample_factor", 2)) + self.gguf_writer.add_vision_use_gelu(True) + # python notation, e.g. for vision_feature_layer == -1, we pick last layer -> vision_feature_layers_to_drop = 0 + vision_feature_layers_to_drop = -(self.global_config.get("vision_feature_layer", -1) + 1) + self.gguf_writer.add_vision_block_count(self.find_vparam(self.n_block_keys) - vision_feature_layers_to_drop) + + def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]: + del bid # unused + is_vision_tensor = "vision_tower" in name or "multi_modal_projector" in name + + if is_vision_tensor: + # remove "model." 
prefix + name = name.replace("model.vision_tower.", "vision_tower.") + name = name.replace("model.multi_modal_projector.", "multi_modal_projector.") + + if "patch_embedding.weight" in name: + data_torch = data_torch.view(data_torch.shape[0], 16, 16, 3).permute(0, 3, 1, 2) + + return [(self.map_tensor_name(name), data_torch)] + + return [] # skip other tensors + + @ModelBase.register("SmallThinkerForCausalLM") class SmallThinkerModel(TextModel): model_arch = gguf.MODEL_ARCH.SMALLTHINKER @@ -8304,6 +8413,77 @@ class SmallThinkerModel(TextModel): if len(experts) > 0: raise ValueError(f"Unprocessed experts: {experts}") + +class MistralModel(LlamaModel): + model_arch = gguf.MODEL_ARCH.LLAMA + model_name = "Mistral" + hf_arch = "" + is_mistral_format = True + undo_permute = False + + @staticmethod + def get_community_chat_template(vocab: MistralVocab, templates_dir: Path): + assert TokenizerVersion is not None, "mistral_common is not installed" + assert isinstance(vocab.tokenizer, (Tekkenizer, SentencePieceTokenizer)), ( + f"Expected Tekkenizer or SentencePieceTokenizer, got {type(vocab.tokenizer)}" + ) + + if vocab.tokenizer.version == TokenizerVersion.v1: + return "mistral-v1" + elif vocab.tokenizer.version == TokenizerVersion.v3 and vocab.tokenizer_type == MistralTokenizerType.spm: + return "mistral-v3" + elif vocab.tokenizer.version == TokenizerVersion.v3 and vocab.tokenizer_type == MistralTokenizerType.tekken: + return "mistral-v3-tekken" + elif vocab.tokenizer.version == TokenizerVersion.v7 and vocab.tokenizer_type == MistralTokenizerType.spm: + return "mistral-v7" + elif vocab.tokenizer.version == TokenizerVersion.v7 and vocab.tokenizer_type == MistralTokenizerType.tekken: + return "mistral-v7-tekken" + elif vocab.tokenizer.version == TokenizerVersion.v11: + template_file = "Mistral-Small-3.2-24B-Instruct-2506.jinja" + elif vocab.tokenizer.version == TokenizerVersion.v13: + template_file = "unsloth-mistral-Devstral-Small-2507.jinja" + else: + raise ValueError(f"Unknown tokenizer type: {vocab.tokenizer_type} and version {vocab.tokenizer.version}") + + template_path = templates_dir / template_file + if not template_path.exists(): + raise FileNotFoundError(f"Template file not found: {template_path}") + + with open(template_path, "r", encoding="utf-8") as f: + template = f.read() + + return template + + +class PixtralModel(LlavaVisionModel): + model_name = "Pixtral" + hf_arch = "" + is_mistral_format = True + + def set_gguf_parameters(self): + super().set_gguf_parameters() + self.gguf_writer.add_clip_projector_type(gguf.VisionProjectorType.PIXTRAL) + + self.gguf_writer.add_vision_attention_layernorm_eps( + self.find_hparam(["norm_eps"]) + ) + self.gguf_writer.add_rope_freq_base(self.find_vparam(["rope_theta"])) + + self.gguf_writer.add_vision_use_silu(True) + + # spatial_merge_size + if self.find_vparam(["mm_projector_id"]) == "patch_merge": + self.gguf_writer.add_vision_spatial_merge_size( + self.find_vparam(["spatial_merge_size"]) + ) + + def map_tensor_name(self, name: str, try_suffixes: Sequence[str] = (".weight", ".bias")) -> str: + if name == "vision_language_adapter.w_in.weight": + return "mm.1.weight" + elif name == "vision_language_adapter.w_out.weight": + return "mm.2.weight" + return super().map_tensor_name(name, try_suffixes) + ###### CONVERSION LOGIC ###### @@ -8454,6 +8634,10 @@ def parse_args() -> argparse.Namespace: "--mmproj", action="store_true", help="(Experimental) Export multimodal projector (mmproj) for vision models. This will only work on some vision models. 
A prefix 'mmproj-' will be added to the output file name.", ) + parser.add_argument( + "--mistral-format", action="store_true", + help="Whether the model is stored following the Mistral format.", + ) args = parser.parse_args() if not args.print_supported_models and args.model is None: @@ -8559,17 +8743,25 @@ def main() -> None: if "mmproj" not in fname_out.name: fname_out = ModelBase.add_prefix_to_filename(fname_out, "mmproj-") + is_mistral_format = args.mistral_format + with torch.inference_mode(): output_type = ftype_map[args.outtype] model_type = ModelType.MMPROJ if args.mmproj else ModelType.TEXT - hparams = ModelBase.load_hparams(dir_model) - model_architecture = get_model_architecture(hparams, model_type) - logger.info(f"Model architecture: {model_architecture}") - try: - model_class = ModelBase.from_model_architecture(model_architecture, model_type=model_type) - except NotImplementedError: - logger.error(f"Model {model_architecture} is not supported") - sys.exit(1) + hparams = ModelBase.load_hparams(dir_model, is_mistral_format) + if not is_mistral_format: + model_architecture = get_model_architecture(hparams, model_type) + logger.info(f"Model architecture: {model_architecture}") + try: + model_class = ModelBase.from_model_architecture(model_architecture, model_type=model_type) + except NotImplementedError: + logger.error(f"Model {model_architecture} is not supported") + sys.exit(1) + elif args.mmproj: + assert hparams.get("vision_encoder") is not None, "This model does not support multimodal" + model_class = PixtralModel + else: + model_class = MistralModel model_instance = model_class(dir_model, output_type, fname_out, is_big_endian=args.bigendian, use_temp_file=args.use_temp_file, @@ -8578,7 +8770,8 @@ def main() -> None: split_max_tensors=args.split_max_tensors, split_max_size=split_str_to_n_bytes(args.split_max_size), dry_run=args.dry_run, small_first_shard=args.no_tensor_first_split, - remote_hf_model_id=hf_repo_id) + remote_hf_model_id=hf_repo_id, + ) if args.vocab_only: logger.info("Exporting model vocab...") diff --git a/convert_lora_to_gguf.py b/convert_lora_to_gguf.py index 00a6733cbd..a67c0536a4 100755 --- a/convert_lora_to_gguf.py +++ b/convert_lora_to_gguf.py @@ -340,7 +340,7 @@ if __name__ == '__main__': sys.exit(1) else: logger.info(f"Loading base model: {dir_base_model.name}") - hparams = ModelBase.load_hparams(dir_base_model) + hparams = ModelBase.load_hparams(dir_base_model, False) with torch.inference_mode(): try: diff --git a/docs/build-s390x.md b/docs/build-s390x.md index 4d5857753a..b36a199814 100644 --- a/docs/build-s390x.md +++ b/docs/build-s390x.md @@ -76,6 +76,23 @@ cmake --build build --config Release -j $(nproc) cmake --build build --config Release -j $(nproc) ``` +## IBM zDNN Accelerator + +This provides acceleration using the IBM zAIU co-processor located in the Telum I and Telum II processors. Make sure to have the [IBM zDNN library](https://github.com/IBM/zDNN) installed. + +#### Compile from source from IBM + +You may find the official build instructions here: [Building and Installing zDNN](https://github.com/IBM/zDNN?tab=readme-ov-file#building-and-installing-zdnn) + +### Compilation + +```bash +cmake -S . -B build \ + -DCMAKE_BUILD_TYPE=Release \ + -DGGML_ZDNN=ON +cmake --build build --config Release -j$(nproc) +``` + ## Getting GGUF Models All models need to be converted to Big-Endian. You can achieve this in three cases: @@ -145,15 +162,15 @@ All models need to be converted to Big-Endian. You can achieve this in three cas ### 1. 
SIMD Acceleration -Only available in IBM z15 or later system with the `-DGGML_VXE=ON` (turned on by default) compile flag. No hardware acceleration is possible with llama.cpp with older systems, such as IBM z14/arch12. In such systems, the APIs can still run but will use a scalar implementation. +Only available in IBM z15/LinuxONE 3 or later system with the `-DGGML_VXE=ON` (turned on by default) compile flag. No hardware acceleration is possible with llama.cpp with older systems, such as IBM z14/arch12. In such systems, the APIs can still run but will use a scalar implementation. ### 2. NNPA Vector Intrinsics Acceleration -Only available in IBM z16 or later system with the `-DGGML_NNPA=ON` (turned off by default) compile flag. No hardware acceleration is possible with llama.cpp with older systems, such as IBM z15/arch13. In such systems, the APIs can still run but will use a scalar implementation. +Only available in IBM z16/LinuxONE 4 or later system with the `-DGGML_NNPA=ON` (turned off by default) compile flag. No hardware acceleration is possible with llama.cpp with older systems, such as IBM z15/arch13. In such systems, the APIs can still run but will use a scalar implementation. -### 3. zDNN Accelerator +### 3. zDNN Accelerator (WIP) -_Only available in IBM z16 / LinuxONE 4 or later system. No support currently available._ +Only available in IBM z17/LinuxONE 5 or later system with the `-DGGML_ZDNN=ON` compile flag. No hardware acceleration is possible with llama.cpp with older systems, such as IBM z15/arch13. In such systems, the APIs will default back to CPU routines. ### 4. Spyre Accelerator @@ -229,11 +246,12 @@ IBM VXE/VXE2 SIMD acceleration depends on the BLAS implementation. It is strongl ## Appendix A: Hardware Support Matrix -| | Support | Minimum Compiler Version | -| ------- | ------- | ------------------------ | -| IBM z15 | ✅ | | -| IBM z16 | ✅ | | -| IBM z17 | ✅ | GCC 15.1.0 | +| | Support | Minimum Compiler Version | +| -------- | ------- | ------------------------ | +| IBM z15 | ✅ | | +| IBM z16 | ✅ | | +| IBM z17 | ✅ | GCC 15.1.0 | +| IBM zDNN | ✅ | | - ✅ - supported and verified to run as intended - 🚫 - unsupported, we are unlikely able to provide support @@ -242,7 +260,7 @@ IBM VXE/VXE2 SIMD acceleration depends on the BLAS implementation. It is strongl | | VX/VXE/VXE2 | NNPA | zDNN | Spyre | | ---------- | ----------- | ---- | ---- | ----- | -| FP32 | ✅ | ✅ | ❓ | ❓ | +| FP32 | ✅ | ✅ | ✅ | ❓ | | FP16 | ✅ | ✅ | ❓ | ❓ | | BF16 | 🚫 | 🚫 | ❓ | ❓ | | Q4_0 | ✅ | ✅ | ❓ | ❓ | @@ -273,4 +291,4 @@ IBM VXE/VXE2 SIMD acceleration depends on the BLAS implementation. It is strongl - 🚫 - acceleration unavailable, will still run using scalar implementation - ❓ - acceleration unknown, please contribute if you can test it yourself -Last Updated by **Aaron Teo (aaron.teo1@ibm.com)** on July 25, 2025. +Last Updated by **Aaron Teo (aaron.teo1@ibm.com)** on July 31, 2025. 
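The converter hunks earlier in this diff add a `--mistral-format` switch to `convert_hf_to_gguf.py` (routing `load_hparams` to `params.json` and selecting `MistralModel`/`PixtralModel`), and the s390x notes above require Big-Endian GGUF output via `--bigendian`. The following is a minimal invocation sketch, not part of the patch: it assumes a local Mistral-style checkpoint directory and illustrative output file names; only the flags visible in this diff or already present in the converter (`--mistral-format`, `--mmproj`, `--outtype`, `--outfile`, `--bigendian`) are used.

```bash
# Hedged sketch: convert a Mistral-format checkpoint (params.json + weights)
# using the new --mistral-format flag added in this diff. Paths are illustrative.
python3 convert_hf_to_gguf.py /path/to/mistral-checkpoint \
    --mistral-format \
    --outtype f16 \
    --outfile mistral-f16.gguf

# If the checkpoint bundles a Pixtral vision encoder, a second pass with --mmproj
# (also shown in this diff) should export the multimodal projector instead.
python3 convert_hf_to_gguf.py /path/to/mistral-checkpoint \
    --mistral-format --mmproj \
    --outtype f16

# On s390x, add --bigendian so the resulting GGUF matches the Big-Endian
# requirement described in docs/build-s390x.md above.
```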
diff --git a/docs/multimodal/minicpmo2.6.md b/docs/multimodal/minicpmo2.6.md index d4edc97182..5e74058e5d 100644 --- a/docs/multimodal/minicpmo2.6.md +++ b/docs/multimodal/minicpmo2.6.md @@ -13,7 +13,7 @@ If there are differences in usage, please refer to the official build [documenta Clone llama.cpp: ```bash -git clone https://github.com/ggerganov/llama.cpp +git clone https://github.com/ggml-org/llama.cpp cd llama.cpp ``` diff --git a/docs/multimodal/minicpmv2.6.md b/docs/multimodal/minicpmv2.6.md index 1b0ff59691..bc874bbd8c 100644 --- a/docs/multimodal/minicpmv2.6.md +++ b/docs/multimodal/minicpmv2.6.md @@ -12,7 +12,7 @@ If there are differences in usage, please refer to the official build [documenta Clone llama.cpp: ```bash -git clone https://github.com/ggerganov/llama.cpp +git clone https://github.com/ggml-org/llama.cpp cd llama.cpp ``` diff --git a/docs/ops.md b/docs/ops.md index 1a474d9047..9a81ca0a97 100644 --- a/docs/ops.md +++ b/docs/ops.md @@ -12,91 +12,92 @@ Legend: - 🟡 Partially supported by this backend - ❌ Not supported by this backend -| Operation | BLAS | CANN | CPU | CUDA | Metal | OpenCL | SYCL | Vulkan | -|-----------|------|------|------|------|------|------|------|------| -| ABS | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | -| ACC | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | -| ADD | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ✅ | -| ADD1 | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ✅ | ❌ | -| ARANGE | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | -| ARGMAX | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | -| ARGSORT | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| CLAMP | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | 🟡 | -| CONCAT | ❌ | ✅ | ✅ | 🟡 | ✅ | 🟡 | 🟡 | ✅ | -| CONT | ❌ | 🟡 | ✅ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | -| CONV_2D | ❌ | ❌ | ✅ | ❌ | ❌ | ✅ | ❌ | ✅ | -| CONV_2D_DW | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | -| CONV_TRANSPOSE_1D | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | -| CONV_TRANSPOSE_2D | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | -| COS | ❌ | ✅ | ✅ | ✅ | 🟡 | ❌ | ✅ | 🟡 | -| COUNT_EQUAL | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | -| CPY | ❌ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | -| CROSS_ENTROPY_LOSS | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | -| CROSS_ENTROPY_LOSS_BACK | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | -| DIAG_MASK_INF | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ✅ | -| DIV | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ✅ | -| DUP | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | ✅ | 🟡 | -| ELU | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | -| EXP | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | -| FLASH_ATTN_EXT | ❌ | 🟡 | ✅ | 🟡 | 🟡 | ❌ | ❌ | 🟡 | -| GATED_LINEAR_ATTN | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ✅ | ❌ | -| GEGLU | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | -| GEGLU_ERF | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | -| GEGLU_QUICK | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | -| GELU | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | -| GELU_ERF | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | -| GELU_QUICK | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | -| GET_ROWS | ❌ | 🟡 | ✅ | 🟡 | ✅ | 🟡 | 🟡 | 🟡 | -| GET_ROWS_BACK | ❌ | ❌ | 🟡 | 🟡 | ❌ | ❌ | ❌ | ❌ | -| GROUP_NORM | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| HARDSIGMOID | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | -| HARDSWISH | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | -| IM2COL | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | ✅ | -| L2_NORM | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | -| LEAKY_RELU | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | -| LOG | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ✅ | ❌ | -| MEAN | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | -| MUL | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ✅ | -| MUL_MAT | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | -| MUL_MAT_ID | ❌ | 🟡 | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | -| NEG | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | -| NORM | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | -| OPT_STEP_ADAMW | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | -| OUT_PROD | 🟡 | ❌ | 🟡 | 🟡 | ❌ | ❌ | 🟡 | ❌ | -| PAD | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| PAD_REFLECT_1D | ❌ | ✅ | ✅ | ❌ | ✅ | ❌ | ❌ | ❌ 
| -| POOL_2D | ❌ | 🟡 | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | -| REGLU | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | -| RELU | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | -| REPEAT | ❌ | ✅ | ✅ | 🟡 | ✅ | 🟡 | ✅ | 🟡 | -| REPEAT_BACK | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | -| RMS_NORM | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | ✅ | -| RMS_NORM_BACK | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | -| RMS_NORM_MUL_ADD | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| ROLL | ❌ | ❌ | ✅ | ❌ | ❌ | ❌ | ❌ | ✅ | -| ROPE | ❌ | 🟡 | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| ROPE_BACK | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | -| RWKV_WKV6 | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | -| RWKV_WKV7 | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | -| SCALE | ❌ | 🟡 | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| SET | ❌ | ❌ | ✅ | ❌ | ✅ | ❌ | ❌ | ❌ | -| SET_ROWS | ❌ | ❌ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | -| SGN | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | -| SIGMOID | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | -| SILU | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | -| SILU_BACK | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | -| SIN | ❌ | ✅ | ✅ | ✅ | 🟡 | ❌ | ✅ | 🟡 | -| SOFT_MAX | ❌ | 🟡 | ✅ | ✅ | ✅ | ✅ | 🟡 | ✅ | -| SOFT_MAX_BACK | ❌ | ❌ | 🟡 | 🟡 | ❌ | ❌ | ❌ | ✅ | -| SQR | ❌ | ✅ | ✅ | ✅ | 🟡 | ❌ | ✅ | 🟡 | -| SQRT | ❌ | ✅ | ✅ | ✅ | 🟡 | ❌ | ✅ | ❌ | -| SSM_CONV | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | -| SSM_SCAN | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | -| STEP | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | -| SUB | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ✅ | -| SUM | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ✅ | ✅ | -| SUM_ROWS | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| SWIGLU | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | -| TANH | ❌ | ✅ | ✅ | 🟡 | 🟡 | ✅ | 🟡 | 🟡 | -| TIMESTEP_EMBEDDING | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| UPSCALE | ❌ | 🟡 | ✅ | ✅ | 🟡 | ✅ | 🟡 | ✅ | +| Operation | BLAS | CANN | CPU | CUDA | Metal | OpenCL | SYCL | Vulkan | zDNN | +|-----------|------|------|------|------|------|------|------|------|------| +| ABS | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | ❌ | +| ACC | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ❌ | +| ADD | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ✅ | ❌ | +| ADD1 | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ✅ | ❌ | ❌ | +| ARANGE | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | +| ARGMAX | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ❌ | +| ARGSORT | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | +| CLAMP | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | 🟡 | ❌ | +| CONCAT | ❌ | ✅ | ✅ | 🟡 | ✅ | 🟡 | 🟡 | ✅ | ❌ | +| CONT | ❌ | 🟡 | ✅ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | ❌ | +| CONV_2D | ❌ | ❌ | ✅ | ❌ | ❌ | ✅ | ❌ | ✅ | ❌ | +| CONV_2D_DW | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | +| CONV_TRANSPOSE_1D | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ❌ | +| CONV_TRANSPOSE_2D | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | +| COS | ❌ | ✅ | ✅ | ✅ | 🟡 | ❌ | ✅ | 🟡 | ❌ | +| COUNT_EQUAL | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | +| CPY | ❌ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | ❌ | +| CROSS_ENTROPY_LOSS | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | +| CROSS_ENTROPY_LOSS_BACK | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | +| DIAG_MASK_INF | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ✅ | ❌ | +| DIV | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ✅ | ❌ | +| DUP | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | ✅ | 🟡 | ❌ | +| ELU | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | ❌ | +| EXP | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | ❌ | +| FLASH_ATTN_EXT | ❌ | 🟡 | ✅ | 🟡 | 🟡 | ❌ | ❌ | 🟡 | ❌ | +| GATED_LINEAR_ATTN | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ✅ | ❌ | ❌ | +| GEGLU | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | ❌ | +| GEGLU_ERF | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | ❌ | +| GEGLU_QUICK | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | ❌ | +| GELU | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | ❌ | +| GELU_ERF | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | ❌ | +| GELU_QUICK | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | ❌ | +| GET_ROWS | ❌ | 🟡 | ✅ | 🟡 | ✅ | 🟡 | 🟡 | 🟡 | ❌ | +| GET_ROWS_BACK | ❌ | ❌ | 🟡 | 🟡 | ❌ | ❌ | ❌ | ❌ | ❌ | +| GROUP_NORM | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | +| HARDSIGMOID | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | ❌ | +| 
HARDSWISH | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | ❌ | +| IM2COL | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | ✅ | ❌ | +| L2_NORM | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ❌ | +| LEAKY_RELU | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ❌ | +| LOG | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ✅ | ❌ | ❌ | +| MEAN | ❌ | ✅ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | +| MUL | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ✅ | ❌ | +| MUL_MAT | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | +| MUL_MAT_ID | ❌ | 🟡 | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ❌ | +| NEG | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | ❌ | +| NORM | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | ❌ | +| OPT_STEP_ADAMW | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | +| OUT_PROD | 🟡 | ❌ | 🟡 | 🟡 | ❌ | ❌ | 🟡 | ❌ | ❌ | +| PAD | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | +| PAD_REFLECT_1D | ❌ | ✅ | ✅ | ❌ | ✅ | ❌ | ❌ | ❌ | ❌ | +| POOL_2D | ❌ | 🟡 | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ❌ | +| REGLU | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | ❌ | +| RELU | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | ❌ | +| REPEAT | ❌ | ✅ | ✅ | 🟡 | ✅ | 🟡 | ✅ | 🟡 | ❌ | +| REPEAT_BACK | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | +| RMS_NORM | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | ✅ | ❌ | +| RMS_NORM_BACK | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | +| RMS_NORM_MUL_ADD | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | +| ROLL | ❌ | ❌ | ✅ | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | +| ROPE | ❌ | 🟡 | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | +| ROPE_BACK | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | +| RWKV_WKV6 | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ❌ | +| RWKV_WKV7 | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ❌ | +| SCALE | ❌ | 🟡 | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | +| SET | ❌ | ❌ | ✅ | ❌ | ✅ | ❌ | ❌ | ❌ | ❌ | +| SET_ROWS | ❌ | ❌ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | ❌ | +| SGN | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | ❌ | +| SIGMOID | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | ❌ | +| SILU | ❌ | ✅ | ✅ | 🟡 | 🟡 | 🟡 | 🟡 | 🟡 | ❌ | +| SILU_BACK | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | +| SIN | ❌ | ✅ | ✅ | ✅ | 🟡 | ❌ | ✅ | 🟡 | ❌ | +| SOFTCAP | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| SOFT_MAX | ❌ | 🟡 | ✅ | ✅ | ✅ | ✅ | 🟡 | ✅ | ❌ | +| SOFT_MAX_BACK | ❌ | ❌ | 🟡 | 🟡 | ❌ | ❌ | ❌ | ✅ | ❌ | +| SQR | ❌ | ✅ | ✅ | ✅ | 🟡 | ❌ | ✅ | 🟡 | ❌ | +| SQRT | ❌ | ✅ | ✅ | ✅ | 🟡 | ❌ | ✅ | ❌ | ❌ | +| SSM_CONV | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | +| SSM_SCAN | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | +| STEP | ❌ | ✅ | ✅ | 🟡 | 🟡 | ❌ | 🟡 | ❌ | ❌ | +| SUB | ❌ | ✅ | ✅ | ✅ | 🟡 | 🟡 | ✅ | ✅ | ❌ | +| SUM | ❌ | ✅ | ✅ | ✅ | ❌ | ❌ | ✅ | ✅ | ❌ | +| SUM_ROWS | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | +| SWIGLU | ❌ | ✅ | ✅ | ✅ | 🟡 | ✅ | ✅ | 🟡 | ❌ | +| TANH | ❌ | ✅ | ✅ | 🟡 | 🟡 | ✅ | 🟡 | 🟡 | ❌ | +| TIMESTEP_EMBEDDING | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | +| UPSCALE | ❌ | 🟡 | ✅ | ✅ | 🟡 | ✅ | 🟡 | ✅ | ❌ | diff --git a/docs/ops/zDNN.csv b/docs/ops/zDNN.csv new file mode 100644 index 0000000000..bf633844f5 --- /dev/null +++ b/docs/ops/zDNN.csv @@ -0,0 +1,8134 @@ +"backend_name","op_name","op_params","test_mode","supported","error_message","backend_reg_name" +"zDNN","ABS","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","ABS","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","SGN","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","SGN","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","NEG","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","NEG","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","STEP","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","STEP","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","TANH","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","TANH","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","ELU","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" 
+"zDNN","ELU","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","RELU","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","RELU","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","SIGMOID","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","SIGMOID","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","GELU","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","GELU","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","GELU_QUICK","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","GELU_QUICK","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","SILU","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","SILU","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","HARDSWISH","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","HARDSWISH","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","HARDSIGMOID","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","HARDSIGMOID","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","EXP","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","EXP","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","GELU_ERF","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","GELU_ERF","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","ABS","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","ABS","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","SGN","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","SGN","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","NEG","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","NEG","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","STEP","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","STEP","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","TANH","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","TANH","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","ELU","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","ELU","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","RELU","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","RELU","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","SIGMOID","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","SIGMOID","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","GELU","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","GELU","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","GELU_QUICK","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","GELU_QUICK","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","SILU","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","SILU","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","HARDSWISH","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","HARDSWISH","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","HARDSIGMOID","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","HARDSIGMOID","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","EXP","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","EXP","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" 
+"zDNN","GELU_ERF","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","GELU_ERF","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","ABS","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","ABS","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","SGN","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","SGN","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","NEG","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","NEG","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","STEP","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","STEP","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","TANH","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","TANH","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","ELU","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","ELU","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","RELU","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","RELU","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","SIGMOID","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","SIGMOID","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","GELU","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","GELU","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","GELU_QUICK","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","GELU_QUICK","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","SILU","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","SILU","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","HARDSWISH","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","HARDSWISH","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","HARDSIGMOID","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","HARDSIGMOID","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","EXP","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","EXP","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","GELU_ERF","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","zDNN" +"zDNN","GELU_ERF","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","zDNN" +"zDNN","ABS","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","ABS","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","SGN","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","SGN","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","NEG","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","NEG","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","STEP","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","STEP","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","TANH","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","TANH","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","ELU","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","ELU","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","RELU","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","RELU","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","SIGMOID","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","SIGMOID","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" 
+"zDNN","GELU","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","GELU","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","GELU_QUICK","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","GELU_QUICK","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","SILU","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","SILU","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","HARDSWISH","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","HARDSWISH","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","HARDSIGMOID","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","HARDSIGMOID","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","EXP","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","EXP","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","GELU_ERF","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","zDNN" +"zDNN","GELU_ERF","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[128,2,2,2],v=0,swapped=0","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[5,7,11,13],v=0,swapped=0","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[128,2,2,2],v=0,swapped=1","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[5,7,11,13],v=0,swapped=1","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[128,2,2,2],v=0,split","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[5,7,11,13],v=0,split","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[128,2,2,2],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[5,7,11,13],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[128,2,2,2],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[5,7,11,13],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[128,2,2,2],v=0,split","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[5,7,11,13],v=0,split","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[128,2,2,2],v=0,swapped=0","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[5,7,11,13],v=0,swapped=0","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[128,2,2,2],v=0,swapped=1","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[5,7,11,13],v=0,swapped=1","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[128,2,2,2],v=0,split","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[5,7,11,13],v=0,split","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[128,2,2,2],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[5,7,11,13],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[128,2,2,2],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[5,7,11,13],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[128,2,2,2],v=0,split","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[5,7,11,13],v=0,split","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[128,2,2,2],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[5,7,11,13],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[128,2,2,2],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[5,7,11,13],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[128,2,2,2],v=0,split","support","0","no","zDNN" 
+"zDNN","GEGLU_QUICK","type=f16,ne_a=[5,7,11,13],v=0,split","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[128,2,2,2],v=1,swapped=0","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[5,7,11,13],v=1,swapped=0","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[128,2,2,2],v=1,swapped=1","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[5,7,11,13],v=1,swapped=1","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[128,2,2,2],v=1,split","support","0","no","zDNN" +"zDNN","REGLU","type=f16,ne_a=[5,7,11,13],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[128,2,2,2],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[5,7,11,13],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[128,2,2,2],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[5,7,11,13],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[128,2,2,2],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU","type=f16,ne_a=[5,7,11,13],v=1,split","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[128,2,2,2],v=1,swapped=0","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[5,7,11,13],v=1,swapped=0","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[128,2,2,2],v=1,swapped=1","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[5,7,11,13],v=1,swapped=1","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[128,2,2,2],v=1,split","support","0","no","zDNN" +"zDNN","SWIGLU","type=f16,ne_a=[5,7,11,13],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[128,2,2,2],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[5,7,11,13],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[128,2,2,2],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[5,7,11,13],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[128,2,2,2],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f16,ne_a=[5,7,11,13],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[128,2,2,2],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[5,7,11,13],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[128,2,2,2],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[5,7,11,13],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[128,2,2,2],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f16,ne_a=[5,7,11,13],v=1,split","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[128,2,2,2],v=0,swapped=0","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[5,7,11,13],v=0,swapped=0","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[128,2,2,2],v=0,swapped=1","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[5,7,11,13],v=0,swapped=1","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[128,2,2,2],v=0,split","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[5,7,11,13],v=0,split","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[128,2,2,2],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[5,7,11,13],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[128,2,2,2],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[5,7,11,13],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[128,2,2,2],v=0,split","support","0","no","zDNN" 
+"zDNN","GEGLU","type=f32,ne_a=[5,7,11,13],v=0,split","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[128,2,2,2],v=0,swapped=0","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[5,7,11,13],v=0,swapped=0","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[128,2,2,2],v=0,swapped=1","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[5,7,11,13],v=0,swapped=1","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[128,2,2,2],v=0,split","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[5,7,11,13],v=0,split","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[128,2,2,2],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[5,7,11,13],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[128,2,2,2],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[5,7,11,13],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[128,2,2,2],v=0,split","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[5,7,11,13],v=0,split","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[128,2,2,2],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[5,7,11,13],v=0,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[128,2,2,2],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[5,7,11,13],v=0,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[128,2,2,2],v=0,split","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[5,7,11,13],v=0,split","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[128,2,2,2],v=1,swapped=0","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[5,7,11,13],v=1,swapped=0","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[128,2,2,2],v=1,swapped=1","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[5,7,11,13],v=1,swapped=1","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[128,2,2,2],v=1,split","support","0","no","zDNN" +"zDNN","REGLU","type=f32,ne_a=[5,7,11,13],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[128,2,2,2],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[5,7,11,13],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[128,2,2,2],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[5,7,11,13],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[128,2,2,2],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU","type=f32,ne_a=[5,7,11,13],v=1,split","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[128,2,2,2],v=1,swapped=0","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[5,7,11,13],v=1,swapped=0","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[128,2,2,2],v=1,swapped=1","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[5,7,11,13],v=1,swapped=1","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[128,2,2,2],v=1,split","support","0","no","zDNN" +"zDNN","SWIGLU","type=f32,ne_a=[5,7,11,13],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[128,2,2,2],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[5,7,11,13],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[128,2,2,2],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[5,7,11,13],v=1,swapped=1","support","0","no","zDNN" 
+"zDNN","GEGLU_ERF","type=f32,ne_a=[128,2,2,2],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU_ERF","type=f32,ne_a=[5,7,11,13],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[128,2,2,2],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[5,7,11,13],v=1,swapped=0","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[128,2,2,2],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[5,7,11,13],v=1,swapped=1","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[128,2,2,2],v=1,split","support","0","no","zDNN" +"zDNN","GEGLU_QUICK","type=f32,ne_a=[5,7,11,13],v=1,split","support","0","no","zDNN" +"zDNN","GET_ROWS","type=f32,n=1,m=8,r=2,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=f32,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=f32,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=f32,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=f32,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=f16,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=f16,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=f16,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=f16,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=bf16,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=bf16,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=bf16,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=bf16,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_0,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_0,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_0,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_0,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_1,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_1,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_1,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_1,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_0,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_0,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_0,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_0,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_1,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_1,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_1,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_1,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q8_0,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q8_0,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q8_0,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q8_0,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q2_K,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q2_K,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q2_K,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" 
+"zDNN","GET_ROWS","type=q2_K,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q3_K,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q3_K,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q3_K,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q3_K,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_K,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_K,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_K,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q4_K,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_K,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_K,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_K,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q5_K,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q6_K,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q6_K,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q6_K,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=q6_K,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_xxs,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_xxs,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_xxs,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_xxs,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_xs,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_xs,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_xs,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_xs,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_s,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_s,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_s,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq2_s,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq3_xxs,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq3_xxs,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq3_xxs,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq3_xxs,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq1_s,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq1_s,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq1_s,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq1_s,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq1_m,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq1_m,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq1_m,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq1_m,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq4_nl,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq4_nl,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq4_nl,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" 
+"zDNN","GET_ROWS","type=iq4_nl,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq3_s,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq3_s,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq3_s,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq3_s,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq4_xs,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq4_xs,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq4_xs,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=iq4_xs,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=i32,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=i32,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS","type=i32,n=256,m=5,r=4,b=7,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS","type=i32,n=256,m=5,r=4,b=7,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=f32,n=1,m=8,r=2,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=f32,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=f32,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=f16,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=f16,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=bf16,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=bf16,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q4_0,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q4_0,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q4_1,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q4_1,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q5_0,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q5_0,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q5_1,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q5_1,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q8_0,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q8_0,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q2_K,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q2_K,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q3_K,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q3_K,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q4_K,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q4_K,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q5_K,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q5_K,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q6_K,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=q6_K,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq2_xxs,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq2_xxs,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" 
+"zDNN","GET_ROWS_BACK","type=iq2_xs,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq2_xs,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq2_s,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq2_s,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq3_xxs,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq3_xxs,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq1_s,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq1_s,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq1_m,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq1_m,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq4_nl,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq4_nl,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq3_s,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq3_s,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq4_xs,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=iq4_xs,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=i32,n=256,m=5,r=4,b=1,v=0","support","0","no","zDNN" +"zDNN","GET_ROWS_BACK","type=i32,n=256,m=5,r=4,b=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[1,8,1,3],nr23=[1,1],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[3,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[31,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[33,5,1,1],nr23=[2,3],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[3,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[31,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[33,5,1,1],nr23=[2,3],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[3,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[31,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[33,5,1,7],nr23=[2,3],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[3,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[31,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f32,ne=[33,5,1,7],nr23=[2,3],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" 
+"zDNN","SET_ROWS","type=f16,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[3,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[31,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[33,5,1,1],nr23=[2,3],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[3,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[31,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[33,5,1,1],nr23=[2,3],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[3,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[31,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[33,5,1,7],nr23=[2,3],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[3,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[31,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=f16,ne=[33,5,1,7],nr23=[2,3],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[3,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[31,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[33,5,1,1],nr23=[2,3],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[3,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[31,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[33,5,1,1],nr23=[2,3],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[3,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[31,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[33,5,1,7],nr23=[2,3],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[3,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[31,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=bf16,ne=[33,5,1,7],nr23=[2,3],r=1,v=1","support","0","no","zDNN" 
+"zDNN","SET_ROWS","type=q4_0,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[96,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[96,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[96,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_0,ne=[96,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[96,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[96,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[96,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_1,ne=[96,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[96,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[96,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[96,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_0,ne=[96,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[96,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" 
+"zDNN","SET_ROWS","type=q5_1,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[96,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[96,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_1,ne=[96,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[96,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[96,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[96,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q8_0,ne=[96,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q2_K,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" 
+"zDNN","SET_ROWS","type=q3_K,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q3_K,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q4_K,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q5_K,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" 
+"zDNN","SET_ROWS","type=q6_K,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=q6_K,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xxs,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_xs,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" 
+"zDNN","SET_ROWS","type=iq2_s,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq2_s,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_xxs,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_s,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq1_m,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" 
+"zDNN","SET_ROWS","type=iq4_nl,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[96,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[96,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[96,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_nl,ne=[96,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq3_s,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[256,5,1,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[256,11,1,1],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[768,3,1,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[256,5,1,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[256,11,1,1],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[768,3,1,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[256,5,7,3],nr23=[1,1],r=1,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[256,11,1,7],nr23=[2,3],r=7,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[768,3,7,1],nr23=[2,3],r=2,v=0","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[256,5,7,3],nr23=[1,1],r=1,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[256,11,1,7],nr23=[2,3],r=7,v=1","support","0","no","zDNN" +"zDNN","SET_ROWS","type=iq4_xs,ne=[768,3,7,1],nr23=[2,3],r=2,v=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=1,p0=0,p1=0","support","0","no","zDNN" 
+"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=2,p0=0,p1=0","support","0","no","zDNN" 
+"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=1,p0=0,p1=0","support","0","no","zDNN" 
+"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=avg,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=1,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=1,s0=2,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=2,p0=0,p1=0","support","0","no","zDNN" 
+"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=1,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=1,k1=3,s0=2,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=1,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=1,s0=2,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=1,p0=0,p1=0","support","0","no","zDNN" 
+"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=1,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=1,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=1,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=1,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=1,p0=1,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=2,p0=0,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=2,p0=0,p1=1","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=2,p0=1,p1=0","support","0","no","zDNN" +"zDNN","POOL_2D","pool_type=max,type_input=f32,ne_input=[10,10,3,1],k0=3,k1=3,s0=2,s1=2,p0=1,p1=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[3000,128,1,1],ne_kernel=[3,128,1280,1],s0=1,s1=0,p0=1,p1=0,d0=1,d1=0,is_2D=0","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f32,ne_input=[3000,128,1,1],ne_kernel=[3,128,1280,1],s0=1,s1=0,p0=1,p1=0,d0=1,d1=0,is_2D=0","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[3000,128,1,1],ne_kernel=[3,128,1280,1],s0=1,s1=0,p0=1,p1=0,d0=1,d1=0,is_2D=0","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,2,2,1],ne_kernel=[3,2,2,1],s0=1,s1=0,p0=0,p1=0,d0=1,d1=0,is_2D=0","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,2,2,1],ne_kernel=[3,2,2,1],s0=1,s1=0,p0=0,p1=0,d0=3,d1=0,is_2D=0","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,2,2,1],ne_kernel=[3,2,2,1],s0=1,s1=0,p0=3,p1=0,d0=1,d1=0,is_2D=0","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,2,2,1],ne_kernel=[3,2,2,1],s0=1,s1=0,p0=3,p1=0,d0=3,d1=0,is_2D=0","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,2,2,1],ne_kernel=[3,2,2,1],s0=3,s1=0,p0=0,p1=0,d0=1,d1=0,is_2D=0","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,2,2,1],ne_kernel=[3,2,2,1],s0=3,s1=0,p0=0,p1=0,d0=3,d1=0,is_2D=0","support","0","no","zDNN" 
+"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,2,2,1],ne_kernel=[3,2,2,1],s0=3,s1=0,p0=3,p1=0,d0=1,d1=0,is_2D=0","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,2,2,1],ne_kernel=[3,2,2,1],s0=3,s1=0,p0=3,p1=0,d0=3,d1=0,is_2D=0","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[10,10,3,1],ne_kernel=[3,3,3,1],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f32,ne_input=[10,10,3,1],ne_kernel=[3,3,3,1],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[10,10,3,1],ne_kernel=[3,3,3,1],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=0,p1=0,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=0,p1=0,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=0,p1=0,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=0,p1=0,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=0,p1=3,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=0,p1=3,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=0,p1=3,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=0,p1=3,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=3,p1=0,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=3,p1=0,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=3,p1=0,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=3,p1=0,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=3,p1=3,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=3,p1=3,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=3,p1=3,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=1,p0=3,p1=3,d0=3,d1=3,is_2D=1","support","0","no","zDNN" 
+"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=0,p1=0,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=0,p1=0,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=0,p1=0,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=0,p1=0,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=0,p1=3,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=0,p1=3,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=0,p1=3,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=0,p1=3,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=3,p1=0,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=3,p1=0,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=3,p1=0,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=3,p1=0,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=3,p1=3,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=3,p1=3,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=3,p1=3,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=1,s1=3,p0=3,p1=3,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=0,p1=0,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=0,p1=0,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=0,p1=0,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=0,p1=0,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=0,p1=3,d0=1,d1=1,is_2D=1","support","0","no","zDNN" 
+"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=0,p1=3,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=0,p1=3,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=0,p1=3,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=3,p1=0,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=3,p1=0,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=3,p1=0,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=3,p1=0,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=3,p1=3,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=3,p1=3,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=3,p1=3,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=1,p0=3,p1=3,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=0,p1=0,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=0,p1=0,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=0,p1=0,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=0,p1=0,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=0,p1=3,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=0,p1=3,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=0,p1=3,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=0,p1=3,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=3,p1=0,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=3,p1=0,d0=1,d1=3,is_2D=1","support","0","no","zDNN" 
+"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=3,p1=0,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=3,p1=0,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=3,p1=3,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=3,p1=3,d0=1,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=3,p1=3,d0=3,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f32,dst_type=f32,ne_input=[20,20,2,2],ne_kernel=[3,3,2,2],s0=3,s1=3,p0=3,p1=3,d0=3,d1=3,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[12,12,1,32],ne_kernel=[3,3,1,32],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[12,12,2,32],ne_kernel=[3,3,2,32],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[12,12,1,1024],ne_kernel=[3,3,1,1024],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[12,12,2,1024],ne_kernel=[3,3,2,1024],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[12,12,1,2048],ne_kernel=[3,3,1,2048],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[12,12,2,2048],ne_kernel=[3,3,2,2048],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[12,12,1,2560],ne_kernel=[3,3,1,2560],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[12,12,2,2560],ne_kernel=[3,3,2,2560],s0=1,s1=1,p0=1,p1=1,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","IM2COL","type_input=f32,type_kernel=f16,dst_type=f16,ne_input=[5,5,1,32],ne_kernel=[3,4,1,32],s0=1,s1=1,p0=0,p1=0,d0=1,d1=1,is_2D=1","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,1],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,1],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,12],type_kernel=f32,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,12],type_kernel=f16,stride0=1,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=2,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,1,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,2,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,1,2],ne_kernel=[11,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,3,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[1,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[2,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[3,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,1,2],ne_kernel=[11,11,1,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,1],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,1],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,1,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,2,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,1,25,2],ne_kernel=[11,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,3,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" 
+"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[1,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[2,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[1,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[3,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,12],type_kernel=f32,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D","ne_input=[141,133,25,2],ne_kernel=[11,11,25,12],type_kernel=f16,stride0=3,stride1=5,padding0=5,padding1=5,dilation0=2,dilation1=4,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D_DW","ne_input=[17,34,9,1],ne_kernel=[3,3,1,9],stride=1,padding=0,dilation=1,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D_DW","ne_input=[17,34,9,1],ne_kernel=[3,3,1,9],stride=1,padding=0,dilation=1,cwhn=1","support","0","no","zDNN" +"zDNN","CONV_2D_DW","ne_input=[32,8,64,1],ne_kernel=[3,3,1,64],stride=2,padding=1,dilation=1,cwhn=0","support","0","no","zDNN" +"zDNN","CONV_2D_DW","ne_input=[32,8,64,1],ne_kernel=[3,3,1,64],stride=2,padding=1,dilation=1,cwhn=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1,1,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1,1,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1,1,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1,1,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1,1,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1,1,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1,1,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1,1,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1,1,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" 
+"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[3,1,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[3,1,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[3,1,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[3,1,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[3,1,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[3,1,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[3,1,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[3,1,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[3,1,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1337,1,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1337,1,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1337,1,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1337,1,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1337,1,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1337,1,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1337,1,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1337,1,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1337,1,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1,1,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1,1,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1,1,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1,1,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1,1,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1,1,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1,1,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1,1,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1,1,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[3,1,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[3,1,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[3,1,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[3,1,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" 
+"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[3,1,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[3,1,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[3,1,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[3,1,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[3,1,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1337,1,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1337,1,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1337,1,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1337,1,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1337,1,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1337,1,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1337,1,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1337,1,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1337,1,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1,9,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1,9,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1,9,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1,9,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1,9,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1,9,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1,9,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1,9,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1,9,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[3,9,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[3,9,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[3,9,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[3,9,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[3,9,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[3,9,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[3,9,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[3,9,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" 
+"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[3,9,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1337,9,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1337,9,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,1,1,1],ne_kernel=[1337,9,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1337,9,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1337,9,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[1337,9,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1337,9,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1337,9,1,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,1,1,1],ne_kernel=[1337,9,1,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1,9,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1,9,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1,9,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1,9,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1,9,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1,9,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1,9,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1,9,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1,9,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[3,9,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[3,9,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[3,9,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[3,9,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[3,9,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[3,9,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[3,9,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[3,9,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[3,9,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1337,9,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1337,9,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[1,7,1,1],ne_kernel=[1337,9,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" 
+"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1337,9,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1337,9,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,7,1,1],ne_kernel=[1337,9,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1337,9,7,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1337,9,7,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[13,7,1,1],ne_kernel=[1337,9,7,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[197,32,1,1],ne_kernel=[16,32,32,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[3,2,1,1],ne_kernel=[2,3,2,1],s0=3,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[3,2,1,1],ne_kernel=[2,3,2,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[3,2,1,1],ne_kernel=[2,3,2,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[3,2,1,1],ne_kernel=[3,2,2,1],s0=2,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[3,2,1,1],ne_kernel=[3,2,2,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[3,2,1,1],ne_kernel=[3,1,2,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_1D","ne_input=[2,1,1,1],ne_kernel=[3,1,1,1],s0=1,p0=0,d0=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_2D","ne_input=[3,2,3,1],ne_kernel=[2,2,1,3],stride=1","support","0","no","zDNN" +"zDNN","CONV_TRANSPOSE_2D","ne_input=[10,10,9,1],ne_kernel=[3,3,1,9],stride=2","support","0","no","zDNN" +"zDNN","COUNT_EQUAL","type=f32,ne=[4,500,1,1]","support","0","no","zDNN" +"zDNN","COUNT_EQUAL","type=f32,ne=[4,5000,1,1]","support","0","no","zDNN" +"zDNN","ARGMAX","type=f32,ne=[32,1,1,1]","support","0","no","zDNN" +"zDNN","ARGMAX","type=f32,ne=[100,10,1,1]","support","0","no","zDNN" +"zDNN","ARGMAX","type=f32,ne=[1024,10,1,1]","support","0","no","zDNN" +"zDNN","ARGMAX","type=f32,ne=[1024,12,1,1]","support","0","no","zDNN" +"zDNN","ARGMAX","type=f32,ne=[2000,10,1,1]","support","0","no","zDNN" +"zDNN","ARGMAX","type=f32,ne=[5438,3,1,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=f32,ne=[10,5,4,1],nr=[1,1,1,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=f32,ne=[10,5,4,1],nr=[2,1,1,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=f32,ne=[10,5,4,1],nr=[1,2,1,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=f32,ne=[10,5,4,1],nr=[1,1,2,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=f32,ne=[10,5,4,1],nr=[1,1,1,2]","support","0","no","zDNN" +"zDNN","REPEAT","type=i32,ne=[10,5,4,1],nr=[2,1,1,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=i16,ne=[10,5,4,1],nr=[1,1,1,2]","support","0","no","zDNN" +"zDNN","REPEAT","type=f32,ne=[10,5,4,3],nr=[1,1,1,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=f32,ne=[10,5,4,3],nr=[2,1,1,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=f32,ne=[10,5,4,3],nr=[1,2,1,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=f32,ne=[10,5,4,3],nr=[1,1,2,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=f32,ne=[10,5,4,3],nr=[1,1,1,2]","support","0","no","zDNN" +"zDNN","REPEAT","type=i32,ne=[10,5,4,3],nr=[2,1,1,1]","support","0","no","zDNN" +"zDNN","REPEAT","type=i16,ne=[10,5,4,3],nr=[1,1,1,2]","support","0","no","zDNN" 
+"zDNN","REPEAT_BACK","type=f32,ne=[8,6,4,2],nr=[1,1,1,1],v=0","support","0","no","zDNN" +"zDNN","REPEAT_BACK","type=f32,ne=[8,6,4,2],nr=[2,1,1,1],v=0","support","0","no","zDNN" +"zDNN","REPEAT_BACK","type=f32,ne=[8,6,4,2],nr=[1,2,1,1],v=0","support","0","no","zDNN" +"zDNN","REPEAT_BACK","type=f32,ne=[8,6,4,2],nr=[1,1,2,1],v=0","support","0","no","zDNN" +"zDNN","REPEAT_BACK","type=f32,ne=[8,6,4,2],nr=[1,1,1,2],v=0","support","0","no","zDNN" +"zDNN","REPEAT_BACK","type=f32,ne=[8,6,4,2],nr=[1,1,1,1],v=1","support","0","no","zDNN" +"zDNN","REPEAT_BACK","type=f32,ne=[8,6,4,2],nr=[2,1,1,1],v=1","support","0","no","zDNN" +"zDNN","REPEAT_BACK","type=f32,ne=[8,6,4,2],nr=[1,2,1,1],v=1","support","0","no","zDNN" +"zDNN","REPEAT_BACK","type=f32,ne=[8,6,4,2],nr=[1,1,2,1],v=1","support","0","no","zDNN" +"zDNN","REPEAT_BACK","type=f32,ne=[8,6,4,2],nr=[1,1,1,2],v=1","support","0","no","zDNN" +"zDNN","DUP","type=f32,ne=[10,10,20,1]","support","0","no","zDNN" +"zDNN","DUP","type=f16,ne=[10,10,20,1]","support","0","no","zDNN" +"zDNN","DUP","type=i32,ne=[10,10,20,1]","support","0","no","zDNN" +"zDNN","DUP","type=i16,ne=[10,10,20,1]","support","0","no","zDNN" +"zDNN","DUP","type=f32,ne=[10,10,5,1],permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","DUP","type=f16,ne=[10,10,5,1],permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","DUP","type=f32,ne=[10,10,5,1],permute=[1,0,2,3]","support","0","no","zDNN" +"zDNN","DUP","type=f16,ne=[10,10,5,1],permute=[1,0,2,3]","support","0","no","zDNN" +"zDNN","DUP","type=i16,ne=[10,8,3,1],permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","DUP","type=i16,ne=[10,8,3,1],permute=[1,2,0,3]","support","0","no","zDNN" +"zDNN","SET","type_src=f32,type_dst=f32,ne=[6,5,4,3],dim=1","support","0","no","zDNN" +"zDNN","SET","type_src=f32,type_dst=f32,ne=[6,5,4,3],dim=2","support","0","no","zDNN" +"zDNN","SET","type_src=f32,type_dst=f32,ne=[6,5,4,3],dim=3","support","0","no","zDNN" +"zDNN","SET","type_src=i32,type_dst=i32,ne=[6,5,4,3],dim=1","support","0","no","zDNN" +"zDNN","SET","type_src=i32,type_dst=i32,ne=[6,5,4,3],dim=2","support","0","no","zDNN" +"zDNN","SET","type_src=i32,type_dst=i32,ne=[6,5,4,3],dim=3","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[1,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[1,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[1,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[2,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[2,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[2,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[3,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[3,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[3,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[1,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=f16,type_dst=f16,ne=[1,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[1,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[2,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[2,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[2,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[3,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[3,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[3,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[1,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[1,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[1,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[2,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[2,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[2,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[3,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[3,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[3,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=q4_0,ne=[32,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=q4_0,ne=[32,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=q4_0,ne=[32,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=q4_0,ne=[64,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=q4_0,ne=[64,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=q4_0,ne=[64,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=q4_0,ne=[96,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=q4_0,ne=[96,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=q4_0,ne=[96,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_1,type_dst=q4_1,ne=[32,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_1,type_dst=q4_1,ne=[32,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=q4_1,type_dst=q4_1,ne=[32,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_1,type_dst=q4_1,ne=[64,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_1,type_dst=q4_1,ne=[64,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_1,type_dst=q4_1,ne=[64,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_1,type_dst=q4_1,ne=[96,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_1,type_dst=q4_1,ne=[96,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_1,type_dst=q4_1,ne=[96,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=q5_0,ne=[32,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=q5_0,ne=[32,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=q5_0,ne=[32,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=q5_0,ne=[64,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=q5_0,ne=[64,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=q5_0,ne=[64,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=q5_0,ne=[96,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=q5_0,ne=[96,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=q5_0,ne=[96,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=q5_1,ne=[32,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=q5_1,ne=[32,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=q5_1,ne=[32,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=q5_1,ne=[64,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=q5_1,ne=[64,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=q5_1,ne=[64,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=q5_1,ne=[96,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=q5_1,ne=[96,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=q5_1,ne=[96,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q8_0,type_dst=q8_0,ne=[32,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q8_0,type_dst=q8_0,ne=[32,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=q8_0,type_dst=q8_0,ne=[32,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q8_0,type_dst=q8_0,ne=[64,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q8_0,type_dst=q8_0,ne=[64,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q8_0,type_dst=q8_0,ne=[64,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q8_0,type_dst=q8_0,ne=[96,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q8_0,type_dst=q8_0,ne=[96,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q8_0,type_dst=q8_0,ne=[96,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=q2_K,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=q2_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=q2_K,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=q2_K,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=q2_K,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=q2_K,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=q2_K,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=q2_K,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=q2_K,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=q3_K,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=q3_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=q3_K,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=q3_K,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=q3_K,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=q3_K,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=q3_K,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=q3_K,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=q3_K,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_K,type_dst=q4_K,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_K,type_dst=q4_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=q4_K,type_dst=q4_K,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_K,type_dst=q4_K,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_K,type_dst=q4_K,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_K,type_dst=q4_K,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_K,type_dst=q4_K,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_K,type_dst=q4_K,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_K,type_dst=q4_K,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=q5_K,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=q5_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=q5_K,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=q5_K,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=q5_K,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=q5_K,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=q5_K,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=q5_K,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=q5_K,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=q6_K,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=q6_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=q6_K,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=q6_K,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=q6_K,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=q6_K,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=q6_K,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=q6_K,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=q6_K,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xxs,type_dst=iq2_xxs,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xxs,type_dst=iq2_xxs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=iq2_xxs,type_dst=iq2_xxs,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xxs,type_dst=iq2_xxs,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xxs,type_dst=iq2_xxs,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xxs,type_dst=iq2_xxs,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xxs,type_dst=iq2_xxs,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xxs,type_dst=iq2_xxs,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xxs,type_dst=iq2_xxs,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xs,type_dst=iq2_xs,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xs,type_dst=iq2_xs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xs,type_dst=iq2_xs,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xs,type_dst=iq2_xs,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xs,type_dst=iq2_xs,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xs,type_dst=iq2_xs,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xs,type_dst=iq2_xs,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xs,type_dst=iq2_xs,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xs,type_dst=iq2_xs,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=iq2_s,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=iq2_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=iq2_s,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=iq2_s,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=iq2_s,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=iq2_s,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=iq2_s,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=iq2_s,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=iq2_s,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_xxs,type_dst=iq3_xxs,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=iq3_xxs,type_dst=iq3_xxs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_xxs,type_dst=iq3_xxs,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_xxs,type_dst=iq3_xxs,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_xxs,type_dst=iq3_xxs,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_xxs,type_dst=iq3_xxs,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_xxs,type_dst=iq3_xxs,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_xxs,type_dst=iq3_xxs,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_xxs,type_dst=iq3_xxs,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=iq1_s,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=iq1_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=iq1_s,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=iq1_s,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=iq1_s,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=iq1_s,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=iq1_s,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=iq1_s,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=iq1_s,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=iq1_m,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=iq1_m,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=iq1_m,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=iq1_m,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=iq1_m,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=iq1_m,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=iq1_m,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=iq1_m,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=iq1_m,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=iq4_nl,type_dst=iq4_nl,ne=[32,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_nl,type_dst=iq4_nl,ne=[32,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_nl,type_dst=iq4_nl,ne=[32,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_nl,type_dst=iq4_nl,ne=[64,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_nl,type_dst=iq4_nl,ne=[64,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_nl,type_dst=iq4_nl,ne=[64,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_nl,type_dst=iq4_nl,ne=[96,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_nl,type_dst=iq4_nl,ne=[96,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_nl,type_dst=iq4_nl,ne=[96,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=iq3_s,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=iq3_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=iq3_s,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=iq3_s,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=iq3_s,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=iq3_s,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=iq3_s,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=iq3_s,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=iq3_s,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_xs,type_dst=iq4_xs,ne=[256,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_xs,type_dst=iq4_xs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_xs,type_dst=iq4_xs,ne=[256,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_xs,type_dst=iq4_xs,ne=[512,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_xs,type_dst=iq4_xs,ne=[512,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_xs,type_dst=iq4_xs,ne=[512,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_xs,type_dst=iq4_xs,ne=[768,2,3,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_xs,type_dst=iq4_xs,ne=[768,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=iq4_xs,type_dst=iq4_xs,ne=[768,2,3,4],permute_src=[0,3,1,2],permute_dst=[0,2,1,3]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=bf16,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=bf16,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q4_0,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q4_0,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q4_1,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q4_1,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q5_0,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q5_0,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q5_1,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q5_1,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q8_0,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q8_0,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q2_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q2_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q3_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q3_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q4_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q4_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q5_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q5_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q6_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=q6_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=f16,type_dst=iq2_xxs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq2_xxs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq2_xs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq2_xs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq2_s,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq2_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq3_xxs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq3_xxs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq1_s,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq1_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq1_m,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq1_m,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq4_nl,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq4_nl,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq3_s,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq3_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq4_xs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=iq4_xs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=f16,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=f16,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=bf16,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q4_0,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q4_0,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q4_1,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=bf16,type_dst=q4_1,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q5_0,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q5_0,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q5_1,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q5_1,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q8_0,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q8_0,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q2_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q2_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q3_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q3_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q4_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q4_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q5_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q5_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q6_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=q6_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq2_xxs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq2_xxs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq2_xs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq2_xs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq2_s,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq2_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq3_xxs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq3_xxs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq1_s,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq1_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=bf16,type_dst=iq1_m,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq1_m,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq4_nl,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq4_nl,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq3_s,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq3_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq4_xs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=iq4_xs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f16,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f16,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=bf16,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=bf16,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q4_0,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q4_0,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q4_1,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q4_1,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q5_0,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q5_0,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q5_1,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q5_1,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q8_0,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q8_0,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q2_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q2_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q3_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=f32,type_dst=q3_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q4_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q4_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q5_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q5_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q6_K,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=q6_K,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq2_xxs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq2_xxs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq2_xs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq2_xs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq2_s,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq2_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq3_xxs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq3_xxs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq1_s,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq1_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq1_m,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq1_m,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq4_nl,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq4_nl,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq3_s,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq3_s,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq4_xs,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=iq4_xs,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=f16,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=bf16,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_0,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_1,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_1,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_0,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_1,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q8_0,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q8_0,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q2_K,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q3_K,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_K,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q4_K,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q5_K,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=q6_K,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xxs,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xxs,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_xs,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" 
+"zDNN","CPY","type_src=iq2_xs,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq2_s,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_xxs,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_xxs,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_s,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq1_m,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_nl,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_nl,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq3_s,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_xs,type_dst=f32,ne=[256,4,4,4],permute_src=[0,0,0,0],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=iq4_xs,type_dst=f32,ne=[256,2,3,4],permute_src=[0,2,1,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f16,ne=[256,2,3,4],permute_src=[1,0,2,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f16,type_dst=f32,ne=[256,2,3,4],permute_src=[1,0,2,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f16,ne=[256,2,3,4],permute_src=[1,0,2,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CPY","type_src=f32,type_dst=f32,ne=[256,2,3,4],permute_src=[1,0,2,3],permute_dst=[0,0,0,0]","support","0","no","zDNN" +"zDNN","CONT","type=f32,ne=[10,10,10,1]","support","0","no","zDNN" +"zDNN","CONT","type=f32,ne=[2,1,1,1]","support","0","no","zDNN" +"zDNN","CONT","type=f32,ne=[2,1,3,5]","support","0","no","zDNN" +"zDNN","CONT","type=f32,ne=[2,3,5,7]","support","0","no","zDNN" +"zDNN","CONT","type=f16,ne=[2,1,1,1]","support","0","no","zDNN" +"zDNN","CONT","type=f16,ne=[2,1,3,5]","support","0","no","zDNN" +"zDNN","CONT","type=f16,ne=[2,3,5,7]","support","0","no","zDNN" +"zDNN","CONT","type=bf16,ne=[2,1,1,1]","support","0","no","zDNN" +"zDNN","CONT","type=bf16,ne=[2,1,3,5]","support","0","no","zDNN" +"zDNN","CONT","type=bf16,ne=[2,3,5,7]","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1,1,8,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1,1,8,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1,1,8,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1,1,8,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1,1,1,1],nr=[32,1,1,1],nf=1","support","0","no","zDNN" 
+"zDNN","SUB","type=f16,ne=[1,1,1,1],nr=[32,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1,1,1,1],nr=[32,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1,1,1,1],nr=[32,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1,1,320,320],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1,1,320,320],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1,1,320,320],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1,1,320,320],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[10,5,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[10,5,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[10,5,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[10,5,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[10,5,4,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[10,5,4,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[10,5,4,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[10,5,4,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[10,5,4,3],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[10,5,4,3],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[10,5,4,3],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[10,5,4,3],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[10,5,4,3],nr=[2,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[10,5,4,3],nr=[2,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[10,5,4,3],nr=[2,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[10,5,4,3],nr=[2,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[10,5,4,3],nr=[1,2,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[10,5,4,3],nr=[1,2,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[10,5,4,3],nr=[1,2,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[10,5,4,3],nr=[1,2,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[10,5,4,3],nr=[1,1,2,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[10,5,4,3],nr=[1,1,2,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[10,5,4,3],nr=[1,1,2,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[10,5,4,3],nr=[1,1,2,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[10,5,4,3],nr=[1,1,1,2],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[10,5,4,3],nr=[1,1,1,2],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[10,5,4,3],nr=[1,1,1,2],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[10,5,4,3],nr=[1,1,1,2],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[10,5,4,3],nr=[1,1,2,2],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[10,5,4,3],nr=[1,1,2,2],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[10,5,4,3],nr=[1,1,2,2],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[10,5,4,3],nr=[1,1,2,2],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[10,5,4,3],nr=[1,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[10,5,4,3],nr=[1,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[10,5,4,3],nr=[1,2,2,2],nf=1","support","0","no","zDNN" 
+"zDNN","DIV","type=f16,ne=[10,5,4,3],nr=[1,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[10,5,4,3],nr=[2,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[10,5,4,3],nr=[2,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[10,5,4,3],nr=[2,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[10,5,4,3],nr=[2,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1280,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1280,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1280,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1280,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1280,1,1,1],nr=[1,16,16,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1280,1,1,1],nr=[1,16,16,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1280,1,1,1],nr=[1,16,16,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1280,1,1,1],nr=[1,16,16,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1280,16,16,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1280,16,16,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1280,16,16,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1280,16,16,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1280,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1280,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1280,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1280,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1,1,1280,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1,1,1280,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1,1,1280,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1,1,1280,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[16,16,1280,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[16,16,1280,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[16,16,1280,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[16,16,1280,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1,1,1920,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1,1,1920,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1,1,1920,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1,1,1920,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1,1,2560,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1,1,2560,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1,1,2560,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1,1,2560,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1,1,1280,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1,1,1280,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1,1,1280,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1,1,1280,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" 
+"zDNN","ADD","type=f16,ne=[1,1,1920,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1,1,1920,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1,1,1920,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1,1,1920,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[1,1,640,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[1,1,640,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[1,1,640,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[1,1,640,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[5120,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[5120,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[5120,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[5120,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f16,ne=[640,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f16,ne=[640,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f16,ne=[640,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f16,ne=[640,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1,1,8,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1,1,8,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1,1,8,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1,1,8,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1,1,1,1],nr=[32,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1,1,1,1],nr=[32,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1,1,1,1],nr=[32,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1,1,1,1],nr=[32,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1,1,320,320],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1,1,320,320],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1,1,320,320],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1,1,320,320],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[10,5,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[10,5,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[10,5,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[10,5,4,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[10,5,4,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[10,5,4,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[10,5,4,3],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[10,5,4,3],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[10,5,4,3],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[2,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[10,5,4,3],nr=[2,1,1,1],nf=1","support","0","no","zDNN" 
+"zDNN","MUL","type=f32,ne=[10,5,4,3],nr=[2,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[10,5,4,3],nr=[2,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[1,2,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[10,5,4,3],nr=[1,2,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[10,5,4,3],nr=[1,2,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[10,5,4,3],nr=[1,2,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[1,1,2,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[10,5,4,3],nr=[1,1,2,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[10,5,4,3],nr=[1,1,2,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[10,5,4,3],nr=[1,1,2,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[1,1,1,2],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[10,5,4,3],nr=[1,1,1,2],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[10,5,4,3],nr=[1,1,1,2],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[10,5,4,3],nr=[1,1,1,2],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[1,1,2,2],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[10,5,4,3],nr=[1,1,2,2],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[10,5,4,3],nr=[1,1,2,2],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[10,5,4,3],nr=[1,1,2,2],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[1,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[10,5,4,3],nr=[1,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[10,5,4,3],nr=[1,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[10,5,4,3],nr=[1,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[2,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[10,5,4,3],nr=[2,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[10,5,4,3],nr=[2,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[10,5,4,3],nr=[2,2,2,2],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1280,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1280,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1280,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1280,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1280,1,1,1],nr=[1,16,16,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1280,1,1,1],nr=[1,16,16,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1280,1,1,1],nr=[1,16,16,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1280,1,1,1],nr=[1,16,16,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1280,16,16,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1280,16,16,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1280,16,16,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1280,16,16,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1280,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1280,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1280,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1280,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" 
+"zDNN","ADD","type=f32,ne=[1,1,1280,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1,1,1280,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1,1,1280,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1,1,1280,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[16,16,1280,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[16,16,1280,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[16,16,1280,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[16,16,1280,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1,1,1920,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1,1,1920,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1,1,1920,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1,1,1920,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1,1,2560,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1,1,2560,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1,1,2560,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1,1,2560,1],nr=[16,16,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1,1,1280,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1,1,1280,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1,1,1280,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1,1,1280,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1,1,1920,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1,1,1920,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1,1,1920,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1,1,1920,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[1,1,640,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[1,1,640,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[1,1,640,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[1,1,640,1],nr=[32,32,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[5120,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[5120,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[5120,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[5120,1,1,1],nr=[1,256,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[640,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","SUB","type=f32,ne=[640,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","MUL","type=f32,ne=[640,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","DIV","type=f32,ne=[640,1,1,1],nr=[1,1,1,1],nf=1","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[2,1,1,1],nf=2","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[16,5,4,3],nr=[1,2,1,1],nf=3","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[1,1,2,1],nf=4","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[16,5,4,3],nr=[1,1,1,2],nf=5","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[1,1,2,2],nf=6","support","0","no","zDNN" 
+"zDNN","ADD","type=f32,ne=[10,5,4,3],nr=[1,2,2,2],nf=7","support","0","no","zDNN" +"zDNN","ADD","type=f32,ne=[16,5,4,3],nr=[2,2,2,2],nf=8","support","0","no","zDNN" +"zDNN","ADD1","type=f32,ne=[10,5,4,3]","support","0","no","zDNN" +"zDNN","SCALE","type=f32,ne=[10,10,10,10],scale=2.000000,bias=0.000000","support","0","no","zDNN" +"zDNN","SCALE","type=f32,ne=[10,10,10,10],scale=2.000000,bias=1.000000","support","0","no","zDNN" +"zDNN","SOFTCAP","type=f32,ne=[10,10,10,10],softcap=50.000000","support","0","no","zDNN" +"zDNN","SILU_BACK","type=f32,ne=[64,5,4,3],eps=0.000001","support","0","no","zDNN" +"zDNN","NORM","type=f32,ne=[64,5,4,3],v=0,eps=0.000000","support","0","no","zDNN" +"zDNN","RMS_NORM","type=f32,ne=[64,5,4,3],v=0,eps=0.000000","support","0","no","zDNN" +"zDNN","NORM","type=f32,ne=[64,5,4,3],v=1,eps=0.000000","support","0","no","zDNN" +"zDNN","RMS_NORM","type=f32,ne=[64,5,4,3],v=1,eps=0.000000","support","0","no","zDNN" +"zDNN","RMS_NORM_BACK","type=f32,ne=[64,5,4,3],eps=0.000000","support","0","no","zDNN" +"zDNN","L2_NORM","type=f32,ne=[64,5,4,3]","support","0","no","zDNN" +"zDNN","NORM","type=f32,ne=[64,5,4,3],v=0,eps=0.000001","support","0","no","zDNN" +"zDNN","RMS_NORM","type=f32,ne=[64,5,4,3],v=0,eps=0.000001","support","0","no","zDNN" +"zDNN","NORM","type=f32,ne=[64,5,4,3],v=1,eps=0.000001","support","0","no","zDNN" +"zDNN","RMS_NORM","type=f32,ne=[64,5,4,3],v=1,eps=0.000001","support","0","no","zDNN" +"zDNN","RMS_NORM_BACK","type=f32,ne=[64,5,4,3],eps=0.000001","support","0","no","zDNN" +"zDNN","L2_NORM","type=f32,ne=[64,5,4,3]","support","0","no","zDNN" +"zDNN","NORM","type=f32,ne=[64,5,4,3],v=0,eps=0.000100","support","0","no","zDNN" +"zDNN","RMS_NORM","type=f32,ne=[64,5,4,3],v=0,eps=0.000100","support","0","no","zDNN" +"zDNN","NORM","type=f32,ne=[64,5,4,3],v=1,eps=0.000100","support","0","no","zDNN" +"zDNN","RMS_NORM","type=f32,ne=[64,5,4,3],v=1,eps=0.000100","support","0","no","zDNN" +"zDNN","RMS_NORM_BACK","type=f32,ne=[64,5,4,3],eps=0.000100","support","0","no","zDNN" +"zDNN","L2_NORM","type=f32,ne=[64,5,4,3]","support","0","no","zDNN" +"zDNN","NORM","type=f32,ne=[64,5,4,3],v=0,eps=0.100000","support","0","no","zDNN" +"zDNN","RMS_NORM","type=f32,ne=[64,5,4,3],v=0,eps=0.100000","support","0","no","zDNN" +"zDNN","NORM","type=f32,ne=[64,5,4,3],v=1,eps=0.100000","support","0","no","zDNN" +"zDNN","RMS_NORM","type=f32,ne=[64,5,4,3],v=1,eps=0.100000","support","0","no","zDNN" +"zDNN","RMS_NORM_BACK","type=f32,ne=[64,5,4,3],eps=0.100000","support","0","no","zDNN" +"zDNN","L2_NORM","type=f32,ne=[64,5,4,3]","support","0","no","zDNN" +"zDNN","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=0.000000,broadcast=0","support","0","no","zDNN" +"zDNN","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=0.000000,broadcast=1","support","0","no","zDNN" +"zDNN","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=0.000001,broadcast=0","support","0","no","zDNN" +"zDNN","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=0.000001,broadcast=1","support","0","no","zDNN" +"zDNN","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=0.000100,broadcast=0","support","0","no","zDNN" +"zDNN","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=0.000100,broadcast=1","support","0","no","zDNN" +"zDNN","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=0.100000,broadcast=0","support","0","no","zDNN" +"zDNN","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=0.100000,broadcast=1","support","0","no","zDNN" +"zDNN","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=1.000000,broadcast=0","support","0","no","zDNN" 
+"zDNN","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=1.000000,broadcast=1","support","0","no","zDNN" +"zDNN","L2_NORM","type=f32,ne=[64,5,4,3]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,1024,1,1],ne_b=[3,1024,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[8,1024,1,1],ne_b=[3,1024,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,1024,4,1],ne_b=[3,1024,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,1536,1,1],ne_b=[3,1536,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[8,1536,1,1],ne_b=[3,1536,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,1536,4,1],ne_b=[3,1536,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,2048,1,1],ne_b=[3,2048,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[8,2048,1,1],ne_b=[3,2048,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,2048,4,1],ne_b=[3,2048,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,1024,1,1],ne_b=[4,1024,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[8,1024,1,1],ne_b=[4,1024,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,1024,4,1],ne_b=[4,1024,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,1536,1,1],ne_b=[4,1536,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[8,1536,1,1],ne_b=[4,1536,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,1536,4,1],ne_b=[4,1536,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,2048,1,1],ne_b=[4,2048,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[8,2048,1,1],ne_b=[4,2048,1,1]","support","0","no","zDNN" +"zDNN","SSM_CONV","type=f32,ne_a=[4,2048,4,1],ne_b=[4,2048,1,1]","support","0","no","zDNN" +"zDNN","SSM_SCAN","type=f32,d_state=16,head_dim=1,n_head=1024,n_group=1,n_seq_tokens=32,n_seqs=4","support","0","no","zDNN" +"zDNN","SSM_SCAN","type=f32,d_state=128,head_dim=64,n_head=16,n_group=2,n_seq_tokens=32,n_seqs=4","support","0","no","zDNN" +"zDNN","SSM_SCAN","type=f32,d_state=256,head_dim=64,n_head=8,n_group=2,n_seq_tokens=32,n_seqs=4","support","0","no","zDNN" +"zDNN","RWKV_WKV6","type=f32,head_count=32,head_size=64,n_seq_tokens=1,n_seqs=1","support","0","no","zDNN" +"zDNN","RWKV_WKV6","type=f32,head_count=32,head_size=64,n_seq_tokens=32,n_seqs=1","support","0","no","zDNN" +"zDNN","RWKV_WKV6","type=f32,head_count=32,head_size=64,n_seq_tokens=32,n_seqs=4","support","0","no","zDNN" +"zDNN","RWKV_WKV6","type=f32,head_count=32,head_size=64,n_seq_tokens=128,n_seqs=4","support","0","no","zDNN" +"zDNN","RWKV_WKV7","type=f32,head_count=32,head_size=64,n_seq_tokens=1,n_seqs=1","support","0","no","zDNN" +"zDNN","RWKV_WKV7","type=f32,head_count=32,head_size=64,n_seq_tokens=32,n_seqs=1","support","0","no","zDNN" +"zDNN","RWKV_WKV7","type=f32,head_count=32,head_size=64,n_seq_tokens=32,n_seqs=4","support","0","no","zDNN" +"zDNN","RWKV_WKV7","type=f32,head_count=32,head_size=64,n_seq_tokens=128,n_seqs=4","support","0","no","zDNN" +"zDNN","GATED_LINEAR_ATTN","type=f32,head_count=32,head_size=64,n_seq_tokens=1,n_seqs=1","support","0","no","zDNN" +"zDNN","GATED_LINEAR_ATTN","type=f32,head_count=32,head_size=64,n_seq_tokens=32,n_seqs=1","support","0","no","zDNN" +"zDNN","GATED_LINEAR_ATTN","type=f32,head_count=32,head_size=64,n_seq_tokens=32,n_seqs=4","support","0","no","zDNN" +"zDNN","GATED_LINEAR_ATTN","type=f32,head_count=32,head_size=64,n_seq_tokens=128,n_seqs=4","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q2_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q2_K,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q2_K,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q2_K,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q2_K,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q2_K,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q2_K,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q2_K,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q2_K,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q3_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q3_K,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q3_K,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q3_K,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q3_K,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q3_K,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q3_K,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q3_K,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q3_K,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_K,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_K,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_K,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_K,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_K,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_K,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_K,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_K,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q6_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q6_K,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q6_K,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q6_K,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q6_K,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q6_K,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q6_K,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q6_K,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q6_K,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xs,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xs,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xs,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xs,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xs,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xs,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xs,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xs,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xs,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_s,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_s,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_s,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_s,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_s,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_s,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_s,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_s,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_s,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_xxs,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=iq3_xxs,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_xxs,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_xxs,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_xxs,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_xxs,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_xxs,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_xxs,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_xxs,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_s,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_s,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_s,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_s,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_s,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_s,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_s,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_s,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_s,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_m,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_m,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_m,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_m,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_m,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_m,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_m,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_m,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_m,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_s,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_s,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_s,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_s,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_s,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_s,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_s,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_s,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_s,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_xs,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_xs,type_b=f32,m=16,n=2,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_xs,type_b=f32,m=16,n=3,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_xs,type_b=f32,m=16,n=4,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_xs,type_b=f32,m=16,n=5,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_xs,type_b=f32,m=16,n=6,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_xs,type_b=f32,m=16,n=7,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_xs,type_b=f32,m=16,n=8,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_xs,type_b=f32,m=16,n=9,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","1","yes","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f16,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=8,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=4,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f16,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f16,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f32,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_0,type_b=f16,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f16,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f32,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_K,type_b=f16,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f32,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[1,1],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[3,1],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[1,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[3,2],nr=[2,2],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=8,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,1,3,2],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=256,bs=[2,3],nr=[1,1],per=[0,3,2,1],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=1,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=8,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xxs,type_b=f16,m=16,n=16,k=1024,bs=[3,2],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=32,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q4_1,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=1,k=32,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=1,k=32,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_1,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=32,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q8_0,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q2_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q3_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q5_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=q6_K,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_xs,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq2_s,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_xxs,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_s,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq1_m,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=1,k=32,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_nl,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq3_s,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=iq4_xs,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=1,k=1,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=16,n=1,k=256,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=64,n=2,k=128,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=83,n=2,k=128,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=64,n=2,k=64,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=83,n=2,k=64,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=64,n=45,k=128,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=45,k=64,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=193,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=67,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=128,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1056,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=128,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1056,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=128,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1056,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=129,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1057,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=129,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1057,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=129,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1057,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=128,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1056,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=128,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1056,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=128,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1056,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=129,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1057,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=129,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1057,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=129,bs=[1,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1057,bs=[1,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=128,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1056,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=128,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1056,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=128,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1056,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=129,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1057,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=129,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1057,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=129,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1057,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=128,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1056,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=128,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1056,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=128,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1056,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=129,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1057,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=129,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1057,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=129,bs=[1,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1057,bs=[1,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=128,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1056,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=128,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1056,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=128,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1056,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=129,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1057,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=129,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1057,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=129,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1057,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=128,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1056,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=128,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1056,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=128,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1056,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=129,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1057,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=129,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1057,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=129,bs=[2,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1057,bs=[2,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=128,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1056,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=128,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1056,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=128,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1056,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=129,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1057,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=129,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1057,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=129,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1057,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=128,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1056,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=128,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1056,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=128,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1056,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=129,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1057,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=129,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1057,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=129,bs=[2,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1057,bs=[2,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=128,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1056,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=128,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1056,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=128,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1056,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=129,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1057,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=129,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1057,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=129,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1057,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=128,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1056,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=128,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1056,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=128,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1056,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=129,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1057,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=129,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1057,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=129,bs=[4,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1057,bs=[4,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=128,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1056,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=128,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1056,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=128,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1056,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=129,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1057,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=129,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1057,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=129,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1057,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=128,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1056,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=128,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1056,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=128,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1056,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=129,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1057,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=129,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1057,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=129,bs=[4,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1057,bs=[4,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=128,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1056,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=128,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1056,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=128,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1056,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=129,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1057,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=129,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1057,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=129,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1057,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=128,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1056,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=128,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1056,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=128,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1056,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=129,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1057,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=129,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1057,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=129,bs=[8,1],nr=[1,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1057,bs=[8,1],nr=[1,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=128,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1056,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=128,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1056,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" 
+"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=128,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1056,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1056,n=1,k=129,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=128,n=1,k=1057,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1056,n=1,k=129,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=128,n=1,k=1057,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1056,n=1,k=129,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=128,n=1,k=1057,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=128,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1056,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=128,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1056,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=128,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1056,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=1057,n=1,k=129,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f16,type_b=f32,m=129,n=1,k=1057,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=1057,n=1,k=129,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=bf16,type_b=f32,m=129,n=1,k=1057,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=1057,n=1,k=129,bs=[8,1],nr=[4,1],per=[0,2,1,3],v=0","support","0","no","zDNN" +"zDNN","MUL_MAT","type_a=f32,type_b=f32,m=129,n=1,k=1057,bs=[8,1],nr=[4,1],per=[0,1,2,3],v=1","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=16,n_used=16,b=0,m=32,n=1024,k=16","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=16,n_used=16,b=1,m=32,n=1024,k=16","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" 
+"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f32,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" 
+"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=f16,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" 
+"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" 
+"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" 
+"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_0,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" 
+"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" 
+"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_K,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" 
+"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=4,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=1,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=1,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=2,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=2,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=4,b=0,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xxs,type_b=f32,n_mats=8,n_used=4,b=1,m=512,n=129,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q4_1,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q5_0,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q5_0,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q5_1,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q5_1,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q8_0,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q2_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q2_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" 
+"zDNN","MUL_MAT_ID","type_a=q3_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q3_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q5_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q5_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q6_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=q6_K,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xs,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_xs,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_s,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq2_s,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq3_xxs,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq3_xxs,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq1_s,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq1_s,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq1_m,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq1_m,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq4_nl,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq4_nl,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq3_s,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq3_s,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq4_xs,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=iq4_xs,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=bf16,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=1,k=256","support","0","no","zDNN" +"zDNN","MUL_MAT_ID","type_a=bf16,type_b=f32,n_mats=4,n_used=2,b=0,m=512,n=32,k=256","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f32,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=f16,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q8_0,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_0,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_1,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=q4_K,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f32,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=1,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" 
+"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=1,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[1,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[1,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[3,1],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[1,2],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,1],trans_b=0","support","0","no","zDNN" +"zDNN","OUT_PROD","type_a=iq2_xxs,type_b=f16,m=256,n=16,k=16,bs=[3,3],nr=[2,2],trans_b=0","support","0","no","zDNN" +"zDNN","SQR","type=f16,ne=[10,5,4,3]","support","0","no","zDNN" +"zDNN","SQRT","type=f16,ne=[10,3,3,2]","support","0","no","zDNN" +"zDNN","LOG","type=f16,ne=[10,5,4,3]","support","0","no","zDNN" +"zDNN","SIN","type=f16,ne=[10,2,2,2]","support","0","no","zDNN" +"zDNN","COS","type=f16,ne=[10,2,2,2]","support","0","no","zDNN" +"zDNN","CLAMP","type=f16,ne=[10,5,4,3],min=-0.500000,max=0.500000","support","0","no","zDNN" +"zDNN","SQR","type=f32,ne=[10,5,4,3]","support","0","no","zDNN" +"zDNN","SQRT","type=f32,ne=[10,3,3,2]","support","0","no","zDNN" +"zDNN","LOG","type=f32,ne=[10,5,4,3]","support","0","no","zDNN" +"zDNN","SIN","type=f32,ne=[10,2,2,2]","support","0","no","zDNN" +"zDNN","COS","type=f32,ne=[10,2,2,2]","support","0","no","zDNN" 
+"zDNN","CLAMP","type=f32,ne=[10,5,4,3],min=-0.500000,max=0.500000","support","0","no","zDNN" +"zDNN","DIAG_MASK_INF","type=f32,ne=[10,10,1,1],n_past=5","support","0","no","zDNN" +"zDNN","DIAG_MASK_INF","type=f32,ne=[10,10,3,1],n_past=5","support","0","no","zDNN" +"zDNN","DIAG_MASK_INF","type=f32,ne=[10,10,3,2],n_past=5","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,1024,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,1023,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,16,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,15,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,1024,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,1023,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,1024,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,1023,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,16,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,15,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,1024,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,1023,1,1],mask=0,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,3],mask=1,m_prec=f32,nr23=[3,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f32,nr23=[2,3],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,3],mask=1,m_prec=f16,nr23=[3,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" 
+"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f16,nr23=[2,3],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,1024,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,1023,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,1024,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,1023,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,16,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,15,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,16,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,15,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,1024,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,1023,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,1024,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,1023,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,3],mask=1,m_prec=f32,nr23=[3,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f32,nr23=[2,3],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,3],mask=1,m_prec=f16,nr23=[3,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f16,nr23=[2,3],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,1024,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,1023,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,1024,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,1023,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,16,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" 
+"zDNN","SOFT_MAX","type=f32,ne=[1023,15,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,16,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,15,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,1024,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,1023,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,1024,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,1023,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,3],mask=1,m_prec=f32,nr23=[3,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f32,nr23=[2,3],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,3],mask=1,m_prec=f16,nr23=[3,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f16,nr23=[2,3],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,1024,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,1023,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,1024,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,1023,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,16,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,15,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,16,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,15,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,1024,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,1023,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,1024,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" 
+"zDNN","SOFT_MAX","type=f32,ne=[1023,1023,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,3],mask=1,m_prec=f32,nr23=[3,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f32,nr23=[2,3],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,16,1,3],mask=1,m_prec=f16,nr23=[3,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,15,1,1],mask=1,m_prec=f16,nr23=[2,3],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,1024,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,1023,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,1024,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[15,1023,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,16,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,15,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,16,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,15,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,1024,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,1023,1,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1024,1024,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[1023,1023,1,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,2,32,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,2,32,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[16,2,32,1],mask=0,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[32,2,32,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[32,2,32,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" 
+"zDNN","SOFT_MAX","type=f32,ne=[32,2,32,1],mask=1,m_prec=f32,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX","type=f32,ne=[32,2,32,1],mask=1,m_prec=f16,nr23=[1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[16,16,1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[15,15,1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[16,1024,1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[15,1023,1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1024,16,1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1023,15,1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1024,1024,1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1023,1023,1,1],scale=1.000000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[16,16,1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[15,15,1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[16,1024,1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[15,1023,1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1024,16,1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1023,15,1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1024,1024,1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1023,1023,1,1],scale=0.100000,max_bias=0.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[16,16,1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[15,15,1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[16,1024,1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[15,1023,1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1024,16,1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1023,15,1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1024,1024,1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1023,1023,1,1],scale=1.000000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[16,16,1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[15,15,1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[16,1024,1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[15,1023,1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1024,16,1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" 
+"zDNN","SOFT_MAX_BACK","type=f32,ne=[1023,15,1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1024,1024,1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","SOFT_MAX_BACK","type=f32,ne=[1023,1023,1,1],scale=0.100000,max_bias=8.000000","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,40,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,52,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,64,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,1,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,71,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,8,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,12,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,28,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,12,2,1],n_dims=20,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,28,2,1],n_dims=32,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,16,2,1],n_dims=80,mode=24,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,40,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,52,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" 
+"zDNN","ROPE","type=f32,ne_a=[128,64,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,1,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,71,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,8,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,12,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,28,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,12,2,1],n_dims=20,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,28,2,1],n_dims=32,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,16,2,1],n_dims=80,mode=24,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,40,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,52,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,64,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,1,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,71,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,8,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" 
+"zDNN","ROPE","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,12,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,28,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,12,2,1],n_dims=20,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,28,2,1],n_dims=32,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,16,2,1],n_dims=80,mode=24,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,40,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,52,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,64,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,1,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,71,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,8,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,12,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,28,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" 
+"zDNN","ROPE","type=f32,ne_a=[128,12,2,1],n_dims=20,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,28,2,1],n_dims=32,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[80,16,2,1],n_dims=80,mode=24,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" 
+"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" 
+"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" 
+"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" 
+"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" 
+"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,40,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,52,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,64,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,1,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,71,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,8,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,12,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,28,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,12,2,1],n_dims=20,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,28,2,1],n_dims=32,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,16,2,1],n_dims=80,mode=24,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" 
+"zDNN","ROPE_BACK","type=f32,ne_a=[128,40,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,52,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,64,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,1,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,71,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,8,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,12,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,28,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,12,2,1],n_dims=20,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,28,2,1],n_dims=32,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,16,2,1],n_dims=80,mode=24,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,40,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,52,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,64,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,1,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,71,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" 
+"zDNN","ROPE_BACK","type=f32,ne_a=[64,8,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,12,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,28,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,12,2,1],n_dims=20,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,28,2,1],n_dims=32,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,16,2,1],n_dims=80,mode=24,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,40,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,52,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,64,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,1,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,71,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,8,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=20,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" 
+"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,2,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,32,4,1],n_dims=32,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,12,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,28,2,1],n_dims=128,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,12,2,1],n_dims=20,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,28,2,1],n_dims=32,mode=8,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[80,16,2,1],n_dims=80,mode=24,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" 
+"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" 
+"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.000000,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" 
+"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.000000,af=1.424500,ff=1,v=1","support","0","no","zDNN" 
+"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.000000,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f32,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" 
+"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=0,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=0","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[128,32,2,1],n_dims=128,mode=0,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","ROPE_BACK","type=f16,ne_a=[64,128,2,1],n_dims=64,mode=2,n_ctx=512,fs=1.424500,ef=0.746500,af=1.424500,ff=1,v=1","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=0,v=0","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=0,v=0","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=1,v=0","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=1,v=0","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=2,v=0","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=2,v=0","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=3,v=0","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=3,v=0","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=0,v=1","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=0,v=1","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=1,v=1","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=1,v=1","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=2,v=1","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=2,v=1","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=3,v=1","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=3,v=1","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=0,v=2","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=0,v=2","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=1,v=2","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=1,v=2","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=2,v=2","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=2,v=2","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=3,v=2","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=3,v=2","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=0,v=3","support","0","no","zDNN" 
+"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=0,v=3","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=1,v=3","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=1,v=3","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=2,v=3","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=2,v=3","support","0","no","zDNN" +"zDNN","CONCAT","type=f32,ne_a=[11,12,13,14],ne_b_d=7,dim=3,v=3","support","0","no","zDNN" +"zDNN","CONCAT","type=i32,ne_a=[11,12,13,14],ne_b_d=7,dim=3,v=3","support","0","no","zDNN" +"zDNN","ARGSORT","type=f32,ne=[8,1,1,1],order=0","support","0","no","zDNN" +"zDNN","ARGSORT","type=f32,ne=[16,10,10,10],order=0","support","0","no","zDNN" +"zDNN","ARGSORT","type=f32,ne=[60,10,10,10],order=0","support","0","no","zDNN" +"zDNN","ARGSORT","type=f32,ne=[8,1,1,1],order=1","support","0","no","zDNN" +"zDNN","ARGSORT","type=f32,ne=[16,10,10,10],order=1","support","0","no","zDNN" +"zDNN","ARGSORT","type=f32,ne=[60,10,10,10],order=1","support","0","no","zDNN" +"zDNN","UPSCALE","type=f32,ne=[512,512,3,2],scale_factor=2,mode=nearest,transpose=0","support","0","no","zDNN" +"zDNN","UPSCALE","type=f32,ne=[512,512,3,2],scale_factor=2,mode=nearest,transpose=1","support","0","no","zDNN" +"zDNN","UPSCALE","type=f32,ne=[2,5,7,11],ne_tgt=[5,7,11,13],mode=0","support","0","no","zDNN" +"zDNN","UPSCALE","type=f32,ne=[5,7,11,13],ne_tgt=[2,5,7,11],mode=0","support","0","no","zDNN" +"zDNN","UPSCALE","type=f32,ne=[512,512,3,2],scale_factor=2,mode=bilinear,transpose=0","support","0","no","zDNN" +"zDNN","UPSCALE","type=f32,ne=[512,512,3,2],scale_factor=2,mode=bilinear,transpose=1","support","0","no","zDNN" +"zDNN","UPSCALE","type=f32,ne=[2,5,7,11],ne_tgt=[5,7,11,13],mode=1","support","0","no","zDNN" +"zDNN","UPSCALE","type=f32,ne=[5,7,11,13],ne_tgt=[2,5,7,11],mode=1","support","0","no","zDNN" +"zDNN","UPSCALE","type=f32,ne=[2,5,7,11],ne_tgt=[5,7,11,13],mode=257","support","0","no","zDNN" +"zDNN","SUM","type=f32,ne=[10,5,4,3]","support","0","no","zDNN" +"zDNN","SUM_ROWS","type=f32,ne=[10,5,4,3]","support","0","no","zDNN" +"zDNN","MEAN","type=f32,ne=[10,5,4,3]","support","0","no","zDNN" +"zDNN","GROUP_NORM","type=f32,ne=[64,64,320,1],num_groups=32,eps=0.000001","support","0","no","zDNN" +"zDNN","GROUP_NORM","type=f32,ne=[9,9,1280,1],num_groups=32,eps=0.000001","support","0","no","zDNN" +"zDNN","ACC","type=f32,ne_a=[256,17,1,1],ne_b=[256,16,1,1]","support","0","no","zDNN" +"zDNN","PAD","type=f32,ne_a=[512,512,1,1],pad_0=1,pad_1=1","support","0","no","zDNN" +"zDNN","PAD_REFLECT_1D","type=f32,ne_a=[512,34,2,1],pad_0=10,pad_1=9","support","0","no","zDNN" +"zDNN","ROLL","shift0=3,shift1=-2,shift3=1,shift4=-1","support","0","no","zDNN" +"zDNN","ARANGE","type=f32,start=0.000000,stop=10.000000,step=1.000000","support","0","no","zDNN" +"zDNN","TIMESTEP_EMBEDDING","type=f32,ne_a=[2,1,1,1],dim=320,max_period=10000","support","0","no","zDNN" +"zDNN","LEAKY_RELU","type=f32,ne_a=[10,5,4,3],negative_slope=0.100000","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[1,3],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=64,hsv=64,nh=4,nr23=[4,3],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=80,hsv=80,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=128,hsv=128,nh=4,nr23=[16,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=10.000000,prec=def,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=128,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=192,hsv=192,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=256,hsv=256,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,2,1,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=1,max_bias=8.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[1,1],kv=1024,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=1,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" 
+"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=3,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=32,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=f16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=bf16,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q8_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","FLASH_ATTN_EXT","hsk=576,hsv=512,nh=4,nr23=[4,1],kv=512,nb=35,mask=0,max_bias=0.000000,logit_softcap=0.000000,prec=f32,type_KV=q4_0,permute=[0,1,2,3]","support","0","no","zDNN" +"zDNN","CROSS_ENTROPY_LOSS","type=f32,ne=[10,5,4,3]","support","0","no","zDNN" +"zDNN","CROSS_ENTROPY_LOSS","type=f32,ne=[30000,1,1,1]","support","0","no","zDNN" +"zDNN","CROSS_ENTROPY_LOSS_BACK","type=f32,ne=[10,5,4,3]","support","0","no","zDNN" +"zDNN","CROSS_ENTROPY_LOSS_BACK","type=f32,ne=[30000,1,1,1]","support","0","no","zDNN" +"zDNN","OPT_STEP_ADAMW","type=f32,ne=[10,5,4,3]","support","0","no","zDNN" diff --git a/examples/eval-callback/eval-callback.cpp b/examples/eval-callback/eval-callback.cpp index 4afd80eb45..61eefc7248 100644 --- a/examples/eval-callback/eval-callback.cpp +++ b/examples/eval-callback/eval-callback.cpp @@ -7,6 +7,7 @@ #include #include #include +#include /** * This the arbitrary data which will be passed to each callback. @@ -77,6 +78,12 @@ static void ggml_print_tensor(uint8_t * data, ggml_type type, const int64_t * ne LOG(" ]\n"); LOG(" sum = %f\n", sum); } + + // TODO: make this abort configurable/optional? 
+ if (std::isnan(sum)) { + LOG_ERR("encountered NaN - aborting\n"); + exit(0); + } } /** diff --git a/examples/speculative-simple/speculative-simple.cpp b/examples/speculative-simple/speculative-simple.cpp index 722cd7f40f..a8e53f28eb 100644 --- a/examples/speculative-simple/speculative-simple.cpp +++ b/examples/speculative-simple/speculative-simple.cpp @@ -59,6 +59,8 @@ int main(int argc, char ** argv) { } params.cpuparams_batch.n_threads = params.speculative.cpuparams_batch.n_threads; + params.tensor_buft_overrides = params.speculative.tensor_buft_overrides; + common_init_result llama_init_dft = common_init_from_params(params); //model_dft = llama_init_dft.model.get(); diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 0adffdb006..8449406a6d 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -85,6 +85,8 @@ int main(int argc, char ** argv) { } params.cpuparams_batch.n_threads = params.speculative.cpuparams_batch.n_threads; + params.tensor_buft_overrides = params.speculative.tensor_buft_overrides; + common_init_result llama_init_dft = common_init_from_params(params); model_dft = llama_init_dft.model.get(); diff --git a/examples/training/finetune.cpp b/examples/training/finetune.cpp index 23bede49b1..416d8d8f6c 100644 --- a/examples/training/finetune.cpp +++ b/examples/training/finetune.cpp @@ -10,20 +10,20 @@ #include #if defined(_MSC_VER) -#pragma warning(disable: 4244 4267) // possible loss of data +#pragma warning(disable: 4244 4267) // possible loss of data #endif int main(int argc, char ** argv) { common_params params; - params.escape = false; - if (!common_params_parse(argc, argv, params, LLAMA_EXAMPLE_PERPLEXITY)) { + if (!common_params_parse(argc, argv, params, LLAMA_EXAMPLE_FINETUNE)) { return 1; } if (params.use_mmap) { - LOG_INF("%s: force disabling memory mapping because it would result in-read-only pointers to the weights\n", __func__); + LOG_INF("%s: force disabling memory mapping because it would result in-read-only pointers to the weights\n", + __func__); params.use_mmap = false; } if (params.cache_type_k != GGML_TYPE_F32) { @@ -38,11 +38,10 @@ int main(int argc, char ** argv) { common_init(); llama_backend_init(); llama_numa_init(params.numa); - // load the model and apply lora adapter, if any - common_init_result llama_init = common_init_from_params(params); - llama_model_ptr & model = llama_init.model; - llama_context_ptr & ctx = llama_init.context; + common_init_result llama_init = common_init_from_params(params); + llama_model_ptr & model = llama_init.model; + llama_context_ptr & ctx = llama_init.context; if (model == NULL) { LOG_ERR("%s: unable to load model\n", __func__); @@ -55,31 +54,32 @@ int main(int argc, char ** argv) { LOG_INF("%s\n", common_params_get_system_info(params).c_str()); } - constexpr float val_split = 0.05f; + std::vector tokens = common_tokenize(ctx.get(), params.prompt, true); + ggml_opt_dataset_t dataset = common_opt_dataset_init(ctx.get(), tokens, llama_n_ctx(ctx.get()) / 2); - std::vector tokens = common_tokenize(ctx.get(), params.prompt, true); - ggml_opt_dataset_t dataset = common_opt_dataset_init(ctx.get(), tokens, llama_n_ctx(ctx.get())/2); + struct lr_opt & lr = params.lr; + LOG_INF("-optimizer %s -lr0 %.2g -wd %.2g -lr-min %.2g -min-epochs %.2g -epochs %d -period %.2g -val %.2g\n", + ggml_opt_optimizer_name(params.optimizer), (double) lr.lr0, (double) lr.wd, (double) lr.lr_min, (double) lr.decay_epochs, + (unsigned) lr.epochs, (double) params.n_batch 
/ params.n_ubatch, (double) params.val_split); - struct ggml_opt_optimizer_params optimizer_params = ggml_opt_get_default_optimizer_params(nullptr); - optimizer_params.adamw.alpha = 1e-7f; // learning rate - - struct llama_opt_params lopt_params { - /*n_ctx_train =*/ 0, - /*param_filter =*/ llama_opt_param_filter_all, - /*param_filter_ud =*/ nullptr, - /*get_opt_pars =*/ ggml_opt_get_constant_optimizer_params, - /*get_opt_pars_ud =*/ &optimizer_params, + struct llama_opt_params lopt_params{ + /*n_ctx_train =*/0, + /*param_filter =*/llama_opt_param_filter_all, + /*param_filter_ud =*/nullptr, + /*get_opt_pars =*/common_opt_lr_pars, + /*get_opt_pars_ud =*/¶ms.lr, + /*optimizer_type =*/params.optimizer, }; llama_opt_init(ctx.get(), model.get(), lopt_params); - const int64_t idata_split = ggml_opt_dataset_ndata(dataset) * (1.0f - val_split); + const int64_t idata_split = ggml_opt_dataset_ndata(dataset) * (1.0f - params.val_split); ggml_opt_result_t result_train = ggml_opt_result_init(); ggml_opt_result_t result_eval = ggml_opt_result_init(); - for (int epoch = 0; epoch < 2; ++epoch) { + for (lr.epoch = 0; lr.epoch < lr.epochs; ++lr.epoch) { llama_opt_epoch(ctx.get(), dataset, result_train, result_eval, idata_split, - ggml_opt_epoch_callback_progress_bar, ggml_opt_epoch_callback_progress_bar); + ggml_opt_epoch_callback_progress_bar, ggml_opt_epoch_callback_progress_bar); fprintf(stderr, "\n"); ggml_opt_result_reset(result_train); @@ -88,7 +88,7 @@ int main(int argc, char ** argv) { ggml_opt_result_free(result_train); ggml_opt_result_free(result_eval); - llama_model_save_to_file(model.get(), "finetuned-model.gguf"); + llama_model_save_to_file(model.get(), params.out_file.c_str()); llama_backend_free(); diff --git a/flake.nix b/flake.nix index 0b5edf911f..bb02c8e52f 100644 --- a/flake.nix +++ b/flake.nix @@ -36,9 +36,6 @@ # ``` # nixConfig = { # extra-substituters = [ - # # Populated by the CI in ggml-org/llama.cpp - # "https://llama-cpp.cachix.org" - # # # A development cache for nixpkgs imported with `config.cudaSupport = true`. # # Populated by https://hercules-ci.com/github/SomeoneSerge/nixpkgs-cuda-ci. # # This lets one skip building e.g. the CUDA-enabled openmpi. 
@@ -47,10 +44,8 @@ # ]; # # # Verify these are the same keys as published on - # # - https://app.cachix.org/cache/llama-cpp # # - https://app.cachix.org/cache/cuda-maintainers # extra-trusted-public-keys = [ - # "llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc=" # "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E=" # ]; # }; diff --git a/ggml/CMakeLists.txt b/ggml/CMakeLists.txt index 1fb7abeaf0..90e274ccdb 100644 --- a/ggml/CMakeLists.txt +++ b/ggml/CMakeLists.txt @@ -188,6 +188,7 @@ option(GGML_VULKAN_VALIDATE "ggml: enable Vulkan validation" option(GGML_VULKAN_RUN_TESTS "ggml: run Vulkan tests" OFF) option(GGML_WEBGPU "ggml: use WebGPU" OFF) option(GGML_WEBGPU_DEBUG "ggml: enable WebGPU debug output" OFF) +option(GGML_ZDNN "ggml: use zDNN" OFF) option(GGML_METAL "ggml: use Metal" ${GGML_METAL_DEFAULT}) option(GGML_METAL_USE_BF16 "ggml: use bfloat if available" OFF) option(GGML_METAL_NDEBUG "ggml: disable Metal debugging" OFF) diff --git a/ggml/include/ggml-opt.h b/ggml/include/ggml-opt.h index 74ec080a05..4703a05afe 100644 --- a/ggml/include/ggml-opt.h +++ b/ggml/include/ggml-opt.h @@ -74,16 +74,26 @@ extern "C" { GGML_OPT_BUILD_TYPE_OPT = 30, }; + enum ggml_opt_optimizer_type { + GGML_OPT_OPTIMIZER_TYPE_ADAMW, + GGML_OPT_OPTIMIZER_TYPE_SGD, + + GGML_OPT_OPTIMIZER_TYPE_COUNT + }; + // parameters that control which optimizer is used and how said optimizer tries to find the minimal loss struct ggml_opt_optimizer_params { - // AdamW optimizer parameters struct { float alpha; // learning rate - float beta1; - float beta2; + float beta1; // first AdamW momentum + float beta2; // second AdamW momentum float eps; // epsilon for numerical stability - float wd; // weight decay for AdamW, use 0.0f to disable + float wd; // weight decay - 0.0f to disable } adamw; + struct { + float alpha; // learning rate + float wd; // weight decay + } sgd; }; // callback to calculate optimizer parameters prior to a backward pass @@ -112,8 +122,11 @@ extern "C" { int32_t opt_period; // after how many gradient accumulation steps an optimizer step should be done - ggml_opt_get_optimizer_params get_opt_pars; // callback for calculating optimizer parameters - void * get_opt_pars_ud; // userdata for calculating optimizer parameters + ggml_opt_get_optimizer_params get_opt_pars; // callback for calculating optimizer parameters + void * get_opt_pars_ud; // userdata for calculating optimizer parameters + + // only GGML_OPT_OPTIMIZER_TYPE_ADAMW needs m, v momenta per parameter tensor + enum ggml_opt_optimizer_type optimizer; }; // get parameters for an optimization context with defaults set where possible @@ -142,6 +155,10 @@ extern "C" { // get the gradient accumulator for a node from the forward graph GGML_API struct ggml_tensor * ggml_opt_grad_acc(ggml_opt_context_t opt_ctx, struct ggml_tensor * node); + GGML_API enum ggml_opt_optimizer_type ggml_opt_context_optimizer_type(ggml_opt_context_t); //TODO consistent naming scheme + + GGML_API const char * ggml_opt_optimizer_name(enum ggml_opt_optimizer_type); + // ====== Optimization Result ====== GGML_API ggml_opt_result_t ggml_opt_result_init(void); @@ -226,12 +243,14 @@ extern "C" { struct ggml_tensor * outputs, // output tensor, must have shape [ne_label, ndata_batch] if labels are used ggml_opt_dataset_t dataset, // dataset with data and optionally also labels enum ggml_opt_loss_type loss_type, // loss to minimize + enum ggml_opt_optimizer_type optimizer, // sgd or adamw ggml_opt_get_optimizer_params get_opt_pars, // callback 
to get optimizer params, userdata is pointer to epoch (of type int64_t) int64_t nepoch, // how many times the dataset should be iterated over int64_t nbatch_logical, // datapoints optimizer step, must be a multiple of ndata_batch in inputs/outputs float val_split, // fraction of the dataset to use for validation, must be in [0.0f, 1.0f) bool silent); // whether or not info prints to stderr should be suppressed + #ifdef __cplusplus } #endif diff --git a/ggml/include/ggml-zdnn.h b/ggml/include/ggml-zdnn.h new file mode 100644 index 0000000000..c2c30c977c --- /dev/null +++ b/ggml/include/ggml-zdnn.h @@ -0,0 +1,16 @@ +#pragma once + +#include "ggml.h" +#include "ggml-backend.h" + +#ifdef __cplusplus +extern "C" { +#endif + +GGML_BACKEND_API ggml_backend_t ggml_backend_zdnn_init(void); + +GGML_BACKEND_API ggml_backend_reg_t ggml_backend_zdnn_reg(void); + +#ifdef __cplusplus +} +#endif diff --git a/ggml/include/ggml.h b/ggml/include/ggml.h index 2f06e1e39b..da8813fd27 100644 --- a/ggml/include/ggml.h +++ b/ggml/include/ggml.h @@ -241,6 +241,8 @@ #define GGML_ROPE_TYPE_MROPE 8 #define GGML_ROPE_TYPE_VISION 24 +#define GGML_MROPE_SECTIONS 4 + #define GGML_UNUSED(x) (void)(x) #define GGML_PAD(x, n) (((x) + (n) - 1) & ~((n) - 1)) @@ -540,6 +542,7 @@ extern "C" { GGML_OP_CROSS_ENTROPY_LOSS, GGML_OP_CROSS_ENTROPY_LOSS_BACK, GGML_OP_OPT_STEP_ADAMW, + GGML_OP_OPT_STEP_SGD, GGML_OP_GLU, @@ -1660,7 +1663,7 @@ extern "C" { struct ggml_tensor * b, struct ggml_tensor * c, int n_dims, - int sections[4], + int sections[GGML_MROPE_SECTIONS], int mode, int n_ctx_orig, float freq_base, @@ -1686,6 +1689,22 @@ extern "C" { float beta_fast, float beta_slow); + GGML_API struct ggml_tensor * ggml_rope_multi_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + int n_dims, + int sections[GGML_MROPE_SECTIONS], + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow); + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_rope_custom( struct ggml_context * ctx, struct ggml_tensor * a, @@ -2293,7 +2312,14 @@ extern "C" { struct ggml_tensor * grad, struct ggml_tensor * m, struct ggml_tensor * v, - struct ggml_tensor * adamw_params); // parameters such a the learning rate + struct ggml_tensor * adamw_params); // parameters such as the learning rate + + // stochastic gradient descent step (with weight decay) + GGML_API struct ggml_tensor * ggml_opt_step_sgd( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * grad, + struct ggml_tensor * sgd_params); // alpha, weight decay // // automatic differentiation diff --git a/ggml/src/CMakeLists.txt b/ggml/src/CMakeLists.txt index 177fb28213..2b5b8169d7 100644 --- a/ggml/src/CMakeLists.txt +++ b/ggml/src/CMakeLists.txt @@ -382,6 +382,7 @@ ggml_add_backend(RPC) ggml_add_backend(SYCL) ggml_add_backend(Vulkan) ggml_add_backend(WebGPU) +ggml_add_backend(zDNN) ggml_add_backend(OpenCL) foreach (target ggml-base ggml) diff --git a/ggml/src/ggml-backend-reg.cpp b/ggml/src/ggml-backend-reg.cpp index 6c31513750..5f02a710a1 100644 --- a/ggml/src/ggml-backend-reg.cpp +++ b/ggml/src/ggml-backend-reg.cpp @@ -49,6 +49,10 @@ #include "ggml-webgpu.h" #endif +#ifdef GGML_USE_ZDNN +#include "ggml-zdnn.h" +#endif + #ifdef GGML_USE_OPENCL #include "ggml-opencl.h" #endif @@ -180,6 +184,9 @@ struct ggml_backend_registry { #ifdef GGML_USE_WEBGPU register_backend(ggml_backend_webgpu_reg()); #endif +#ifdef GGML_USE_ZDNN + 
register_backend(ggml_backend_zdnn_reg()); +#endif #ifdef GGML_USE_OPENCL register_backend(ggml_backend_opencl_reg()); #endif diff --git a/ggml/src/ggml-cann/aclnn_ops.cpp b/ggml/src/ggml-cann/aclnn_ops.cpp index 07d6b8b67d..2a5cb8abfa 100755 --- a/ggml/src/ggml-cann/aclnn_ops.cpp +++ b/ggml/src/ggml-cann/aclnn_ops.cpp @@ -753,69 +753,55 @@ static void cann_copy(ggml_backend_cann_context& ctx, aclTensor* acl_src, void ggml_cann_dup(ggml_backend_cann_context& ctx, ggml_tensor* dst) { ggml_tensor* src0 = dst->src[0]; - aclTensor* acl_src = ggml_cann_create_tensor(src0); - aclTensor* acl_dst = ggml_cann_create_tensor(dst); if (ggml_are_same_shape(src0, dst)) { + aclTensor* acl_src = ggml_cann_create_tensor(src0); + aclTensor* acl_dst = ggml_cann_create_tensor(dst); if (dst->type == src0->type) { cann_copy(ctx, acl_src, acl_dst); } else { aclnn_cast(ctx, acl_src, acl_dst, ggml_cann_type_mapping(dst->type)); } + ggml_cann_release_resources(ctx, acl_src, acl_dst); } else { - if (ggml_is_contiguous(src0) && ggml_is_contiguous(dst)) { - if (dst->type == src0->type) { - size_t cpy_size = ggml_nbytes(dst); - ggml_cann_async_memcpy(ctx, dst->data, src0->data, cpy_size, - ACL_MEMCPY_DEVICE_TO_DEVICE); - return; - } else { - ggml_cann_pool_alloc src_buffer_allocator( - ctx.pool(), - ggml_nelements(dst) * ggml_type_size(dst->type)); - void* src_trans_buffer = src_buffer_allocator.get(); - size_t src_trans_nb[GGML_MAX_DIMS]; - src_trans_nb[0] = ggml_type_size(dst->type); - for (int i = 1; i < GGML_MAX_DIMS; i++) { - src_trans_nb[i] = src_trans_nb[i - 1] * src0->ne[i - 1]; - } - aclTensor* src_trans_tensor = ggml_cann_create_tensor( - src_trans_buffer, ggml_cann_type_mapping(dst->type), - ggml_type_size(dst->type), src0->ne, src_trans_nb, - GGML_MAX_DIMS); - - aclnn_cast(ctx, acl_src, src_trans_tensor, ggml_cann_type_mapping(dst->type)); - size_t cpy_size = ggml_nbytes(dst); - ggml_cann_async_memcpy(ctx, dst->data, src_trans_buffer, cpy_size, - ACL_MEMCPY_DEVICE_TO_DEVICE); - ggml_cann_release_resources(ctx, src_trans_tensor); - return; - } - } else if (ggml_is_contiguous(dst)) { - ggml_cann_pool_alloc src_buffer_allocator( - ctx.pool(), ggml_nelements(dst) * ggml_type_size(dst->type)); - void* src_trans_buffer = src_buffer_allocator.get(); + void* src_trans_buffer = src0->data; + ggml_cann_pool_alloc src_buffer_allocator; + if (!ggml_is_contiguous(src0)) { + aclTensor* acl_src = ggml_cann_create_tensor(src0); + src_buffer_allocator.alloc(ctx.pool(), + ggml_nelements(src0) * ggml_type_size(src0->type)); + src_trans_buffer = src_buffer_allocator.get(); size_t src_trans_nb[GGML_MAX_DIMS]; - src_trans_nb[0] = ggml_type_size(dst->type); + src_trans_nb[0] = ggml_type_size(src0->type); for (int i = 1; i < GGML_MAX_DIMS; i++) { src_trans_nb[i] = src_trans_nb[i - 1] * src0->ne[i - 1]; } aclTensor* src_trans_tensor = ggml_cann_create_tensor( - src_trans_buffer, ggml_cann_type_mapping(dst->type), - ggml_type_size(dst->type), src0->ne, src_trans_nb, + src_trans_buffer, ggml_cann_type_mapping(src0->type), + ggml_type_size(src0->type), src0->ne, src_trans_nb, GGML_MAX_DIMS); - - aclnn_cast(ctx, acl_src, src_trans_tensor, ggml_cann_type_mapping(dst->type)); - - size_t cpy_size = ggml_nbytes(dst); - ggml_cann_async_memcpy(ctx, dst->data, src_trans_buffer, cpy_size, - ACL_MEMCPY_DEVICE_TO_DEVICE); - ggml_cann_release_resources(ctx, src_trans_tensor); - return; - } else { - GGML_ABORT("Unsupport dst is not tontiguous."); + cann_copy(ctx, acl_src, src_trans_tensor); + ggml_cann_release_resources(ctx, acl_src, 
src_trans_tensor); } + + size_t src_reshape_nb[GGML_MAX_DIMS]; + src_reshape_nb[0] = ggml_type_size(src0->type); + for (int i = 1; i < GGML_MAX_DIMS; i++) { + src_reshape_nb[i] = src_reshape_nb[i - 1] * dst->ne[i - 1]; + } + + aclTensor* trans_acl_src = ggml_cann_create_tensor(src_trans_buffer, + ggml_cann_type_mapping(src0->type),ggml_type_size(src0->type), + dst->ne, src_reshape_nb, GGML_MAX_DIMS, ACL_FORMAT_ND); + aclTensor* acl_dst = ggml_cann_create_tensor(dst); + + if (dst->type == src0->type) { + cann_copy(ctx, trans_acl_src, acl_dst); + } else { + aclnn_cast(ctx, trans_acl_src, acl_dst, ggml_cann_type_mapping(dst->type)); + } + ggml_cann_release_resources(ctx, trans_acl_src, acl_dst); } - ggml_cann_release_resources(ctx, acl_src, acl_dst); + return; } /** @@ -1330,160 +1316,196 @@ static void aclnn_pow_tensor_tensor(ggml_backend_cann_context& ctx, } /** - * @brief Applies the Alibi (Attention with Linear Biases) mechanism to the - * @details This function implements the Alibi mechanism, which introduces - * learnable biases into the attention scores to simulate relative - * position encoding without the need for explicit positional - * embeddings. + * @brief Generate a range of values and apply a scalar base exponentiation. * - * @param ctx The backend CANN context for executing operations. - * @param acl_src The source tensor representing the query or key. - * @param acl_position The position tensor containing relative positions. - * @param acl_dst The destination tensor where the result will be stored. - * @param n_head The number of attention heads. - * @param src_ne The dimensions of the source tensor. - * @param src_nb0 The byte size of the first dimension of the source - tensor. - * @param max_bias The maximum bias value used in the Alibi mechanism. - * @param dst The destination tensor object for additional metadata. + * This function creates an evenly spaced sequence from `start` to `stop` (exclusive), + * with step size `step`, stores it in a temporary buffer, and then computes: * - * The function performs the following steps: - * 1. Calculates the logarithm floor of the number of heads to determine the - base for bias calculation. - * 2. Initializes arrays with arithmetic sequences and fills them with bias - values. - * 3. Computes the bias tensor based on the calculated biases and arithmetic - sequences. - * 4. Reshapes the bias tensor to match the dimensions of the input tensors. - * 5. Multiplies the position tensor by the bias tensor. - * 6. Adds the result of the multiplication to the source tensor to produce the - final output. + * @f[ + * slope[i] = m^{\left( start + i \cdot step \right)}, \quad 0 \le i < size + * @f] + * + * The results are written to the provided @p slope_buffer. + * + * @param ctx CANN backend context for memory allocation and operator execution. + * @param slope_buffer Pointer to the output buffer (float array) for the computed slope values. + * @param m Scalar base for the exponentiation. + * @param size Number of elements in the generated sequence. + * @param start Starting exponent offset. + * @param stop Stopping exponent offset (exclusive). + * @param step Step size for the exponent increment. 
*/ -static void aclnn_alibi(ggml_backend_cann_context& ctx, aclTensor* acl_src, - aclTensor* acl_position, aclTensor* acl_dst, - const int n_head, int64_t* src_ne, const size_t src_nb0, - float max_bias, ggml_tensor* dst) { - const int64_t ne2_ne3 = src_ne[2] * src_ne[3]; - GGML_ASSERT(src_nb0 == sizeof(float)); - GGML_ASSERT(n_head == src_ne[2]); +static void aclnn_get_slope_inner(ggml_backend_cann_context& ctx, void* slope_buffer, + float m, int64_t size, float start, float stop, float step){ + int64_t ne[] = {size}; + size_t nb[] = {sizeof(float)}; - const int n_heads_log2_floor = 1u << (uint32_t)floor(log2(n_head)); + ggml_cann_pool_alloc arange_allocator(ctx.pool(), size * sizeof(float)); + void* arange_buffer = arange_allocator.get(); - float m0 = powf(2.0f, -(max_bias) / n_heads_log2_floor); - float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); + aclTensor* arange_tensor = ggml_cann_create_tensor( + arange_buffer, ACL_FLOAT, sizeof(float), ne, nb, 1); + aclnn_arange(ctx, arange_tensor, start, stop, step, size); - // init arange - ggml_cann_pool_alloc arange_allocator(ctx.pool(), - ne2_ne3 * ggml_type_size(dst->type)); - void* tmp_arange_buffer = arange_allocator.get(); + aclTensor* slope_tensor = ggml_cann_create_tensor( + slope_buffer, ACL_FLOAT, sizeof(float), ne, nb, 1); - // arange1: [1, ..., n_heads_log2_floor+1) - float start = 1; - float stop = n_heads_log2_floor + 1; - float step = 1; - int64_t n_elements_arange = n_heads_log2_floor; + aclScalar* sc = aclCreateScalar(&m, aclDataType::ACL_FLOAT); - int64_t tmp_arange1_ne[] = {n_heads_log2_floor}; - size_t tmp_arange1_nb[] = {sizeof(dst->type)}; - aclTensor* tmp_arange1_tensor = ggml_cann_create_tensor( - tmp_arange_buffer, ggml_cann_type_mapping(dst->type), - ggml_type_size(dst->type), tmp_arange1_ne, tmp_arange1_nb, - GGML_MAX_DIMS - 3, ACL_FORMAT_ND); - - aclnn_arange(ctx, tmp_arange1_tensor, start, stop, step, n_elements_arange); - - aclTensor* tmp_arange2_tensor = nullptr; - if (n_heads_log2_floor < ne2_ne3) { - // arange2: [1, ..., 2 * (k - n_heads_log2_floor) + 1) - start = 1; - stop = 2 * (ne2_ne3 - n_heads_log2_floor) + 1; - step = 2; - n_elements_arange = ne2_ne3 - n_heads_log2_floor; - int64_t tmp_arange2_ne[] = {ne2_ne3 - n_heads_log2_floor}; - size_t tmp_arange2_nb[] = {sizeof(dst->type)}; - - aclTensor* tmp_arange2_tensor = ggml_cann_create_tensor( - (char*)tmp_arange_buffer + - n_heads_log2_floor * ggml_type_size(dst->type), - ggml_cann_type_mapping(dst->type), ggml_type_size(dst->type), - tmp_arange2_ne, tmp_arange2_nb, GGML_MAX_DIMS - 3, ACL_FORMAT_ND); - aclnn_arange(ctx, tmp_arange2_tensor, start, stop, step, - n_elements_arange); - } - - // init mk_base - ggml_cann_pool_alloc mk_base_allocator(ctx.pool(), - ne2_ne3 * ggml_type_size(dst->type)); - void* tmp_mk_base_buffer = mk_base_allocator.get(); - int64_t tmp_mk_base1_ne[] = {n_heads_log2_floor}; - size_t tmp_mk_base1_nb[] = {sizeof(dst->type)}; - aclTensor* tmp_mk_base1_tensor = ggml_cann_create_tensor( - tmp_mk_base_buffer, ggml_cann_type_mapping(dst->type), - ggml_type_size(dst->type), tmp_mk_base1_ne, tmp_mk_base1_nb, - GGML_MAX_DIMS - 3, ACL_FORMAT_ND); - - aclnn_fill_scalar(ctx, m0, tmp_mk_base1_tensor); - - aclTensor* tmp_mk_base2_tensor = nullptr; - if (n_heads_log2_floor < ne2_ne3) { - int64_t tmp_mk_base2_ne[] = {ne2_ne3 - n_heads_log2_floor}; - size_t tmp_mk_base2_nb[] = {sizeof(dst->type)}; - aclTensor* tmp_mk_base2_tensor = ggml_cann_create_tensor( - (char*)tmp_mk_base_buffer + - n_heads_log2_floor * ggml_type_size(dst->type), - 
ggml_cann_type_mapping(dst->type), ggml_type_size(dst->type), - tmp_mk_base2_ne, tmp_mk_base2_nb, GGML_MAX_DIMS - 3, ACL_FORMAT_ND); - aclnn_fill_scalar(ctx, m1, tmp_mk_base2_tensor); - } - - // init mk - int64_t tmp_mk_base_ne[] = {ne2_ne3}; - size_t tmp_mk_base_nb[] = {sizeof(dst->type)}; - aclTensor* tmp_mk_base_tensor = ggml_cann_create_tensor( - tmp_mk_base_buffer, ggml_cann_type_mapping(dst->type), - ggml_type_size(dst->type), tmp_mk_base_ne, tmp_mk_base_nb, - GGML_MAX_DIMS - 3, ACL_FORMAT_ND); - aclTensor* tmp_arange_tensor = ggml_cann_create_tensor( - tmp_arange_buffer, ggml_cann_type_mapping(dst->type), - ggml_type_size(dst->type), tmp_mk_base_ne, tmp_mk_base_nb, - GGML_MAX_DIMS - 3, ACL_FORMAT_ND); - aclnn_pow_tensor_tensor(ctx, tmp_mk_base_tensor, tmp_arange_tensor); - - // reshape mk - int64_t tmp_mk_ne[] = {1, 1, src_ne[2], src_ne[3]}; - size_t tmp_mk_nb[GGML_MAX_DIMS]; - tmp_mk_nb[0] = ggml_type_size(dst->type); - for (int i = 1; i < GGML_MAX_DIMS; i++) { - tmp_mk_nb[i] = tmp_mk_nb[i - 1] * tmp_mk_ne[i - 1]; - } - aclTensor* tmp_mk_tensor = ggml_cann_create_tensor( - tmp_mk_base_buffer, ggml_cann_type_mapping(dst->type), - ggml_type_size(dst->type), tmp_mk_ne, tmp_mk_nb, GGML_MAX_DIMS, - ACL_FORMAT_ND); - - // acl_position * mk - int64_t tmp_output_ne[] = {src_ne[0], src_ne[1], src_ne[2], src_ne[3]}; - size_t tmp_output_nb[GGML_MAX_DIMS]; - tmp_output_nb[0] = ggml_type_size(dst->type); - for (int i = 1; i < GGML_MAX_DIMS; i++) { - tmp_output_nb[i] = tmp_output_nb[i - 1] * tmp_output_ne[i - 1]; - } - ggml_cann_pool_alloc output_allocator(ctx.pool(), ggml_nbytes(dst)); - void* tmp_output_buffer = output_allocator.get(); - aclTensor* tmp_output_tensor = ggml_cann_create_tensor( - tmp_output_buffer, ggml_cann_type_mapping(dst->type), - ggml_type_size(dst->type), tmp_output_ne, tmp_output_nb, GGML_MAX_DIMS, - ACL_FORMAT_ND); - aclnn_mul(ctx, acl_position, tmp_mk_tensor, tmp_output_tensor); - - // add - aclnn_add(ctx, tmp_output_tensor, acl_src, acl_dst); - ggml_cann_release_resources(ctx, tmp_arange1_tensor, tmp_arange2_tensor, - tmp_mk_base1_tensor, tmp_mk_base2_tensor, tmp_mk_base_tensor, - tmp_arange_tensor, tmp_mk_tensor, tmp_output_tensor); + GGML_CANN_CALL_ACLNN_OP(ctx, PowScalarTensor, sc, arange_tensor, slope_tensor); + ggml_cann_release_resources(ctx, sc, arange_tensor, slope_tensor); } -void ggml_cann_cpy(ggml_backend_cann_context& ctx, ggml_tensor* dst) { +/** + * @brief Compute slope values for multiple attention heads based on ALiBi bias parameters. + * + * This function generates slope values for each attention head according to the ALiBi + * (Attention with Linear Biases) method. It splits the computation into two ranges depending + * on whether the head index is less than @p n_head_log2 or not, and uses different base values + * (`m0` and `m1`) for the exponentiation. + * + * @f[ + * slope[h] = + * \begin{cases} + * m_0^{(h + 1)}, & h < n\_head\_log2 \\ + * m_1^{\left( 2 \cdot (h - n\_head\_log2) + 1 \right)}, & h \geq n\_head\_log2 + * \end{cases} + * \quad , \quad \text{if } max\_bias > 0 + * @f] + * + * If @p max_bias <= 0, all slope values are set to 1.0. + * + * @param ctx CANN backend context for memory allocation and operator execution. + * @param n_head Total number of attention heads. + * @param slope_buffer Pointer to the output buffer (float array) for storing slopes. + * @param max_bias Maximum bias value for slope computation. 
+ * +*/ +static void aclnn_get_slope(ggml_backend_cann_context & ctx, int64_t n_head, + void* slope_buffer, float max_bias) { + const int n_head_log2 = 1u << (uint32_t) floor(log2(n_head)); + + float m0 = powf(2.0f, -(max_bias) / n_head_log2); + float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + + // const float slope = (max_bias > 0.0f) ? + // h < n_head_log2 ? + // powf(m0, h + 1) : + // powf(m1, 2*(h - n_head_log2) + 1) : + // 1.0f; + // arange1 + float start = 0 + 1; + float end = (n_head_log2 - 1) + 1; + float step = 1; + float count = n_head_log2; + // end needs to be +1 because aclnn uses a left-closed, right-open interval. + aclnn_get_slope_inner(ctx, slope_buffer, m0, count, start, end + 1, step); + if (n_head_log2 < n_head) { + // arange2 + start = 2 * (n_head_log2 - n_head_log2) + 1; + end = 2 * ((n_head - 1) - n_head_log2) + 1; + step = 2; + count = n_head - n_head_log2; + aclnn_get_slope_inner( + ctx, (char *) slope_buffer + n_head_log2 * sizeof(float), + m1, count, start, end + 1, step); + } +} + +/** + * @brief Add ALiBi (Attention with Linear Biases) positional biases to the attention mask. + * + * This function computes the ALiBi slopes for each attention head (if max_bias > 0), + * multiplies them with the attention mask to produce bias tensors, and adds these biases + * to the destination tensor (@p dst). + * + * The function performs necessary broadcasting of the mask and slope tensors to match + * the shape of the destination tensor, then applies element-wise multiplication and addition + * using CANN operators. + * + * @param ctx CANN backend context for memory management and operator execution. + * @param mask Input attention mask tensor, assumed to be contiguous. + * @param dst Destination tensor to which ALiBi biases will be added. + * @param dst_ptr Pointer to the memory of the destination tensor. + * @param max_bias Maximum bias value controlling the slope scaling. + * + * @note + * - Write data into dst_ptr using only the shape information of the dst tensor. + * - `GGML_MAX_DIMS + 2` is used to extend tensor dimensions for broadcasting. 
+ */ +static void aclnn_add_alibi(ggml_backend_cann_context& ctx, ggml_tensor* mask, + ggml_tensor* dst, void* dst_ptr, float max_bias) { + void* slope_buffer = nullptr; + void* bias_buffer = nullptr; + + if (max_bias > 0.0f) { + int64_t n_heads = dst->ne[2]; + ggml_cann_pool_alloc slope_allocator(ctx.pool(), n_heads * sizeof(float)); + slope_buffer = slope_allocator.get(); + ggml_cann_pool_alloc bias_allocator( + ctx.pool(), ggml_nelements(dst) * ggml_element_size(dst)); + bias_buffer = bias_allocator.get(); + aclnn_get_slope(ctx, n_heads, slope_buffer, max_bias); + } + + // broadcast for mask, slop and dst; + int64_t nr2 = dst->ne[2] / mask->ne[2]; + int64_t nr3 = dst->ne[3] / mask->ne[3]; + + // broadcast the mask across rows + int64_t mask_ne[] = { mask->ne[0], dst->ne[1], mask->ne[2], 1, mask->ne[3], 1 }; + size_t mask_nb[] = { + mask_nb[0] = mask->nb[0], mask_nb[1] = mask->nb[1], mask_nb[2] = mask->nb[2], + mask_nb[3] = mask->nb[2], mask_nb[4] = mask->nb[3], mask_nb[5] = mask->nb[3] + }; + + int64_t dst_ne[] = { dst->ne[0], dst->ne[1], mask->ne[2], nr2, mask->ne[3], nr3 }; + size_t dst_nb[] = { + dst_nb[0] = dst->nb[0], dst_nb[1] = dst->nb[1], dst_nb[2] = dst->nb[2], + dst_nb[3] = dst->nb[2], dst_nb[4] = dst->nb[3], dst_nb[5] = dst->nb[3] + }; + + // slope is a 1 dim tensor, slope.ne2 == dst.ne2 + int64_t slope_ne[] = { 1, 1, mask->ne[2], nr2, 1, 1 }; + size_t slope_nb[GGML_MAX_DIMS + 2]; + slope_nb[0] = sizeof(float); + for (int i = 1; i < GGML_MAX_DIMS + 2; i++) { + slope_nb[i] = slope_nb[i - 1] * slope_ne[i - 1]; + } + + aclTensor* acl_slope = ggml_cann_create_tensor( + slope_buffer, ACL_FLOAT, sizeof(float), + slope_ne, slope_nb, GGML_MAX_DIMS + 2); + aclTensor* acl_mask = ggml_cann_create_tensor( + mask, mask_ne, mask_nb, GGML_MAX_DIMS + 2); + + // write data into dst_ptr using only the shape information of the dst tensor. + aclTensor* acl_dst = ggml_cann_create_tensor( + dst_ptr, ggml_cann_type_mapping(dst->type), + ggml_type_size(dst->type), dst_ne, dst_nb, + GGML_MAX_DIMS + 2); + + if (max_bias > 0.0f) { + int64_t bias_ne[] = { mask->ne[0], dst->ne[1], mask->ne[2], nr2, mask->ne[3], 1 }; + size_t bias_nb[GGML_MAX_DIMS + 2]; + bias_nb[0] = sizeof(float); + for (int i = 1; i < GGML_MAX_DIMS + 2; i++) { + bias_nb[i] = bias_nb[i - 1] * bias_ne[i - 1]; + } + aclTensor* bias_tensor = ggml_cann_create_tensor( + bias_buffer, ACL_FLOAT, sizeof(float), + bias_ne, bias_nb, GGML_MAX_DIMS + 2); + + aclnn_mul(ctx, acl_slope, acl_mask, bias_tensor); + aclnn_add(ctx, acl_dst, bias_tensor); + ggml_cann_release_resources(ctx, bias_tensor); + } else { + aclnn_add(ctx, acl_dst, acl_mask); + } + ggml_cann_release_resources(ctx, acl_slope, acl_mask, acl_dst); +} + +void ggml_cann_cpy(ggml_backend_cann_context & ctx, ggml_tensor * dst) { ggml_cann_dup(ctx, dst); } @@ -1501,118 +1523,41 @@ void ggml_cann_cpy(ggml_backend_cann_context& ctx, ggml_tensor* dst) { * @param acl_dst The destination tensor where the softmax results will be * stored. 
*/ -static void aclnn_softmax(ggml_backend_cann_context& ctx, aclTensor* acl_src, - int64_t dim, aclTensor* acl_dst) { +static void aclnn_softmax(ggml_backend_cann_context & ctx, + aclTensor* acl_src, int64_t dim, aclTensor * acl_dst) { GGML_CANN_CALL_ACLNN_OP(ctx, Softmax, acl_src, dim, acl_dst); } -void ggml_cann_softmax(ggml_backend_cann_context& ctx, ggml_tensor* dst) { +void ggml_cann_softmax(ggml_backend_cann_context & ctx, ggml_tensor * dst) { ggml_tensor* src0 = dst->src[0]; ggml_tensor* src1 = dst->src[1]; // mask aclTensor* acl_src0 = ggml_cann_create_tensor(src0); - aclTensor* acl_dst = ggml_cann_create_tensor(dst); + aclTensor* acl_dst = ggml_cann_create_tensor(dst); - float scale = 1.0f; + float scale = 1.0f; float max_bias = 0.0f; - memcpy(&scale, (float*)dst->op_params + 0, sizeof(float)); - memcpy(&max_bias, (float*)dst->op_params + 1, sizeof(float)); + memcpy(&scale, (float *) dst->op_params + 0, sizeof(float)); + memcpy(&max_bias, (float *) dst->op_params + 1, sizeof(float)); // input mul scale aclScalar* acl_scale = aclCreateScalar(&scale, aclDataType::ACL_FLOAT); + ggml_cann_pool_alloc src_tensor_allocator(ctx.pool(), ggml_nbytes(src0)); + void* src_tensor_buffer = src_tensor_allocator.get(); + aclTensor* softmax_tensor = ggml_cann_create_tensor( + src_tensor_buffer, ggml_cann_type_mapping(src0->type), + ggml_element_size(src0), src0->ne, src0->nb,GGML_MAX_DIMS); - size_t n_bytes = ggml_nbytes(src0); - ggml_cann_pool_alloc mul_scale_allocator(ctx.pool(), n_bytes); - void* input_mul_scale_buffer = mul_scale_allocator.get(); - aclTensor* acl_input_mul_scale_tensor = ggml_cann_create_tensor( - input_mul_scale_buffer, ACL_FLOAT, ggml_type_size(src0->type), src0->ne, - src0->nb, GGML_MAX_DIMS); - - bool inplace = false; - aclnn_muls(ctx, acl_src0, scale, acl_input_mul_scale_tensor, inplace); + aclnn_muls(ctx, acl_src0, scale, softmax_tensor, false); // mask - aclTensor* acl_src1_fp32_tensor = nullptr; - aclTensor* tmp_mask_tensor = nullptr; - ggml_cann_pool_alloc src1_fp32_allocator(ctx.pool()); if (src1) { - const bool use_f16 = src1->type == GGML_TYPE_F16; - if (use_f16) { - // cast to fp32 - size_t n_bytes = ggml_nelements(src1) * sizeof(float_t); - size_t src1_fp32_nb[GGML_MAX_DIMS]; - src1_fp32_nb[0] = sizeof(float_t); - for (int i = 1; i < GGML_MAX_DIMS; i++) { - src1_fp32_nb[i] = src1_fp32_nb[i - 1] * src1->ne[i - 1]; - } - src1_fp32_allocator.alloc(n_bytes); - void* src1_fp32_buffer = src1_fp32_allocator.get(); - acl_src1_fp32_tensor = ggml_cann_create_tensor( - src1_fp32_buffer, ACL_FLOAT, sizeof(float), src1->ne, - src1_fp32_nb, GGML_MAX_DIMS); - aclTensor* acl_src1 = ggml_cann_create_tensor(src1); - aclnn_cast(ctx, acl_src1, acl_src1_fp32_tensor, ACL_FLOAT); - ggml_cann_release_resources(ctx, acl_src1); - } else { - acl_src1_fp32_tensor = ggml_cann_create_tensor(src1); - } - - // broadcast the mask across rows, only use ne11 of ne01 in mask - if (src1->ne[1] != src0->ne[1]) { - // mask shape: [1,1,ne11,ne10] - int64_t tmp_mask_ne[] = {src0->ne[0], src0->ne[1], 1, 1}; - size_t tmp_mask_nb[GGML_MAX_DIMS]; - tmp_mask_nb[0] = sizeof(float_t); - for (int i = 1; i < GGML_MAX_DIMS; i++) { - tmp_mask_nb[i] = tmp_mask_nb[i - 1] * tmp_mask_ne[i - 1]; - } - tmp_mask_tensor = ggml_cann_create_tensor( - src1->data, ACL_FLOAT, sizeof(float), tmp_mask_ne, tmp_mask_nb, - GGML_MAX_DIMS, ACL_FORMAT_ND); - } - - // alibi - const int n_head = src0->ne[2]; - const size_t src_nb0 = src0->nb[0]; - - n_bytes = ggml_nbytes(dst); - ggml_cann_pool_alloc output_allocator(ctx.pool(), 
n_bytes); - void* output_buffer = output_allocator.get(); - aclTensor* alibi_output_tensor = ggml_cann_create_tensor( - output_buffer, ACL_FLOAT, ggml_type_size(dst->type), dst->ne, - dst->nb, GGML_MAX_DIMS); - if (max_bias <= 0.0f) { - // slope = 1.0 - if (tmp_mask_tensor) { - aclnn_add(ctx, tmp_mask_tensor, acl_input_mul_scale_tensor, - alibi_output_tensor); - } else { - aclnn_add(ctx, acl_src1_fp32_tensor, acl_input_mul_scale_tensor, - alibi_output_tensor); - } - } else { - // slope != 1.0 - if (tmp_mask_tensor) { - aclnn_alibi(ctx, acl_input_mul_scale_tensor, tmp_mask_tensor, - alibi_output_tensor, n_head, src0->ne, src_nb0, - max_bias, dst); - } else { - aclnn_alibi(ctx, acl_input_mul_scale_tensor, - acl_src1_fp32_tensor, alibi_output_tensor, n_head, - src0->ne, src_nb0, max_bias, dst); - } - } - - // softmax - aclnn_softmax(ctx, alibi_output_tensor, 3, acl_dst); - ggml_cann_release_resources(ctx, alibi_output_tensor); - } else { - aclnn_softmax(ctx, acl_input_mul_scale_tensor, 3, acl_dst); + aclnn_add_alibi(ctx, src1, src0, src_tensor_buffer, max_bias); } - - ggml_cann_release_resources(ctx, acl_src0, acl_src1_fp32_tensor, acl_dst, - acl_scale, acl_input_mul_scale_tensor, tmp_mask_tensor); + // softmax + aclnn_softmax(ctx, softmax_tensor, 3, acl_dst); + ggml_cann_release_resources(ctx, acl_src0, acl_dst, acl_scale, softmax_tensor); } /** @@ -2209,86 +2154,129 @@ static void aclnn_cache_init(ggml_backend_cann_context& ctx, ggml_tensor* dst, GGML_TENSOR_BINARY_OP_LOCALS - // theta_scale arange, [0,1,...,ne00/2 - 1] int64_t theta_scale_length = ne00 / 2; - ggml_cann_pool_alloc theta_scale_allocator(ctx.pool(), - theta_scale_length * sizeof(float_t)); - void* theta_scale_buffer = theta_scale_allocator.get(); int64_t theta_scale_ne[] = {theta_scale_length, 1, 1, 1}; size_t theta_scale_nb[] = {sizeof(float_t), sizeof(float_t), sizeof(float_t), theta_scale_length * sizeof(float_t)}; - aclTensor* acl_theta_scale_tensor = - ggml_cann_create_tensor(theta_scale_buffer, ACL_FLOAT, sizeof(float_t), - theta_scale_ne, theta_scale_nb, GGML_MAX_DIMS); - float start = 0; - float step = 1; - float stop = ne00 / 2; - float n_elements = ne00 / 2; - aclnn_arange(ctx, acl_theta_scale_tensor, start, stop, step, n_elements); - - // power - aclScalar* acl_theta_scale = aclCreateScalar(&theta_scale, aclDataType::ACL_FLOAT); - GGML_CANN_CALL_ACLNN_OP(ctx, PowScalarTensor, acl_theta_scale, acl_theta_scale_tensor, - acl_theta_scale_tensor); - - // freq_scale - if (freq_scale != 1) { - aclnn_muls(ctx, acl_theta_scale_tensor, freq_scale, nullptr, true); - } - - // freq_factors - if (src2) { - aclTensor* acl_freq_factors_tensor = ggml_cann_create_tensor( - src2->data, ggml_cann_type_mapping(src2->type), - ggml_type_size(src2->type), theta_scale_ne, theta_scale_nb, GGML_MAX_DIMS); - aclnn_div(ctx, acl_theta_scale_tensor, acl_freq_factors_tensor); - ggml_cann_release_resources(ctx, acl_freq_factors_tensor); - } - - // position GGML_ASSERT(src1->type == GGML_TYPE_I32); int64_t position_length = src1->ne[0]; int64_t position_ne[] = {1, 1, position_length, 1}; size_t position_nb[] = {sizeof(int32_t), sizeof(int32_t), sizeof(int32_t), sizeof(int32_t) * position_length}; - aclTensor* acl_position_tensor = ggml_cann_create_tensor( - src1->data, ggml_cann_type_mapping(src1->type), - ggml_type_size(src1->type), position_ne, position_nb, GGML_MAX_DIMS); - // power * position - int64_t theta_length = theta_scale_length * position_length; - ggml_cann_pool_alloc theta_allocator(ctx.pool(), - theta_length * sizeof(float_t)); - 
void* theta_buffer = theta_allocator.get(); int64_t theta_ne[] = {theta_scale_length, 1, position_length, 1}; size_t theta_nb[GGML_MAX_DIMS]; theta_nb[0] = sizeof(float_t); for (int i = 1; i < GGML_MAX_DIMS; i++) { theta_nb[i] = theta_nb[i - 1] * theta_ne[i - 1]; } - aclTensor* acl_theta_tensor = - ggml_cann_create_tensor(theta_buffer, ACL_FLOAT, sizeof(float_t), - theta_ne, theta_nb, GGML_MAX_DIMS); - aclnn_mul(ctx, acl_position_tensor, acl_theta_scale_tensor, - acl_theta_tensor); - // sin/cos - ggml_cann_pool_alloc sin_allocator(ctx.pool(), - theta_length * sizeof(float_t)); - void* sin_buffer = sin_allocator.get(); + bool is_q = (std::strncmp(dst->name, "Qcur-", 5) == 0); + bool is_k = (std::strncmp(dst->name, "Kcur-", 5) == 0); + + // used for accuracy testing + bool is_attention = is_q || is_k; + + if(ctx.init_ptr == nullptr || !is_attention) { + // theta_scale arange, [0,1,...,ne00/2 - 1] + if(ctx.init_ptr != nullptr){ + ACL_CHECK(aclrtFree(ctx.init_ptr)); + } + ACL_CHECK(aclrtMalloc(&ctx.init_ptr, theta_scale_length * sizeof(float_t), ACL_MEM_MALLOC_HUGE_FIRST)); + + aclTensor* acl_theta_scale_tensor = + ggml_cann_create_tensor(ctx.init_ptr, ACL_FLOAT, sizeof(float_t), + theta_scale_ne, theta_scale_nb, GGML_MAX_DIMS); + float start = 0; + float step = 1; + float stop = ne00 / 2; + float n_elements = ne00 / 2; + aclnn_arange(ctx, acl_theta_scale_tensor, start, stop, step, n_elements); + + // power + aclScalar* acl_theta_scale = aclCreateScalar(&theta_scale, aclDataType::ACL_FLOAT); + GGML_CANN_CALL_ACLNN_OP(ctx, PowScalarTensor, acl_theta_scale, acl_theta_scale_tensor, + acl_theta_scale_tensor); + + // freq_scale + if (freq_scale != 1) { + aclnn_muls(ctx, acl_theta_scale_tensor, freq_scale, nullptr, true); + } + + // freq_factors + if (src2) { + aclTensor* acl_freq_factors_tensor = ggml_cann_create_tensor( + src2->data, ggml_cann_type_mapping(src2->type), + ggml_type_size(src2->type), theta_scale_ne, theta_scale_nb, GGML_MAX_DIMS); + aclnn_div(ctx, acl_theta_scale_tensor, acl_freq_factors_tensor); + ggml_cann_release_resources(ctx, acl_freq_factors_tensor); + } + // release + ggml_cann_release_resources(ctx, acl_theta_scale_tensor,acl_theta_scale); + } + + if(ctx.sin_ptr == nullptr) { + int64_t theta_length = theta_scale_length * ctx.max_prompt_length; + ACL_CHECK(aclrtMalloc(&ctx.sin_ptr, theta_length * sizeof(float_t), ACL_MEM_MALLOC_HUGE_FIRST)); + ACL_CHECK(aclrtMalloc(&ctx.cos_ptr, theta_length * sizeof(float_t), ACL_MEM_MALLOC_HUGE_FIRST)); + } + if(position_length > ctx.max_prompt_length) { + ctx.max_prompt_length = position_length; + int64_t theta_length = theta_scale_length * ctx.max_prompt_length; + ACL_CHECK(aclrtFree(ctx.sin_ptr)); + ACL_CHECK(aclrtFree(ctx.cos_ptr)); + ACL_CHECK(aclrtMalloc(&ctx.sin_ptr, theta_length * sizeof(float_t), ACL_MEM_MALLOC_HUGE_FIRST)); + ACL_CHECK(aclrtMalloc(&ctx.cos_ptr, theta_length * sizeof(float_t), ACL_MEM_MALLOC_HUGE_FIRST)); + } + + bool is_fisrt_layer = (std::strncmp(dst->name, "Qcur-0", GGML_MAX_NAME) == 0); + + if(is_fisrt_layer || !is_attention) { + + aclTensor* acl_theta_scale_tensor = + ggml_cann_create_tensor(ctx.init_ptr, ACL_FLOAT, sizeof(float_t), + theta_scale_ne, theta_scale_nb, GGML_MAX_DIMS); + + // position + aclTensor* acl_position_tensor = ggml_cann_create_tensor( + src1->data, ggml_cann_type_mapping(src1->type), + ggml_type_size(src1->type), position_ne, position_nb, GGML_MAX_DIMS); + + // power * position + int64_t theta_length = theta_scale_length * position_length; + ggml_cann_pool_alloc 
theta_allocator(ctx.pool(), + theta_length * sizeof(float_t)); + void* theta_buffer = theta_allocator.get(); + + aclTensor* acl_theta_tensor = + ggml_cann_create_tensor(theta_buffer, ACL_FLOAT, sizeof(float_t), + theta_ne, theta_nb, GGML_MAX_DIMS); + aclnn_mul(ctx, acl_position_tensor, acl_theta_scale_tensor, + acl_theta_tensor); + + // sin/cos + aclTensor* acl_sin_tensor = ggml_cann_create_tensor( + ctx.sin_ptr, ACL_FLOAT, sizeof(float_t), theta_ne, theta_nb, + GGML_MAX_DIMS, ACL_FORMAT_ND); + aclnn_sin(ctx, acl_theta_tensor, acl_sin_tensor); + + aclTensor* acl_cos_tensor = ggml_cann_create_tensor( + ctx.cos_ptr, ACL_FLOAT, sizeof(float_t), theta_ne, theta_nb, + GGML_MAX_DIMS, ACL_FORMAT_ND); + aclnn_cos(ctx, acl_theta_tensor, acl_cos_tensor); + + // release + ggml_cann_release_resources(ctx, acl_theta_scale_tensor, acl_position_tensor, + acl_theta_tensor, acl_sin_tensor, acl_cos_tensor); + } + aclTensor* acl_sin_tensor = ggml_cann_create_tensor( - sin_buffer, ACL_FLOAT, sizeof(float_t), theta_ne, theta_nb, - GGML_MAX_DIMS, ACL_FORMAT_ND); - aclnn_sin(ctx, acl_theta_tensor, acl_sin_tensor); - - ggml_cann_pool_alloc cos_allocator(ctx.pool(), - theta_length * sizeof(float_t)); - void* cos_buffer = cos_allocator.get(); + ctx.sin_ptr, ACL_FLOAT, sizeof(float_t), theta_ne, theta_nb, + GGML_MAX_DIMS, ACL_FORMAT_ND); aclTensor* acl_cos_tensor = ggml_cann_create_tensor( - cos_buffer, ACL_FLOAT, sizeof(float_t), theta_ne, theta_nb, - GGML_MAX_DIMS, ACL_FORMAT_ND); - aclnn_cos(ctx, acl_theta_tensor, acl_cos_tensor); + ctx.cos_ptr, ACL_FLOAT, sizeof(float_t), theta_ne, theta_nb, + GGML_MAX_DIMS, ACL_FORMAT_ND); // attn_factor if (attn_factor != 1) { @@ -2312,8 +2300,7 @@ static void aclnn_cache_init(ggml_backend_cann_context& ctx, ggml_tensor* dst, } // release - ggml_cann_release_resources(ctx, acl_theta_scale_tensor, acl_position_tensor, - acl_theta_tensor, acl_sin_tensor, acl_cos_tensor, acl_theta_scale); + ggml_cann_release_resources(ctx, acl_sin_tensor, acl_cos_tensor); } #ifdef __cplusplus @@ -3208,104 +3195,24 @@ void ggml_cann_flash_attn_ext(ggml_backend_cann_context& ctx, ggml_tensor* dst){ // Compute the slope if needed. Derived from ggml_cann_softmax(). 
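For reference, here is a minimal CPU-side sketch of the ALiBi slope series that the new aclnn_get_slope helper is expected to write into slope_buffer. It is inferred from the arange/pow sequence being removed below and from ggml's usual slope definition; alibi_slopes is a hypothetical name for illustration, not part of the patch.

    #include <cmath>
    #include <cstdint>
    #include <vector>

    // Heads 0..2^k-1 use powers of m0 and the remaining heads use odd powers of m1,
    // where 2^k is the largest power of two not exceeding n_heads.
    static std::vector<float> alibi_slopes(int64_t n_heads, float max_bias) {
        const uint32_t n_log2 = 1u << (uint32_t) floor(log2((float) n_heads));
        const float m0 = powf(2.0f, -max_bias / n_log2);
        const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_log2);

        std::vector<float> slopes(n_heads);
        for (int64_t h = 0; h < n_heads; h++) {
            slopes[h] = h < (int64_t) n_log2 ? powf(m0, (float) (h + 1))
                                             : powf(m1, (float) (2 * (h - n_log2) + 1));
        }
        return slopes;
    }

The flash-attention path below then broadcasts these per-head values as a [1, 1, n_heads, 1] tensor and multiplies them into the mask on device, mirroring what aclnn_add_alibi does for the soft-max path above.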
if(maxBias != 0.0f){ // alibi - const int64_t ne2_ne3 = src0->ne[2] * src0->ne[3]; - const int64_t n_head = src0->ne[2]; - const int n_heads_log2_floor = 1u << (uint32_t)floor(log2(n_head)); - float m0 = powf(2.0f, -(maxBias) / n_heads_log2_floor); - float m1 = powf(2.0f, -(maxBias / 2.0f) / n_heads_log2_floor); - // init arange - ggml_cann_pool_alloc arange_allocator(ctx.pool(), - ne2_ne3 * faElemSize); - void* tmp_arange_buffer = arange_allocator.get(); + const int64_t n_heads = src0->ne[2]; + ggml_cann_pool_alloc slope_allocator(ctx.pool(), n_heads * sizeof(float)); + void* slope_buffer = slope_allocator.get(); + aclnn_get_slope(ctx, n_heads, slope_buffer, maxBias); - // arange1: [1, ..., n_heads_log2_floor+1) - float start = 1; - float stop = n_heads_log2_floor + 1; - float step = 1; - int64_t n_elements_arange = n_heads_log2_floor; - - int64_t tmp_arange1_ne[] = {n_heads_log2_floor}; - size_t tmp_arange1_nb[] = {faElemSize}; - aclTensor* tmp_arange1_tensor = ggml_cann_create_tensor( - tmp_arange_buffer, faDataType, faElemSize, - tmp_arange1_ne, tmp_arange1_nb, - GGML_MAX_DIMS - 3, ACL_FORMAT_ND); - - aclnn_arange(ctx, tmp_arange1_tensor, start, stop, step, n_elements_arange); - - aclTensor* tmp_arange2_tensor = nullptr; - if (n_heads_log2_floor < ne2_ne3) { - // arange2: [1, ..., 2 * (k - n_heads_log2_floor) + 1) - start = 1; - stop = 2 * (ne2_ne3 - n_heads_log2_floor) + 1; - step = 2; - n_elements_arange = ne2_ne3 - n_heads_log2_floor; - int64_t tmp_arange2_ne[] = {ne2_ne3 - n_heads_log2_floor}; - size_t tmp_arange2_nb[] = {faElemSize}; - - aclTensor* tmp_arange2_tensor = ggml_cann_create_tensor( - (char*)tmp_arange_buffer + - n_heads_log2_floor * faElemSize, - faDataType, faElemSize, - tmp_arange2_ne, tmp_arange2_nb, GGML_MAX_DIMS - 3, ACL_FORMAT_ND); - aclnn_arange(ctx, tmp_arange2_tensor, start, stop, step, - n_elements_arange); + int64_t slope_ne[] = {1, 1, n_heads, 1}; + size_t slope_nb[GGML_MAX_DIMS]; + slope_nb[0] = sizeof(float); + for(int i = 1;i<GGML_MAX_DIMS;i++) { + slope_nb[i] = slope_nb[i - 1] * slope_ne[i - 1]; + } + aclTensor* slope_tensor = ggml_cann_create_tensor( + slope_buffer, ACL_FLOAT, sizeof(float), + slope_ne, slope_nb, GGML_MAX_DIMS); + GGML_CANN_CALL_ACLNN_OP(ctx, InplaceMul, bcast_pse_tensor, slope_tensor); - int64_t tmp_mk_ne[] = {src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]}; - size_t tmp_mk_nb[GGML_MAX_DIMS]; - tmp_mk_nb[0] = faElemSize; - for (int i = 1; i < GGML_MAX_DIMS; i++) { - tmp_mk_nb[i] = tmp_mk_nb[i - 1] * tmp_mk_ne[i - 1]; - } - aclTensor* tmp_mk_tensor = ggml_cann_create_tensor( - tmp_mk_base_buffer, faDataType, faElemSize, - tmp_mk_ne, tmp_mk_nb, GGML_MAX_DIMS, - ACL_FORMAT_ND); - GGML_CANN_CALL_ACLNN_OP(ctx, InplaceMul, bcast_pse_tensor, tmp_mk_tensor); - - ggml_cann_release_resources(ctx, tmp_arange1_tensor, tmp_arange2_tensor, - tmp_mk_base1_tensor, tmp_mk_base2_tensor, tmp_mk_base_tensor, - tmp_arange_tensor, tmp_mk_tensor); + ggml_cann_release_resources(ctx, slope_tensor); } } diff --git a/ggml/src/ggml-cann/common.h b/ggml/src/ggml-cann/common.h index 9d294f72b6..2c2033bfba 100755 --- a/ggml/src/ggml-cann/common.h +++ b/ggml/src/ggml-cann/common.h @@ -368,6 +368,10 @@ struct ggml_backend_cann_context { std::string name; /**< Name of the device. */ std::string description; /**< Description of the device. */ aclrtEvent copy_event = nullptr; /**< Event for managing copy operations. */ + void* init_ptr = nullptr; + void* sin_ptr = nullptr; + void* cos_ptr = nullptr; + int64_t max_prompt_length = 65536; #ifdef USE_ACL_GRAPH /// Cached CANN ACL graph used for executing the current ggml computation graph.
std::unique_ptr cann_graph; @@ -414,6 +418,15 @@ struct ggml_backend_cann_context { ACL_CHECK(aclrtDestroyStream(streams[i])); } } + if(init_ptr != nullptr) { + ACL_CHECK(aclrtFree(init_ptr)); + } + if(sin_ptr != nullptr) { + ACL_CHECK(aclrtFree(sin_ptr)); + } + if(cos_ptr != nullptr) { + ACL_CHECK(aclrtFree(cos_ptr)); + } } /** diff --git a/ggml/src/ggml-cann/ggml-cann.cpp b/ggml/src/ggml-cann/ggml-cann.cpp index cf575b3675..cb8af42ebf 100755 --- a/ggml/src/ggml-cann/ggml-cann.cpp +++ b/ggml/src/ggml-cann/ggml-cann.cpp @@ -2456,8 +2456,8 @@ static bool ggml_backend_cann_supports_op(ggml_backend_dev_t dev, // value of paddingW should be at most half of kernelW return (p0 <= (k0 / 2)) && (p1 <= (k1 / 2)); } - case GGML_OP_SUM: case GGML_OP_DUP: + case GGML_OP_SUM: case GGML_OP_IM2COL: case GGML_OP_CONCAT: case GGML_OP_REPEAT: @@ -2503,9 +2503,7 @@ static bool ggml_backend_cann_supports_op(ggml_backend_dev_t dev, if (op->src[2]) { return false; } - // TODO: support broadcast - // ref: https://github.com/ggml-org/llama.cpp/pull/14435 - return !op->src[1] || (op->src[1]->ne[2] == 1 && op->src[1]->ne[3] == 1); + return true; case GGML_OP_FLASH_ATTN_EXT:{ // derived from [ggml-cuda.cu] if(op->src[1]->type != GGML_TYPE_F16 || op->src[2]->type != GGML_TYPE_F16){ @@ -2532,11 +2530,6 @@ static bool ggml_backend_cann_supports_op(ggml_backend_dev_t dev, // DeepSeek MLA return false; } - // TODO: support broadcast - // ref: https://github.com/ggml-org/llama.cpp/pull/14435 - if (op->src[0]->ne[3] != 1) { - return false; - } float logitSoftcap = 0.0f; memcpy(&logitSoftcap, (float*)op->op_params + 2, sizeof(float)); if(logitSoftcap != 0.0f) { diff --git a/ggml/src/ggml-cpu/CMakeLists.txt b/ggml/src/ggml-cpu/CMakeLists.txt index f188d1638d..ce0a3e1285 100644 --- a/ggml/src/ggml-cpu/CMakeLists.txt +++ b/ggml/src/ggml-cpu/CMakeLists.txt @@ -460,7 +460,7 @@ function(ggml_add_cpu_backend_variant_impl tag_name) # NOTE: Only available from GCC 15.1.0 onwards. Any z17 machine with compile issues must first verify their GCC version. # binutils must also be updated to the latest for the -march=z17 flag to work. Otherwise, use -march=arch15. message(STATUS "z17 target") - list(APPEND ARCH_FLAGS -march=z17) + list(APPEND ARCH_FLAGS -march=arch15) else() message(STATUS "Unknown target") message(WARNING "Unknown target. 
If you are compiling for z14 and earlier, you might have to add -DGGML_VXE=OFF.") diff --git a/ggml/src/ggml-cpu/arch-fallback.h b/ggml/src/ggml-cpu/arch-fallback.h index b62e3158ee..0bfb92df17 100644 --- a/ggml/src/ggml-cpu/arch-fallback.h +++ b/ggml/src/ggml-cpu/arch-fallback.h @@ -40,18 +40,22 @@ #define ggml_gemv_q4_K_8x8_q8_K_generic ggml_gemv_q4_K_8x8_q8_K #define ggml_gemv_q2_K_8x8_q8_K_generic ggml_gemv_q2_K_8x8_q8_K #define ggml_gemv_iq4_nl_4x4_q8_0_generic ggml_gemv_iq4_nl_4x4_q8_0 +#define ggml_gemv_iq4_nl_8x8_q8_0_generic ggml_gemv_iq4_nl_8x8_q8_0 #define ggml_gemm_q4_0_4x4_q8_0_generic ggml_gemm_q4_0_4x4_q8_0 #define ggml_gemm_q4_0_4x8_q8_0_generic ggml_gemm_q4_0_4x8_q8_0 #define ggml_gemm_q4_0_8x8_q8_0_generic ggml_gemm_q4_0_8x8_q8_0 #define ggml_gemm_q4_K_8x8_q8_K_generic ggml_gemm_q4_K_8x8_q8_K #define ggml_gemm_q2_K_8x8_q8_K_generic ggml_gemm_q2_K_8x8_q8_K #define ggml_gemm_iq4_nl_4x4_q8_0_generic ggml_gemm_iq4_nl_4x4_q8_0 +#define ggml_gemm_iq4_nl_8x8_q8_0_generic ggml_gemm_iq4_nl_8x8_q8_0 #elif defined(__aarch64__) || defined(__arm__) || defined(_M_ARM) || defined(_M_ARM64) // repack.cpp #define ggml_quantize_mat_q8_K_4x8_generic ggml_quantize_mat_q8_K_4x8 #define ggml_gemv_q4_K_8x8_q8_K_generic ggml_gemv_q4_K_8x8_q8_K +#define ggml_gemv_iq4_nl_8x8_q8_0_generic ggml_gemv_iq4_nl_8x8_q8_0 #define ggml_gemv_q2_K_8x8_q8_K_generic ggml_gemv_q2_K_8x8_q8_K #define ggml_gemm_q4_K_8x8_q8_K_generic ggml_gemm_q4_K_8x8_q8_K +#define ggml_gemm_iq4_nl_8x8_q8_0_generic ggml_gemm_iq4_nl_8x8_q8_0 #define ggml_gemm_q2_K_8x8_q8_K_generic ggml_gemm_q2_K_8x8_q8_K #elif defined(__x86_64__) || defined(__i386__) || defined(_M_IX86) || defined(_M_X64) // repack.cpp @@ -69,7 +73,6 @@ #define ggml_vec_dot_tq1_0_q8_K_generic ggml_vec_dot_tq1_0_q8_K #define ggml_vec_dot_tq2_0_q8_K_generic ggml_vec_dot_tq2_0_q8_K #define ggml_vec_dot_iq1_m_q8_K_generic ggml_vec_dot_iq1_m_q8_K -#define ggml_vec_dot_mxfp4_q8_0_generic ggml_vec_dot_mxfp4_q8_0 // repack.cpp #define ggml_quantize_mat_q8_0_4x4_generic ggml_quantize_mat_q8_0_4x4 #define ggml_quantize_mat_q8_0_4x8_generic ggml_quantize_mat_q8_0_4x8 @@ -80,12 +83,14 @@ #define ggml_gemv_q4_K_8x8_q8_K_generic ggml_gemv_q4_K_8x8_q8_K #define ggml_gemv_q2_K_8x8_q8_K_generic ggml_gemv_q2_K_8x8_q8_K #define ggml_gemv_iq4_nl_4x4_q8_0_generic ggml_gemv_iq4_nl_4x4_q8_0 +#define ggml_gemv_iq4_nl_8x8_q8_0_generic ggml_gemv_iq4_nl_8x8_q8_0 #define ggml_gemm_q4_0_4x4_q8_0_generic ggml_gemm_q4_0_4x4_q8_0 #define ggml_gemm_q4_0_4x8_q8_0_generic ggml_gemm_q4_0_4x8_q8_0 #define ggml_gemm_q4_0_8x8_q8_0_generic ggml_gemm_q4_0_8x8_q8_0 #define ggml_gemm_q4_K_8x8_q8_K_generic ggml_gemm_q4_K_8x8_q8_K #define ggml_gemm_q2_K_8x8_q8_K_generic ggml_gemm_q2_K_8x8_q8_K #define ggml_gemm_iq4_nl_4x4_q8_0_generic ggml_gemm_iq4_nl_4x4_q8_0 +#define ggml_gemm_iq4_nl_8x8_q8_0_generic ggml_gemm_iq4_nl_8x8_q8_0 #elif defined(__loongarch64) // quants.c #define quantize_row_q8_K_generic quantize_row_q8_K @@ -103,12 +108,14 @@ #define ggml_gemv_q4_K_8x8_q8_K_generic ggml_gemv_q4_K_8x8_q8_K #define ggml_gemv_q2_K_8x8_q8_K_generic ggml_gemv_q2_K_8x8_q8_K #define ggml_gemv_iq4_nl_4x4_q8_0_generic ggml_gemv_iq4_nl_4x4_q8_0 +#define ggml_gemv_iq4_nl_8x8_q8_0_generic ggml_gemv_iq4_nl_8x8_q8_0 #define ggml_gemm_q4_0_4x4_q8_0_generic ggml_gemm_q4_0_4x4_q8_0 #define ggml_gemm_q4_0_4x8_q8_0_generic ggml_gemm_q4_0_4x8_q8_0 #define ggml_gemm_q4_0_8x8_q8_0_generic ggml_gemm_q4_0_8x8_q8_0 #define ggml_gemm_q4_K_8x8_q8_K_generic ggml_gemm_q4_K_8x8_q8_K #define ggml_gemm_q2_K_8x8_q8_K_generic 
ggml_gemm_q2_K_8x8_q8_K #define ggml_gemm_iq4_nl_4x4_q8_0_generic ggml_gemm_iq4_nl_4x4_q8_0 +#define ggml_gemm_iq4_nl_8x8_q8_0_generic ggml_gemm_iq4_nl_8x8_q8_0 #elif defined(__riscv) // quants.c #define quantize_row_q8_K_generic quantize_row_q8_K @@ -133,11 +140,13 @@ #define ggml_gemv_q4_K_8x8_q8_K_generic ggml_gemv_q4_K_8x8_q8_K #define ggml_gemv_q2_K_8x8_q8_K_generic ggml_gemv_q2_K_8x8_q8_K #define ggml_gemv_iq4_nl_4x4_q8_0_generic ggml_gemv_iq4_nl_4x4_q8_0 +#define ggml_gemv_iq4_nl_8x8_q8_0_generic ggml_gemv_iq4_nl_8x8_q8_0 #define ggml_gemm_q4_0_4x4_q8_0_generic ggml_gemm_q4_0_4x4_q8_0 #define ggml_gemm_q4_0_4x8_q8_0_generic ggml_gemm_q4_0_4x8_q8_0 #define ggml_gemm_q4_K_8x8_q8_K_generic ggml_gemm_q4_K_8x8_q8_K #define ggml_gemm_q2_K_8x8_q8_K_generic ggml_gemm_q2_K_8x8_q8_K #define ggml_gemm_iq4_nl_4x4_q8_0_generic ggml_gemm_iq4_nl_4x4_q8_0 +#define ggml_gemm_iq4_nl_8x8_q8_0_generic ggml_gemm_iq4_nl_8x8_q8_0 #elif defined(__s390x__) // quants.c #define quantize_row_q8_K_generic quantize_row_q8_K @@ -164,12 +173,14 @@ #define ggml_gemv_q4_K_8x8_q8_K_generic ggml_gemv_q4_K_8x8_q8_K #define ggml_gemv_q2_K_8x8_q8_K_generic ggml_gemv_q2_K_8x8_q8_K #define ggml_gemv_iq4_nl_4x4_q8_0_generic ggml_gemv_iq4_nl_4x4_q8_0 +#define ggml_gemv_iq4_nl_8x8_q8_0_generic ggml_gemv_iq4_nl_8x8_q8_0 #define ggml_gemm_q4_0_4x4_q8_0_generic ggml_gemm_q4_0_4x4_q8_0 #define ggml_gemm_q4_0_4x8_q8_0_generic ggml_gemm_q4_0_4x8_q8_0 #define ggml_gemm_q4_0_8x8_q8_0_generic ggml_gemm_q4_0_8x8_q8_0 #define ggml_gemm_q4_K_8x8_q8_K_generic ggml_gemm_q4_K_8x8_q8_K #define ggml_gemm_q2_K_8x8_q8_K_generic ggml_gemm_q2_K_8x8_q8_K #define ggml_gemm_iq4_nl_4x4_q8_0_generic ggml_gemm_iq4_nl_4x4_q8_0 +#define ggml_gemm_iq4_nl_8x8_q8_0_generic ggml_gemm_iq4_nl_8x8_q8_0 #elif defined(__wasm__) // quants.c #define ggml_vec_dot_q4_1_q8_1_generic ggml_vec_dot_q4_1_q8_1 @@ -195,10 +206,12 @@ #define ggml_gemv_q4_K_8x8_q8_K_generic ggml_gemv_q4_K_8x8_q8_K #define ggml_gemv_q2_K_8x8_q8_K_generic ggml_gemv_q2_K_8x8_q8_K #define ggml_gemv_iq4_nl_4x4_q8_0_generic ggml_gemv_iq4_nl_4x4_q8_0 +#define ggml_gemv_iq4_nl_8x8_q8_0_generic ggml_gemv_iq4_nl_8x8_q8_0 #define ggml_gemm_q4_0_4x4_q8_0_generic ggml_gemm_q4_0_4x4_q8_0 #define ggml_gemm_q4_0_4x8_q8_0_generic ggml_gemm_q4_0_4x8_q8_0 #define ggml_gemm_q4_0_8x8_q8_0_generic ggml_gemm_q4_0_8x8_q8_0 #define ggml_gemm_q4_K_8x8_q8_K_generic ggml_gemm_q4_K_8x8_q8_K #define ggml_gemm_q2_K_8x8_q8_K_generic ggml_gemm_q2_K_8x8_q8_K #define ggml_gemm_iq4_nl_4x4_q8_0_generic ggml_gemm_iq4_nl_4x4_q8_0 +#define ggml_gemm_iq4_nl_8x8_q8_0_generic ggml_gemm_iq4_nl_8x8_q8_0 #endif diff --git a/ggml/src/ggml-cpu/arch/powerpc/quants.c b/ggml/src/ggml-cpu/arch/powerpc/quants.c index 49aae7a23b..d3dfd049ea 100644 --- a/ggml/src/ggml-cpu/arch/powerpc/quants.c +++ b/ggml/src/ggml-cpu/arch/powerpc/quants.c @@ -278,6 +278,72 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * GGML_RESTRICT s, size_t bs, const voi #endif } +void ggml_vec_dot_mxfp4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + assert(n % QK_MXFP4 == 0); + static_assert(QK_MXFP4 == QK8_0, "QK_MXFP4 and QK8_0 must be the same"); + + const block_mxfp4 * GGML_RESTRICT x = vx; + const block_q8_0 * GGML_RESTRICT y = vy; + + const int nb = n / QK_MXFP4; + + int ib = 0; + float sumf = 0; + +#if defined(__POWER9_VECTOR__) + const vector signed char lowMask = vec_splats((signed char)0xF); + 
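// Reference (not part of the patch): per 32-value block, the vector loop below
// is intended to compute the same result as the ggml_vec_dot_mxfp4_q8_0_generic
// fallback in the #else branch:
//   d    = GGML_CPU_FP16_TO_FP32(y[ib].d) * GGML_E8M0_TO_FP32_HALF(x[ib].e)
//   sumi = sum_{j=0..15} kvalues_mxfp4[x[ib].qs[j] & 0x0F] * y[ib].qs[j]
//        + sum_{j=0..15} kvalues_mxfp4[x[ib].qs[j] >>   4] * y[ib].qs[j + 16]
//   sumf += d * sumi
// i.e. the low nibbles pair with the first 16 q8 values (q8y0) and the high
// nibbles with the last 16 (q8y1), with the dequantized 4-bit values taken
// from the kv lookup vector.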
const vector unsigned char vshift4 = vec_splats((unsigned char)4); + vector float vsumf0 = vec_splats(0.0f); + + vector signed char kv = vec_xl(0, (const signed char *)kvalues_mxfp4); + +#pragma GCC unroll 8 + for (; ib < nb; ++ib) { + __builtin_prefetch(x[ib].qs, 0, 1); + __builtin_prefetch(y[ib].qs, 0, 1); + + vector float vyd = vec_splats(GGML_CPU_FP16_TO_FP32(y[ib].d) * + GGML_E8M0_TO_FP32_HALF(x[ib].e)); + + vector signed char q8y0 = vec_xl( 0, y[ib].qs); + vector signed char q8y1 = vec_xl(16, y[ib].qs); + + vector signed char qxs = (vector signed char)vec_xl(0, x[ib].qs); + + vector unsigned char lo_nibbles = (vector unsigned char)vec_and(qxs, lowMask); + vector unsigned char hi_nibbles = (vector unsigned char)vec_sr(qxs, vshift4); + + vector signed char q4x0 = vec_perm(kv, kv, lo_nibbles); + vector signed char q4x1 = vec_perm(kv, kv, hi_nibbles); + + vector signed short qv0 = vec_add(vec_mule(q4x0, q8y0), vec_mulo(q4x0, q8y0)); + vector signed short qv1 = vec_add(vec_mule(q4x1, q8y1), vec_mulo(q4x1, q8y1)); + + vector signed int vsumi0 = vec_splats((int32_t)0); + vsumi0 = vec_sum4s(qv0, vsumi0); + vsumi0 = vec_sum4s(qv1, vsumi0); + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vyd, vsumf0); + } + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + sumf = vec_extract(vsumf0, 0); + *s = sumf; +#else + UNUSED(x); + UNUSED(y); + UNUSED(ib); + UNUSED(sumf); + ggml_vec_dot_mxfp4_q8_0_generic(n, s, bs, vx, bx, vy, by, nrc); +#endif +} + void ggml_vec_dot_q5_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc) { const int qk = QK8_0; const int nb = n / qk; diff --git a/ggml/src/ggml-cpu/arch/x86/repack.cpp b/ggml/src/ggml-cpu/arch/x86/repack.cpp index 37933a4b23..d95bb6d8aa 100644 --- a/ggml/src/ggml-cpu/arch/x86/repack.cpp +++ b/ggml/src/ggml-cpu/arch/x86/repack.cpp @@ -511,38 +511,34 @@ void ggml_quantize_mat_q8_K_4x8(const float * GGML_RESTRICT x, void * GGML_RESTR #endif } -void ggml_gemv_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { +// +// GEMV/GEMM templates +// + +#if defined(__AVX2__) || defined(__AVX512F__) + +// GEMV for 8x blocks of 32 4-bit quants with a single scale factor per block +template +static void gemv_q4_b32_8x8_q8_0_lut_avx(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc, __m256i signextendlut) { + static_assert( + std::is_same_v || + std::is_same_v, + "Unsupported block type"); + const int qk = QK8_0; const int nb = n / qk; - const int ncols_interleaved = 8; - const int blocklen = 8; - assert (n % qk == 0); - assert (nc % ncols_interleaved == 0); - - UNUSED(s); UNUSED(bs); - UNUSED(vx); - UNUSED(vy); - UNUSED(nr); - UNUSED(nc); - UNUSED(nb); - UNUSED(ncols_interleaved); - UNUSED(blocklen); -#if defined(__AVX2__) - // Lookup table to convert signed nibbles to signed bytes - __m256i signextendlut = _mm256_castsi128_si256(_mm_set_epi8(-1, -2, -3, -4, -5, -6, -7, -8, 7, 6, 5, 4, 3, 2, 1, 0)); - signextendlut = _mm256_permute2f128_si256(signextendlut, signextendlut, 0); __m128i changemask = _mm_set_epi8(15, 14, 7, 6, 13, 12, 5, 4, 11, 10, 3, 2, 9, 8, 1, 0); __m256i finalpermutemask = _mm256_set_epi32(7, 5, 3, 1, 6, 4, 2, 0); // Permute mask used for easier vector processing at later stages const __m256i m4b = _mm256_set1_epi8(0x0F); - int64_t b_nb = n / QK4_0; + 
int64_t b_nb = n / 32; - const block_q4_0x8 * b_ptr_start = (const block_q4_0x8 *)vx; + const block_tx8 * b_ptr_start = (const block_tx8 *)vx; const block_q8_0 * a_ptr_start = (const block_q8_0 *)vy; // Process Q8_0 blocks one by one @@ -551,17 +547,17 @@ void ggml_gemv_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const vo // Pointers to LHS blocks of block_q8_0 format const block_q8_0 * a_ptr = a_ptr_start + (y * nb); - // Take group of eight block_q4_0x8 structures at each pass of the loop and perform dot product operation + // Take group of eight blocks at each pass of the loop and perform dot product operation for (int64_t x = 0; x < nc / 8; x++) { // Pointers to RHS blocks - const block_q4_0x8 * b_ptr = b_ptr_start + (x * b_nb); + const block_tx8 * b_ptr = b_ptr_start + (x * b_nb); // Master FP accumulator __m256 acc_row = _mm256_setzero_ps(); for (int64_t b = 0; b < nb; b++) { - // Load 8 blocks of Q4_0 interleaved as 8 bytes (B0 - B7) + // Load 8 blocks of 32 interleaved as 8 bytes (B0 - B7) const __m256i rhs_raw_vec_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs)); const __m256i rhs_raw_vec_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs) + 1); const __m256i rhs_raw_vec_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs) + 2); @@ -578,8 +574,13 @@ void ggml_gemv_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const vo const __m256i rhs_vec_0123_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_vec_0123_1, 4), m4b)); // B0(24-31) B1(24-31) B2(24-31) B3(24-31) const __m256i rhs_vec_4567_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_vec_4567_1, 4), m4b)); // B4(24-31) B5(24-31) B6(24-31) B7(24-31) - // Load the scale values for the 8 blocks interleaved in block_q4_0x8 - const __m256 col_scale_f32 = GGML_F32Cx8_REARRANGE_LOAD(b_ptr[b].d, changemask); + // Load the scale values for the 8 blocks interleaved in block_tx8 + __m256 col_scale_f32; + if constexpr ( + std::is_same_v || + std::is_same_v) { + col_scale_f32 = GGML_F32Cx8_REARRANGE_LOAD(b_ptr[b].d, changemask); + } // Load and convert to FP32 scale from block_q8_0 const __m256 row_scale_f32 = _mm256_set1_ps(GGML_CPU_FP16_TO_FP32(a_ptr[b].d)); @@ -620,9 +621,771 @@ void ggml_gemv_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const vo _mm256_storeu_ps(s + (y * nr + x * 8), acc_row); } } - return; +} +// GEMM for 8x blocks of 32 4-bit quants with a single scale factor per block +template +static void gemm_q4_b32_8x8_q8_0_lut_avx(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc, __m256i signextendlut) { + static_assert( + std::is_same_v || + std::is_same_v, + "Unsupported block type"); + + const int qk = QK8_0; + const int nb = n / qk; + + const block_tx8 * b_ptr_start = (const block_tx8 *)vx; + const block_q8_0x4 * a_ptr_start = (const block_q8_0x4 *)vy; + + int64_t b_nb = n / 32; + int64_t y = 0; + // Mask to mask out nibbles from packed bytes + const __m256i m4b = _mm256_set1_epi8(0x0F); + const __m128i loadMask = _mm_blend_epi32(_mm_setzero_si128(), _mm_set1_epi32(0xFFFFFFFF), 3); + // Permute mask used for easier vector processing at later stages + __m256i requiredOrder = _mm256_set_epi32(3, 2, 1, 0, 7, 6, 5, 4); + int64_t xstart = 0; + int anr = nr - nr%16; // Used to align nr with boundary of 16 +#ifdef __AVX512F__ + int anc = nc - nc%16; // Used to align nc with boundary of 16 + // Mask to mask out nibbles from packed bytes 
expanded to 512 bit length + const __m512i m4bexpanded = _mm512_set1_epi8(0x0F); + // Lookup table to convert signed nibbles to signed bytes expanded to 512 bit length + __m512i signextendlutexpanded = _mm512_inserti32x8(_mm512_castsi256_si512(signextendlut), signextendlut, 1); + + // Take group of four block_q8_0x4 structures at each pass of the loop and perform dot product operation + for (; y < anr / 4; y += 4) { + + const block_q8_0x4 * a_ptrs[4]; + + a_ptrs[0] = a_ptr_start + (y * nb); + for (int i = 0; i < 3; ++i) { + a_ptrs[i + 1] = a_ptrs[i] + nb; + } + + // Take group of two block_tx8 structures at each pass of the loop and perform dot product operation + for (int64_t x = 0; x < anc / 8; x += 2) { + + const block_tx8 * b_ptr_0 = b_ptr_start + ((x) * b_nb); + const block_tx8 * b_ptr_1 = b_ptr_start + ((x + 1) * b_nb); + + // Master FP accumulators + __m512 acc_rows[16]; + for (int i = 0; i < 16; i++) { + acc_rows[i] = _mm512_setzero_ps(); + } + + for (int64_t b = 0; b < nb; b++) { + // Load the sixteen blocks of quantized values interleaved with each other in chunks of eight - B0,B1 ....BE,BF + const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs)); + const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 32)); + const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 64)); + const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 96)); + + const __m256i rhs_raw_mat_89AB_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs)); + const __m256i rhs_raw_mat_CDEF_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 32)); + const __m256i rhs_raw_mat_89AB_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 64)); + const __m256i rhs_raw_mat_CDEF_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 96)); + + // Save the values in the following vectors in the formats B0B1B4B5B8B9BCBD, B2B3B6B7BABBBEBF for further processing and storing of values + const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); + const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); + + const __m256i rhs_raw_mat_89CD_0 = _mm256_blend_epi32(rhs_raw_mat_89AB_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_0, requiredOrder), 240); + const __m256i rhs_raw_mat_ABEF_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_0, requiredOrder), rhs_raw_mat_CDEF_0, 240); + const __m256i rhs_raw_mat_89CD_1 = _mm256_blend_epi32(rhs_raw_mat_89AB_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_1, requiredOrder), 240); + const __m256i rhs_raw_mat_ABEF_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_1, requiredOrder), rhs_raw_mat_CDEF_1, 240); + + const __m512i rhs_raw_mat_014589CD_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_0), rhs_raw_mat_89CD_0, 1); + const __m512i rhs_raw_mat_2367ABEF_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_0), rhs_raw_mat_ABEF_0, 1); + const __m512i rhs_raw_mat_014589CD_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_1), 
rhs_raw_mat_89CD_1, 1); + const __m512i rhs_raw_mat_2367ABEF_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_1), rhs_raw_mat_ABEF_1, 1); + + // 4-bit -> 8-bit - Sign is maintained + const __m512i rhs_mat_014589CD_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_0, m4bexpanded)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) B8(0-7) B9(0-7) BC(0-7) BD(0-7) + const __m512i rhs_mat_2367ABEF_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_0, m4bexpanded)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) BA(0-7) BB(0-7) BE(0-7) BF(0-7) + + const __m512i rhs_mat_014589CD_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_1, m4bexpanded)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) B8(8-15) B9(8-15) BC(8-15) BD(8-15) + const __m512i rhs_mat_2367ABEF_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_1, m4bexpanded)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) BA(8-15) BB(8-15) BE(8-15) BF(8-15) + + const __m512i rhs_mat_014589CD_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_0, 4), m4bexpanded)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) B8(16-23) B9(16-23) BC(16-23) BD(16-23) + const __m512i rhs_mat_2367ABEF_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_0, 4), m4bexpanded)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) BA(16-23) BB(16-23) BE(16-23) BF(16-23) + + const __m512i rhs_mat_014589CD_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_1, 4), m4bexpanded)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) B8(24-31) B9(24-31) BC(24-31) BD(24-31) + const __m512i rhs_mat_2367ABEF_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_1, 4), m4bexpanded)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) BA(24-31) BB(24-31) BE(24-31) BF(24-31) + + // Shuffle pattern one - right side input + const __m512i rhs_mat_014589CD_0_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) B8(0-3) B9(0-3) B8(0-3) B9(0-3) BC(0-3) BD(0-3) BC(0-3) BD(0-3) + const __m512i rhs_mat_2367ABEF_0_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) BA(0-3) BB(0-3) BA(0-3) BB(0-3) BE(0-3) BF(0-3) BE(0-3) BF(0-3) + + const __m512i rhs_mat_014589CD_1_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) B8(8-11) B9(8-11) B8(8-11) B9(8-11) BC(8-11) BD(8-11) BC(8-11) BD(8-11) + const __m512i rhs_mat_2367ABEF_1_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) BA(8-11) BB(8-11) BA(8-11) BB(8-11) BE(8-11) BF(8-11) BE(8-11) BF(8-11) + + const __m512i rhs_mat_014589CD_2_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) B8(16-19) B9(16-19) B8(16-19) B9(16-19) BC(16-19) BD(16-19) BC(16-19) BD(16-19) + const __m512i rhs_mat_2367ABEF_2_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) BA(16-19) BB(16-19) BA(16-19) BB(16-19) BE(16-19) BF(16-19) BE(16-19) BF(16-19) + + const __m512i 
rhs_mat_014589CD_3_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) B8(24-27) B9(24-27) B8(24-27) B9(24-27) BC(24-27) BD(24-27) BC(24-27) BD(24-27) + const __m512i rhs_mat_2367ABEF_3_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) BA(24-27) BB(24-27) BA(24-27) BB(24-27) BE(24-27) BF(24-27) BE(24-27) BF(24-27) + + // Shuffle pattern two - right side input + + const __m512i rhs_mat_014589CD_0_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) B8(4-7) B9(4-7) B8(4-7) B9(4-7) BC(4-7) BD(4-7) BC(4-7) BD(4-7) + const __m512i rhs_mat_2367ABEF_0_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) BA(4-7) BB(4-7) BA(4-7) BB(4-7) BE(4-7) BF(4-7) BE(4-7) BF(4-7) + + const __m512i rhs_mat_014589CD_1_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) B8(12-15) B9(12-15) B8(12-15) B9(12-15) BC(12-15) BD(12-15) BC(12-15) BD(12-15) + const __m512i rhs_mat_2367ABEF_1_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) BA(12-15) BB(12-15) BA(12-15) BB(12-15) BE(12-15) BF(12-15) BE(12-15) BF(12-15) + + const __m512i rhs_mat_014589CD_2_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) B8(20-23) B9(20-23) B8(20-23) B9(20-23) BC(20-23) BD(20-23) BC(20-23) BD(20-23) + const __m512i rhs_mat_2367ABEF_2_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) BA(20-23) BB(20-23) BA(20-23) BB(20-23) BE(20-23) BF(20-23) BE(20-23) BF(20-23) + + const __m512i rhs_mat_014589CD_3_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) B8(28-31) B9(28-31) B8(28-31) B9(28-31) BC(28-31) BD(28-31) BC(28-31) BD(28-31) + const __m512i rhs_mat_2367ABEF_3_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) BA(28-31) BB(28-31) BA(28-31) BB(28-31) BE(28-31) BF(28-31) BE(28-31) BF(28-31) + + // Scale values - Load the weight scale values of two block_tx8 + __m512 col_scale_f32; + if constexpr ( + std::is_same_v || + std::is_same_v) { + col_scale_f32 = GGML_F32Cx8x2_LOAD(b_ptr_0[b].d, b_ptr_1[b].d); + } + + // Process LHS in pairs of rows + for (int rp = 0; rp < 4; rp++) { + + // Load the four blocks of quantized values interleaved with each other in chunks of eight - A0,A1,A2,A3 + // Loaded as set of 128 bit vectors and repeated and stored into a 256 bit vector before again repeating into 512 bit vector + __m256i lhs_mat_ymm_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs))); + __m256i lhs_mat_ymm_01_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 0); + __m256i lhs_mat_ymm_23_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 17); + __m256i lhs_mat_ymm_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 32))); + __m256i 
lhs_mat_ymm_01_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 0); + __m256i lhs_mat_ymm_23_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 17); + __m256i lhs_mat_ymm_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 64))); + __m256i lhs_mat_ymm_01_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 0); + __m256i lhs_mat_ymm_23_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 17); + __m256i lhs_mat_ymm_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 96))); + __m256i lhs_mat_ymm_01_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 0); + __m256i lhs_mat_ymm_23_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 17); + + __m512i lhs_mat_01_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_0), lhs_mat_ymm_01_0, 1); + __m512i lhs_mat_23_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_0), lhs_mat_ymm_23_0, 1); + __m512i lhs_mat_01_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_1), lhs_mat_ymm_01_1, 1); + __m512i lhs_mat_23_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_1), lhs_mat_ymm_23_1, 1); + __m512i lhs_mat_01_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_2), lhs_mat_ymm_01_2, 1); + __m512i lhs_mat_23_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_2), lhs_mat_ymm_23_2, 1); + __m512i lhs_mat_01_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_3), lhs_mat_ymm_01_3, 1); + __m512i lhs_mat_23_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_3), lhs_mat_ymm_23_3, 1); + + // Shuffle pattern one - left side input + + const __m512i lhs_mat_01_0_sp1 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) + const __m512i lhs_mat_23_0_sp1 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) + + const __m512i lhs_mat_01_1_sp1 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) + const __m512i lhs_mat_23_1_sp1 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) + + const __m512i lhs_mat_01_2_sp1 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) + const __m512i lhs_mat_23_2_sp1 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) + + const __m512i lhs_mat_01_3_sp1 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) + const __m512i lhs_mat_23_3_sp1 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) 
A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) + + // Shuffle pattern two - left side input + + const __m512i lhs_mat_01_0_sp2 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) + const __m512i lhs_mat_23_0_sp2 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) + + const __m512i lhs_mat_01_1_sp2 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) + const __m512i lhs_mat_23_1_sp2 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) + + const __m512i lhs_mat_01_2_sp2 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) + const __m512i lhs_mat_23_2_sp2 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) + + const __m512i lhs_mat_01_3_sp2 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) + const __m512i lhs_mat_23_3_sp2 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) + + // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane + // Resembles MMLAs into 2x2 matrices in ARM Version + const __m512i zero = _mm512_setzero_epi32(); + __m512i iacc_mat_00_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp1, rhs_mat_014589CD_3_sp1), lhs_mat_01_2_sp1, rhs_mat_014589CD_2_sp1), lhs_mat_01_1_sp1, rhs_mat_014589CD_1_sp1), lhs_mat_01_0_sp1, rhs_mat_014589CD_0_sp1); + __m512i iacc_mat_01_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp1, rhs_mat_2367ABEF_3_sp1), lhs_mat_01_2_sp1, rhs_mat_2367ABEF_2_sp1), lhs_mat_01_1_sp1, rhs_mat_2367ABEF_1_sp1), lhs_mat_01_0_sp1, rhs_mat_2367ABEF_0_sp1); + __m512i iacc_mat_10_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp1, rhs_mat_014589CD_3_sp1), lhs_mat_23_2_sp1, rhs_mat_014589CD_2_sp1), lhs_mat_23_1_sp1, rhs_mat_014589CD_1_sp1), lhs_mat_23_0_sp1, rhs_mat_014589CD_0_sp1); + __m512i iacc_mat_11_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp1, rhs_mat_2367ABEF_3_sp1), 
lhs_mat_23_2_sp1, rhs_mat_2367ABEF_2_sp1), lhs_mat_23_1_sp1, rhs_mat_2367ABEF_1_sp1), lhs_mat_23_0_sp1, rhs_mat_2367ABEF_0_sp1); + __m512i iacc_mat_00_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp2, rhs_mat_014589CD_3_sp2), lhs_mat_01_2_sp2, rhs_mat_014589CD_2_sp2), lhs_mat_01_1_sp2, rhs_mat_014589CD_1_sp2), lhs_mat_01_0_sp2, rhs_mat_014589CD_0_sp2); + __m512i iacc_mat_01_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp2, rhs_mat_2367ABEF_3_sp2), lhs_mat_01_2_sp2, rhs_mat_2367ABEF_2_sp2), lhs_mat_01_1_sp2, rhs_mat_2367ABEF_1_sp2), lhs_mat_01_0_sp2, rhs_mat_2367ABEF_0_sp2); + __m512i iacc_mat_10_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp2, rhs_mat_014589CD_3_sp2), lhs_mat_23_2_sp2, rhs_mat_014589CD_2_sp2), lhs_mat_23_1_sp2, rhs_mat_014589CD_1_sp2), lhs_mat_23_0_sp2, rhs_mat_014589CD_0_sp2); + __m512i iacc_mat_11_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp2, rhs_mat_2367ABEF_3_sp2), lhs_mat_23_2_sp2, rhs_mat_2367ABEF_2_sp2), lhs_mat_23_1_sp2, rhs_mat_2367ABEF_1_sp2), lhs_mat_23_0_sp2, rhs_mat_2367ABEF_0_sp2); + + // Output of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block + __m512i iacc_mat_00 = _mm512_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); + __m512i iacc_mat_01 = _mm512_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); + __m512i iacc_mat_10 = _mm512_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); + __m512i iacc_mat_11 = _mm512_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); + + + // Straighten out to make 4 row vectors + __m512i iacc_row_0 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_00, _mm512_shuffle_epi32(iacc_mat_01, (_MM_PERM_ENUM)78)); + __m512i iacc_row_1 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_00, (_MM_PERM_ENUM)78), iacc_mat_01); + __m512i iacc_row_2 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_10, _mm512_shuffle_epi32(iacc_mat_11, (_MM_PERM_ENUM)78)); + __m512i iacc_row_3 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_10, (_MM_PERM_ENUM)78), iacc_mat_11); + + // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes + const __m128i row_scale_f16 = _mm_shuffle_epi32(_mm_maskload_epi32((int const*)(a_ptrs[rp][b].d), loadMask), 68); + const __m512 row_scale_f32 = GGML_F32Cx16_REPEAT_LOAD(row_scale_f16); + + // Multiply with appropiate scales and accumulate + acc_rows[rp * 4] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_0), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[rp * 4]); + acc_rows[rp * 4 + 1] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_1), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[rp * 4 + 1]); + acc_rows[rp * 4 + 2] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_2), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[rp * 4 + 2]); + acc_rows[rp * 4 + 3] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_3), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[rp * 4 + 3]); + } + } + + // Store the accumulated values + for (int i = 0; i < 16; i++) { + _mm512_storeu_ps((float *)(s + ((y * 4 + i) * bs + x 
* 8)), acc_rows[i]); + } + } + } + + // Take a block_q8_0x4 structures at each pass of the loop and perform dot product operation + for (; y < nr / 4; y ++) { + const block_q8_0x4 * a_ptr = a_ptr_start + (y * nb); + + // Take group of two block_tx8 structures at each pass of the loop and perform dot product operation + for (int64_t x = 0; x < anc / 8; x += 2) { + + const block_tx8 * b_ptr_0 = b_ptr_start + ((x) * b_nb); + const block_tx8 * b_ptr_1 = b_ptr_start + ((x + 1) * b_nb); + + // Master FP accumulators + __m512 acc_rows[4]; + for (int i = 0; i < 4; i++) { + acc_rows[i] = _mm512_setzero_ps(); + } + + for (int64_t b = 0; b < nb; b++) { + // Load the sixteen blocks of quantized values interleaved with each other in chunks of eight - B0,B1 ....BE,BF + const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs)); + const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 32)); + const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 64)); + const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 96)); + + const __m256i rhs_raw_mat_89AB_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs)); + const __m256i rhs_raw_mat_CDEF_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 32)); + const __m256i rhs_raw_mat_89AB_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 64)); + const __m256i rhs_raw_mat_CDEF_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 96)); + + // Save the values in the following vectors in the formats B0B1B4B5, B2B3B6B7 for further processing and storing of valuess + const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); + const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); + + const __m256i rhs_raw_mat_89CD_0 = _mm256_blend_epi32(rhs_raw_mat_89AB_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_0, requiredOrder), 240); + const __m256i rhs_raw_mat_ABEF_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_0, requiredOrder), rhs_raw_mat_CDEF_0, 240); + const __m256i rhs_raw_mat_89CD_1 = _mm256_blend_epi32(rhs_raw_mat_89AB_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_1, requiredOrder), 240); + const __m256i rhs_raw_mat_ABEF_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_1, requiredOrder), rhs_raw_mat_CDEF_1, 240); + + const __m512i rhs_raw_mat_014589CD_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_0), rhs_raw_mat_89CD_0, 1); + const __m512i rhs_raw_mat_2367ABEF_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_0), rhs_raw_mat_ABEF_0, 1); + const __m512i rhs_raw_mat_014589CD_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_1), rhs_raw_mat_89CD_1, 1); + const __m512i rhs_raw_mat_2367ABEF_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_1), rhs_raw_mat_ABEF_1, 1); + + // 4-bit -> 8-bit - Sign is maintained + const __m512i rhs_mat_014589CD_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_0, m4bexpanded)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) 
B8(0-7) B9(0-7) BC(0-7) BD(0-7) + const __m512i rhs_mat_2367ABEF_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_0, m4bexpanded)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) BA(0-7) BB(0-7) BE(0-7) BF(0-7) + + const __m512i rhs_mat_014589CD_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_1, m4bexpanded)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) B8(8-15) B9(8-15) BC(8-15) BD(8-15) + const __m512i rhs_mat_2367ABEF_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_1, m4bexpanded)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) BA(8-15) BB(8-15) BE(8-15) BF(8-15) + + const __m512i rhs_mat_014589CD_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_0, 4), m4bexpanded)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) B8(16-23) B9(16-23) BC(16-23) BD(16-23) + const __m512i rhs_mat_2367ABEF_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_0, 4), m4bexpanded)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) BA(16-23) BB(16-23) BE(16-23) BF(16-23) + + const __m512i rhs_mat_014589CD_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_1, 4), m4bexpanded)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) B8(24-31) B9(24-31) BC(24-31) BD(24-31) + const __m512i rhs_mat_2367ABEF_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_1, 4), m4bexpanded)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) BA(24-31) BB(24-31) BE(24-31) BF(24-31) + + // Shuffle pattern one - right side input + const __m512i rhs_mat_014589CD_0_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) B8(0-3) B9(0-3) B8(0-3) B9(0-3) BC(0-3) BD(0-3) BC(0-3) BD(0-3) + const __m512i rhs_mat_2367ABEF_0_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) BA(0-3) BB(0-3) BA(0-3) BB(0-3) BE(0-3) BF(0-3) BE(0-3) BF(0-3) + + const __m512i rhs_mat_014589CD_1_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) B8(8-11) B9(8-11) B8(8-11) B9(8-11) BC(8-11) BD(8-11) BC(8-11) BD(8-11) + const __m512i rhs_mat_2367ABEF_1_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) BA(8-11) BB(8-11) BA(8-11) BB(8-11) BE(8-11) BF(8-11) BE(8-11) BF(8-11) + + const __m512i rhs_mat_014589CD_2_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) B8(16-19) B9(16-19) B8(16-19) B9(16-19) BC(16-19) BD(16-19) BC(16-19) BD(16-19) + const __m512i rhs_mat_2367ABEF_2_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) BA(16-19) BB(16-19) BA(16-19) BB(16-19) BE(16-19) BF(16-19) BE(16-19) BF(16-19) + + const __m512i rhs_mat_014589CD_3_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) B8(24-27) B9(24-27) B8(24-27) B9(24-27) BC(24-27) BD(24-27) BC(24-27) BD(24-27) + const __m512i rhs_mat_2367ABEF_3_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)136); //B2(24-27) 
B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) BA(24-27) BB(24-27) BA(24-27) BB(24-27) BE(24-27) BF(24-27) BE(24-27) BF(24-27) + + // Shuffle pattern two - right side input + + const __m512i rhs_mat_014589CD_0_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) B8(4-7) B9(4-7) B8(4-7) B9(4-7) BC(4-7) BD(4-7) BC(4-7) BD(4-7) + const __m512i rhs_mat_2367ABEF_0_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) BA(4-7) BB(4-7) BA(4-7) BB(4-7) BE(4-7) BF(4-7) BE(4-7) BF(4-7) + + const __m512i rhs_mat_014589CD_1_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) B8(12-15) B9(12-15) B8(12-15) B9(12-15) BC(12-15) BD(12-15) BC(12-15) BD(12-15) + const __m512i rhs_mat_2367ABEF_1_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) BA(12-15) BB(12-15) BA(12-15) BB(12-15) BE(12-15) BF(12-15) BE(12-15) BF(12-15) + + const __m512i rhs_mat_014589CD_2_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) B8(20-23) B9(20-23) B8(20-23) B9(20-23) BC(20-23) BD(20-23) BC(20-23) BD(20-23) + const __m512i rhs_mat_2367ABEF_2_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) BA(20-23) BB(20-23) BA(20-23) BB(20-23) BE(20-23) BF(20-23) BE(20-23) BF(20-23) + + const __m512i rhs_mat_014589CD_3_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) B8(28-31) B9(28-31) B8(28-31) B9(28-31) BC(28-31) BD(28-31) BC(28-31) BD(28-31) + const __m512i rhs_mat_2367ABEF_3_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) BA(28-31) BB(28-31) BA(28-31) BB(28-31) BE(28-31) BF(28-31) BE(28-31) BF(28-31) + + + // Scale values - Load the weight scale values of two block_tx8 + __m512 col_scale_f32; + if constexpr ( + std::is_same_v || + std::is_same_v) { + col_scale_f32 = GGML_F32Cx8x2_LOAD(b_ptr_0[b].d, b_ptr_1[b].d); + } + + // Load the four blocks of quantized values interleaved with each other in chunks of eight - A0,A1,A2,A3 + // Loaded as set of 128 bit vectors and repeated and stored into a 256 bit vector before again repeating into 512 bit vector + __m256i lhs_mat_ymm_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs))); + __m256i lhs_mat_ymm_01_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 0); + __m256i lhs_mat_ymm_23_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 17); + __m256i lhs_mat_ymm_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 32))); + __m256i lhs_mat_ymm_01_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 0); + __m256i lhs_mat_ymm_23_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 17); + __m256i lhs_mat_ymm_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 64))); + __m256i lhs_mat_ymm_01_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 0); + __m256i lhs_mat_ymm_23_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, 
lhs_mat_ymm_0123_2, 17); + __m256i lhs_mat_ymm_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 96))); + __m256i lhs_mat_ymm_01_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 0); + __m256i lhs_mat_ymm_23_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 17); + + __m512i lhs_mat_01_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_0), lhs_mat_ymm_01_0, 1); + __m512i lhs_mat_23_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_0), lhs_mat_ymm_23_0, 1); + __m512i lhs_mat_01_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_1), lhs_mat_ymm_01_1, 1); + __m512i lhs_mat_23_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_1), lhs_mat_ymm_23_1, 1); + __m512i lhs_mat_01_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_2), lhs_mat_ymm_01_2, 1); + __m512i lhs_mat_23_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_2), lhs_mat_ymm_23_2, 1); + __m512i lhs_mat_01_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_3), lhs_mat_ymm_01_3, 1); + __m512i lhs_mat_23_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_3), lhs_mat_ymm_23_3, 1); + + // Shuffle pattern one - left side input + + const __m512i lhs_mat_01_0_sp1 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) + const __m512i lhs_mat_23_0_sp1 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) + + const __m512i lhs_mat_01_1_sp1 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) + const __m512i lhs_mat_23_1_sp1 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) + + const __m512i lhs_mat_01_2_sp1 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) + const __m512i lhs_mat_23_2_sp1 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) + + const __m512i lhs_mat_01_3_sp1 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) + const __m512i lhs_mat_23_3_sp1 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) + + // Shuffle pattern two - left side input + + const __m512i lhs_mat_01_0_sp2 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) + const __m512i lhs_mat_23_0_sp2 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) 
A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) + + const __m512i lhs_mat_01_1_sp2 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) + const __m512i lhs_mat_23_1_sp2 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) + + const __m512i lhs_mat_01_2_sp2 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) + const __m512i lhs_mat_23_2_sp2 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) + + const __m512i lhs_mat_01_3_sp2 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) + const __m512i lhs_mat_23_3_sp2 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) + + // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane + // Resembles MMLAs into 2x2 matrices in ARM Version + const __m512i zero = _mm512_setzero_epi32(); + __m512i iacc_mat_00_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp1, rhs_mat_014589CD_3_sp1), lhs_mat_01_2_sp1, rhs_mat_014589CD_2_sp1), lhs_mat_01_1_sp1, rhs_mat_014589CD_1_sp1), lhs_mat_01_0_sp1, rhs_mat_014589CD_0_sp1); + __m512i iacc_mat_01_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp1, rhs_mat_2367ABEF_3_sp1), lhs_mat_01_2_sp1, rhs_mat_2367ABEF_2_sp1), lhs_mat_01_1_sp1, rhs_mat_2367ABEF_1_sp1), lhs_mat_01_0_sp1, rhs_mat_2367ABEF_0_sp1); + __m512i iacc_mat_10_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp1, rhs_mat_014589CD_3_sp1), lhs_mat_23_2_sp1, rhs_mat_014589CD_2_sp1), lhs_mat_23_1_sp1, rhs_mat_014589CD_1_sp1), lhs_mat_23_0_sp1, rhs_mat_014589CD_0_sp1); + __m512i iacc_mat_11_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp1, rhs_mat_2367ABEF_3_sp1), lhs_mat_23_2_sp1, rhs_mat_2367ABEF_2_sp1), lhs_mat_23_1_sp1, rhs_mat_2367ABEF_1_sp1), lhs_mat_23_0_sp1, rhs_mat_2367ABEF_0_sp1); + __m512i iacc_mat_00_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp2, rhs_mat_014589CD_3_sp2), lhs_mat_01_2_sp2, rhs_mat_014589CD_2_sp2), lhs_mat_01_1_sp2, rhs_mat_014589CD_1_sp2), lhs_mat_01_0_sp2, rhs_mat_014589CD_0_sp2); + 
__m512i iacc_mat_01_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp2, rhs_mat_2367ABEF_3_sp2), lhs_mat_01_2_sp2, rhs_mat_2367ABEF_2_sp2), lhs_mat_01_1_sp2, rhs_mat_2367ABEF_1_sp2), lhs_mat_01_0_sp2, rhs_mat_2367ABEF_0_sp2); + __m512i iacc_mat_10_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp2, rhs_mat_014589CD_3_sp2), lhs_mat_23_2_sp2, rhs_mat_014589CD_2_sp2), lhs_mat_23_1_sp2, rhs_mat_014589CD_1_sp2), lhs_mat_23_0_sp2, rhs_mat_014589CD_0_sp2); + __m512i iacc_mat_11_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp2, rhs_mat_2367ABEF_3_sp2), lhs_mat_23_2_sp2, rhs_mat_2367ABEF_2_sp2), lhs_mat_23_1_sp2, rhs_mat_2367ABEF_1_sp2), lhs_mat_23_0_sp2, rhs_mat_2367ABEF_0_sp2); + + // Outputs of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block + __m512i iacc_mat_00 = _mm512_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); + __m512i iacc_mat_01 = _mm512_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); + __m512i iacc_mat_10 = _mm512_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); + __m512i iacc_mat_11 = _mm512_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); + + + // Straighten out to make 4 row vectors + __m512i iacc_row_0 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_00, _mm512_shuffle_epi32(iacc_mat_01, (_MM_PERM_ENUM)78)); + __m512i iacc_row_1 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_00, (_MM_PERM_ENUM)78), iacc_mat_01); + __m512i iacc_row_2 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_10, _mm512_shuffle_epi32(iacc_mat_11, (_MM_PERM_ENUM)78)); + __m512i iacc_row_3 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_10, (_MM_PERM_ENUM)78), iacc_mat_11); + + // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes + const __m128i row_scale_f16 = _mm_shuffle_epi32(_mm_maskload_epi32((int const*)(a_ptr[b].d), loadMask), 68); + const __m512 row_scale_f32 = GGML_F32Cx16_REPEAT_LOAD(row_scale_f16); + + // Multiply with appropriate scales and accumulate + acc_rows[0] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_0), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[0]); + acc_rows[1] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_1), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[1]); + acc_rows[2] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_2), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[2]); + acc_rows[3] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_3), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[3]); + } + + // Store the accumulated values + for (int i = 0; i < 4; i++) { + _mm512_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), acc_rows[i]); + } + } + } + if (anc != nc) { + xstart = anc/8; + y = 0; + } +#endif // __AVX512F__ + + // Take a group of four block_q8_0x4 structures at each pass of the loop and perform dot product operation + + for (; y < anr / 4; y += 4) { + const block_q8_0x4 * a_ptrs[4]; + + a_ptrs[0] = a_ptr_start + (y * nb); + for (int i = 0; i < 3; ++i) { + a_ptrs[i + 1] = a_ptrs[i] + nb; + } + + // Take a group of eight block_tx8 structures at each pass of the loop and perform dot product operation + for (int64_t x = xstart; x <
nc / 8; x++) { + + const block_tx8 * b_ptr = b_ptr_start + (x * b_nb); + + // Master FP accumulators + __m256 acc_rows[16]; + for (int i = 0; i < 16; i++) { + acc_rows[i] = _mm256_setzero_ps(); + } + + for (int64_t b = 0; b < nb; b++) { + // Load the eight blocks of quantized values interleaved with each other in chunks of eight - B0,B1 ....B6,B7 + const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs)); + const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 32)); + const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 64)); + const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 96)); + + // Save the values in the following vectors in the formats B0B1B4B5, B2B3B6B7 for further processing and storing of values + const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); + const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); + + // 4-bit -> 8-bit - Sign is maintained + const __m256i rhs_mat_0145_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_0, m4b)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) + const __m256i rhs_mat_2367_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_0, m4b)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) + + const __m256i rhs_mat_0145_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_1, m4b)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) + const __m256i rhs_mat_2367_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_1, m4b)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) + + const __m256i rhs_mat_0145_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_0, 4), m4b)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) + const __m256i rhs_mat_2367_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_0, 4), m4b)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) + + const __m256i rhs_mat_0145_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_1, 4), m4b)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) + const __m256i rhs_mat_2367_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_1, 4), m4b)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) + + // Shuffle pattern one - right side input + const __m256i rhs_mat_0145_0_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_0, 136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) + const __m256i rhs_mat_2367_0_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_0, 136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) + + const __m256i rhs_mat_0145_1_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_1, 136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) + const __m256i rhs_mat_2367_1_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_1, 136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) + + const __m256i rhs_mat_0145_2_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_2, 136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) 
B4(16-19) B5(16-19) B4(16-19) B5(16-19) + const __m256i rhs_mat_2367_2_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_2, 136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) + + const __m256i rhs_mat_0145_3_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_3, 136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) + const __m256i rhs_mat_2367_3_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_3, 136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) + + // Shuffle pattern two - right side input + + const __m256i rhs_mat_0145_0_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_0, 221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) + const __m256i rhs_mat_2367_0_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_0, 221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) + + const __m256i rhs_mat_0145_1_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_1, 221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) + const __m256i rhs_mat_2367_1_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_1, 221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) + + const __m256i rhs_mat_0145_2_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_2, 221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) + const __m256i rhs_mat_2367_2_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_2, 221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) + + const __m256i rhs_mat_0145_3_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_3, 221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) + const __m256i rhs_mat_2367_3_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_3, 221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) + + // Scale values - Load the weight scale values of block_tx8 + __m256 col_scale_f32; + if constexpr ( + std::is_same_v || + std::is_same_v) { + col_scale_f32 = GGML_F32Cx8_LOAD(b_ptr[b].d); + } + + // Process LHS in groups of four + for (int rp = 0; rp < 4; rp++) { + // Load the four blocks of quantized values interleaved with each other in chunks of eight - A0,A1,A2,A3 + // Loaded as set of 128 bit vectors and repeated into a 256 bit vector + __m256i lhs_mat_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs))); + __m256i lhs_mat_01_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 0); + __m256i lhs_mat_23_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 17); + __m256i lhs_mat_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 32))); + __m256i lhs_mat_01_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 0); + __m256i lhs_mat_23_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 17); + __m256i lhs_mat_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 64))); + __m256i lhs_mat_01_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 0); + __m256i lhs_mat_23_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 17); + __m256i lhs_mat_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 96))); + __m256i lhs_mat_01_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 0); + __m256i lhs_mat_23_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 17); + + // Shuffle pattern one - left side input + const __m256i lhs_mat_01_0_sp1 = _mm256_shuffle_epi32(lhs_mat_01_0, 160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) + const
__m256i lhs_mat_23_0_sp1 = _mm256_shuffle_epi32(lhs_mat_23_0, 160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) + + const __m256i lhs_mat_01_1_sp1 = _mm256_shuffle_epi32(lhs_mat_01_1, 160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) + const __m256i lhs_mat_23_1_sp1 = _mm256_shuffle_epi32(lhs_mat_23_1, 160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) + + const __m256i lhs_mat_01_2_sp1 = _mm256_shuffle_epi32(lhs_mat_01_2, 160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) + const __m256i lhs_mat_23_2_sp1 = _mm256_shuffle_epi32(lhs_mat_23_2, 160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) + + const __m256i lhs_mat_01_3_sp1 = _mm256_shuffle_epi32(lhs_mat_01_3, 160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) + const __m256i lhs_mat_23_3_sp1 = _mm256_shuffle_epi32(lhs_mat_23_3, 160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) + + // Shuffle pattern two - left side input + const __m256i lhs_mat_01_0_sp2 = _mm256_shuffle_epi32(lhs_mat_01_0, 245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) + const __m256i lhs_mat_23_0_sp2 = _mm256_shuffle_epi32(lhs_mat_23_0, 245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) + + const __m256i lhs_mat_01_1_sp2 = _mm256_shuffle_epi32(lhs_mat_01_1, 245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) + const __m256i lhs_mat_23_1_sp2 = _mm256_shuffle_epi32(lhs_mat_23_1, 245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) + + const __m256i lhs_mat_01_2_sp2 = _mm256_shuffle_epi32(lhs_mat_01_2, 245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) + const __m256i lhs_mat_23_2_sp2 = _mm256_shuffle_epi32(lhs_mat_23_2, 245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) + + const __m256i lhs_mat_01_3_sp2 = _mm256_shuffle_epi32(lhs_mat_01_3, 245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) + const __m256i lhs_mat_23_3_sp2 = _mm256_shuffle_epi32(lhs_mat_23_3, 245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) + + // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane + // Resembles MMLAs into 2x2 matrices in ARM Version + const __m256i zero = _mm256_setzero_si256(); + __m256i iacc_mat_00_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp1, rhs_mat_0145_3_sp1), lhs_mat_01_2_sp1, rhs_mat_0145_2_sp1), lhs_mat_01_1_sp1, rhs_mat_0145_1_sp1), lhs_mat_01_0_sp1, rhs_mat_0145_0_sp1); + __m256i iacc_mat_01_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp1, rhs_mat_2367_3_sp1), lhs_mat_01_2_sp1, rhs_mat_2367_2_sp1), lhs_mat_01_1_sp1, rhs_mat_2367_1_sp1), lhs_mat_01_0_sp1, rhs_mat_2367_0_sp1); + __m256i iacc_mat_10_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp1, rhs_mat_0145_3_sp1), lhs_mat_23_2_sp1, rhs_mat_0145_2_sp1), lhs_mat_23_1_sp1, rhs_mat_0145_1_sp1), lhs_mat_23_0_sp1, 
rhs_mat_0145_0_sp1); + __m256i iacc_mat_11_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp1, rhs_mat_2367_3_sp1), lhs_mat_23_2_sp1, rhs_mat_2367_2_sp1), lhs_mat_23_1_sp1, rhs_mat_2367_1_sp1), lhs_mat_23_0_sp1, rhs_mat_2367_0_sp1); + __m256i iacc_mat_00_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp2, rhs_mat_0145_3_sp2), lhs_mat_01_2_sp2, rhs_mat_0145_2_sp2), lhs_mat_01_1_sp2, rhs_mat_0145_1_sp2), lhs_mat_01_0_sp2, rhs_mat_0145_0_sp2); + __m256i iacc_mat_01_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp2, rhs_mat_2367_3_sp2), lhs_mat_01_2_sp2, rhs_mat_2367_2_sp2), lhs_mat_01_1_sp2, rhs_mat_2367_1_sp2), lhs_mat_01_0_sp2, rhs_mat_2367_0_sp2); + __m256i iacc_mat_10_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp2, rhs_mat_0145_3_sp2), lhs_mat_23_2_sp2, rhs_mat_0145_2_sp2), lhs_mat_23_1_sp2, rhs_mat_0145_1_sp2), lhs_mat_23_0_sp2, rhs_mat_0145_0_sp2); + __m256i iacc_mat_11_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp2, rhs_mat_2367_3_sp2), lhs_mat_23_2_sp2, rhs_mat_2367_2_sp2), lhs_mat_23_1_sp2, rhs_mat_2367_1_sp2), lhs_mat_23_0_sp2, rhs_mat_2367_0_sp2); + + // Outputs of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block + __m256i iacc_mat_00 = _mm256_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); + __m256i iacc_mat_01 = _mm256_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); + __m256i iacc_mat_10 = _mm256_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); + __m256i iacc_mat_11 = _mm256_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); + + // Straighten out to make 4 row vectors + __m256i iacc_row_0 = _mm256_blend_epi32(iacc_mat_00, _mm256_shuffle_epi32(iacc_mat_01, 78), 204); + __m256i iacc_row_1 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_00, 78), iacc_mat_01, 204); + __m256i iacc_row_2 = _mm256_blend_epi32(iacc_mat_10, _mm256_shuffle_epi32(iacc_mat_11, 78), 204); + __m256i iacc_row_3 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_10, 78), iacc_mat_11, 204); + + // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes + const __m256 row_scale_f32 = GGML_F32Cx8_REPEAT_LOAD(a_ptrs[rp][b].d, loadMask); + + // Multiply with appropriate scales and accumulate + acc_rows[rp * 4] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_0), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[rp * 4]); + acc_rows[rp * 4 + 1] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_1), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[rp * 4 + 1]); + acc_rows[rp * 4 + 2] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_2), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[rp * 4 + 2]); + acc_rows[rp * 4 + 3] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_3), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[rp * 4 + 3]); + } + } + + // Store the accumulated values + for (int i = 0; i < 16; i++) { + _mm256_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), acc_rows[i]); + } + } + } + + // Take a block_q8_0x4 structure at each
pass of the loop and perform dot product operation + for (; y < nr / 4; y ++) { + const block_q8_0x4 * a_ptr = a_ptr_start + (y * nb); + + // Load the eight blocks of quantized values interleaved with each other in chunks of eight - B0,B1 ....B6,B7 + for (int64_t x = xstart; x < nc / 8; x++) { + const block_tx8 * b_ptr = b_ptr_start + (x * b_nb); + + // Master FP accumulators + __m256 acc_rows[4]; + for (int i = 0; i < 4; i++) { + acc_rows[i] = _mm256_setzero_ps(); + } + + for (int64_t b = 0; b < nb; b++) { + // Load the eight block_q8_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....B6,B7 + const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs)); + const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 32)); + const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 64)); + const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 96)); + + // Save the values in the following vectors in the formats B0B1B4B5, B2B3B6B7 for further processing and storing of values + const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); + const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); + + // 4-bit -> 8-bit - Sign is maintained + const __m256i rhs_mat_0145_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_0, m4b)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) + const __m256i rhs_mat_2367_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_0, m4b)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) + + const __m256i rhs_mat_0145_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_1, m4b)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) + const __m256i rhs_mat_2367_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_1, m4b)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) + + const __m256i rhs_mat_0145_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_0, 4), m4b)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) + const __m256i rhs_mat_2367_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_0, 4), m4b)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) + + const __m256i rhs_mat_0145_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_1, 4), m4b)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) + const __m256i rhs_mat_2367_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_1, 4), m4b)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) + + // Shuffle pattern one - right side input + const __m256i rhs_mat_0145_0_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_0, 136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) + const __m256i rhs_mat_2367_0_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_0, 136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) + + const __m256i rhs_mat_0145_1_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_1, 136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) +
const __m256i rhs_mat_2367_1_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_1, 136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) + + const __m256i rhs_mat_0145_2_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_2, 136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) + const __m256i rhs_mat_2367_2_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_2, 136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) + + const __m256i rhs_mat_0145_3_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_3, 136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) + const __m256i rhs_mat_2367_3_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_3, 136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) + + // Shuffle pattern two - right side input + + const __m256i rhs_mat_0145_0_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_0, 221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) + const __m256i rhs_mat_2367_0_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_0, 221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) + + const __m256i rhs_mat_0145_1_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_1, 221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) + const __m256i rhs_mat_2367_1_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_1, 221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) + + const __m256i rhs_mat_0145_2_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_2, 221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) + const __m256i rhs_mat_2367_2_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_2, 221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) + + const __m256i rhs_mat_0145_3_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_3, 221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) + const __m256i rhs_mat_2367_3_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_3, 221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) + + // Scale values - Load the weight scale values of block_tx8 + __m256 col_scale_f32; + if constexpr ( + std::is_same_v || + std::is_same_v) { + col_scale_f32 = GGML_F32Cx8_LOAD(b_ptr[b].d); + } + + // Load the four blocks of quantized values interleaved with each other in chunks of eight - A0,A1,A2,A3 + // Loaded as set of 128 bit vectors and repeated into a 256 bit vector + __m256i lhs_mat_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs))); + __m256i lhs_mat_01_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 0); + __m256i lhs_mat_23_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 17); + __m256i lhs_mat_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 32))); + __m256i lhs_mat_01_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 0); + __m256i lhs_mat_23_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 17); + __m256i lhs_mat_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 64))); + __m256i lhs_mat_01_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 0); + __m256i lhs_mat_23_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 17); + __m256i lhs_mat_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 96))); + __m256i lhs_mat_01_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 0); + __m256i lhs_mat_23_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 17); + + // Shuffle
pattern one - left side input + + const __m256i lhs_mat_01_0_sp1 = _mm256_shuffle_epi32(lhs_mat_01_0, 160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) + const __m256i lhs_mat_23_0_sp1 = _mm256_shuffle_epi32(lhs_mat_23_0, 160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) + + const __m256i lhs_mat_01_1_sp1 = _mm256_shuffle_epi32(lhs_mat_01_1, 160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) + const __m256i lhs_mat_23_1_sp1 = _mm256_shuffle_epi32(lhs_mat_23_1, 160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) + + const __m256i lhs_mat_01_2_sp1 = _mm256_shuffle_epi32(lhs_mat_01_2, 160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) + const __m256i lhs_mat_23_2_sp1 = _mm256_shuffle_epi32(lhs_mat_23_2, 160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) + + const __m256i lhs_mat_01_3_sp1 = _mm256_shuffle_epi32(lhs_mat_01_3, 160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) + const __m256i lhs_mat_23_3_sp1 = _mm256_shuffle_epi32(lhs_mat_23_3, 160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) + + // Shuffle pattern two - left side input + + const __m256i lhs_mat_01_0_sp2 = _mm256_shuffle_epi32(lhs_mat_01_0, 245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) + const __m256i lhs_mat_23_0_sp2 = _mm256_shuffle_epi32(lhs_mat_23_0, 245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) + + const __m256i lhs_mat_01_1_sp2 = _mm256_shuffle_epi32(lhs_mat_01_1, 245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) + const __m256i lhs_mat_23_1_sp2 = _mm256_shuffle_epi32(lhs_mat_23_1, 245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) + + const __m256i lhs_mat_01_2_sp2 = _mm256_shuffle_epi32(lhs_mat_01_2, 245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) + const __m256i lhs_mat_23_2_sp2 = _mm256_shuffle_epi32(lhs_mat_23_2, 245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) + + const __m256i lhs_mat_01_3_sp2 = _mm256_shuffle_epi32(lhs_mat_01_3, 245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) + const __m256i lhs_mat_23_3_sp2 = _mm256_shuffle_epi32(lhs_mat_23_3, 245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) + + // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane + // Resembles MMLAs into 2x2 matrices in ARM Version + const __m256i zero = _mm256_setzero_si256(); + __m256i iacc_mat_00_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp1, rhs_mat_0145_3_sp1), lhs_mat_01_2_sp1, rhs_mat_0145_2_sp1), lhs_mat_01_1_sp1, rhs_mat_0145_1_sp1), lhs_mat_01_0_sp1, rhs_mat_0145_0_sp1); + __m256i iacc_mat_01_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp1, rhs_mat_2367_3_sp1), lhs_mat_01_2_sp1, rhs_mat_2367_2_sp1), lhs_mat_01_1_sp1, rhs_mat_2367_1_sp1), lhs_mat_01_0_sp1, rhs_mat_2367_0_sp1); + __m256i iacc_mat_10_sp1 = 
mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp1, rhs_mat_0145_3_sp1), lhs_mat_23_2_sp1, rhs_mat_0145_2_sp1), lhs_mat_23_1_sp1, rhs_mat_0145_1_sp1), lhs_mat_23_0_sp1, rhs_mat_0145_0_sp1); + __m256i iacc_mat_11_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp1, rhs_mat_2367_3_sp1), lhs_mat_23_2_sp1, rhs_mat_2367_2_sp1), lhs_mat_23_1_sp1, rhs_mat_2367_1_sp1), lhs_mat_23_0_sp1, rhs_mat_2367_0_sp1); + __m256i iacc_mat_00_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp2, rhs_mat_0145_3_sp2), lhs_mat_01_2_sp2, rhs_mat_0145_2_sp2), lhs_mat_01_1_sp2, rhs_mat_0145_1_sp2), lhs_mat_01_0_sp2, rhs_mat_0145_0_sp2); + __m256i iacc_mat_01_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp2, rhs_mat_2367_3_sp2), lhs_mat_01_2_sp2, rhs_mat_2367_2_sp2), lhs_mat_01_1_sp2, rhs_mat_2367_1_sp2), lhs_mat_01_0_sp2, rhs_mat_2367_0_sp2); + __m256i iacc_mat_10_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp2, rhs_mat_0145_3_sp2), lhs_mat_23_2_sp2, rhs_mat_0145_2_sp2), lhs_mat_23_1_sp2, rhs_mat_0145_1_sp2), lhs_mat_23_0_sp2, rhs_mat_0145_0_sp2); + __m256i iacc_mat_11_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp2, rhs_mat_2367_3_sp2), lhs_mat_23_2_sp2, rhs_mat_2367_2_sp2), lhs_mat_23_1_sp2, rhs_mat_2367_1_sp2), lhs_mat_23_0_sp2, rhs_mat_2367_0_sp2); + + // Outputs of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block + __m256i iacc_mat_00 = _mm256_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); + __m256i iacc_mat_01 = _mm256_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); + __m256i iacc_mat_10 = _mm256_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); + __m256i iacc_mat_11 = _mm256_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); + + + // Straighten out to make 4 row vectors + __m256i iacc_row_0 = _mm256_blend_epi32(iacc_mat_00, _mm256_shuffle_epi32(iacc_mat_01, 78), 204); + __m256i iacc_row_1 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_00, 78), iacc_mat_01, 204); + __m256i iacc_row_2 = _mm256_blend_epi32(iacc_mat_10, _mm256_shuffle_epi32(iacc_mat_11, 78), 204); + __m256i iacc_row_3 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_10, 78), iacc_mat_11, 204); + + // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes + const __m256 row_scale_f32 = GGML_F32Cx8_REPEAT_LOAD(a_ptr[b].d, loadMask); + + // Multiply with appropriate scales and accumulate + acc_rows[0] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_0), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[0]); + acc_rows[1] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_1), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[1]); + acc_rows[2] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_2), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[2]); + acc_rows[3] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_3), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[3]); + } + + // Store the
accumulated values + for (int i = 0; i < 4; i++) { + _mm256_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), acc_rows[i]); + } + } + } +} + +#endif // defined(__AVX2__) || defined(__AVX512F__) + +void ggml_gemv_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { +#if defined(__AVX2__) || defined(__AVX512F__) + { + // Lookup table to convert signed nibbles to signed bytes + __m256i signextendlut = _mm256_castsi128_si256(_mm_set_epi8(-1, -2, -3, -4, -5, -6, -7, -8, 7, 6, 5, 4, 3, 2, 1, 0)); + signextendlut = _mm256_permute2f128_si256(signextendlut, signextendlut, 0); + + gemv_q4_b32_8x8_q8_0_lut_avx(n, s, bs, vx, vy, nr, nc, signextendlut); + + return; + } #endif + ggml_gemv_q4_0_8x8_q8_0_generic(n, s, bs, vx, vy, nr, nc); } @@ -849,6 +1612,19 @@ void ggml_gemv_q4_K_8x8_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const vo #endif } +void ggml_gemv_iq4_nl_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { +#if defined(__AVX2__) + __m256i signextendlut = _mm256_castsi128_si256(_mm_loadu_si128((const __m128i*)kvalues_iq4nl)); + signextendlut = _mm256_permute2f128_si256(signextendlut, signextendlut, 0); + + gemv_q4_b32_8x8_q8_0_lut_avx(n, s, bs, vx, vy, nr, nc, signextendlut); + + return; +#endif + + ggml_gemv_iq4_nl_8x8_q8_0_generic(n, s, bs, vx, vy, nr, nc); +} + void ggml_gemv_q2_K_8x8_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { const int qk = QK_K; const int nb = n / qk; @@ -1163,750 +1939,18 @@ void ggml_gemv_q2_K_8x8_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const vo } void ggml_gemm_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { - const int qk = QK8_0; - const int nb = n / qk; - const int ncols_interleaved = 8; - const int blocklen = 8; - - assert (n % qk == 0); - assert (nr % 4 == 0); - assert (nc % ncols_interleaved == 0); - - UNUSED(s); - UNUSED(bs); - UNUSED(vx); - UNUSED(vy); - UNUSED(nr); - UNUSED(nc); - UNUSED(nb); - UNUSED(ncols_interleaved); - UNUSED(blocklen); - #if defined(__AVX2__) || defined(__AVX512F__) { - const block_q4_0x8 * b_ptr_start = (const block_q4_0x8 *)vx; - const block_q8_0x4 * a_ptr_start = (const block_q8_0x4 *)vy; - int64_t b_nb = n / QK4_0; - int64_t y = 0; - // Mask to mask out nibbles from packed bytes - const __m256i m4b = _mm256_set1_epi8(0x0F); - const __m128i loadMask = _mm_blend_epi32(_mm_setzero_si128(), _mm_set1_epi32(0xFFFFFFFF), 3); // Lookup table to convert signed nibbles to signed bytes __m256i signextendlut = _mm256_castsi128_si256(_mm_set_epi8(-1, -2, -3, -4, -5, -6, -7, -8, 7, 6, 5, 4, 3, 2, 1, 0)); signextendlut = _mm256_permute2f128_si256(signextendlut, signextendlut, 0); - // Permute mask used for easier vector processing at later stages - __m256i requiredOrder = _mm256_set_epi32(3, 2, 1, 0, 7, 6, 5, 4); - int64_t xstart = 0; - int anr = nr - nr%16; // Used to align nr with boundary of 16 - #ifdef __AVX512F__ - int anc = nc - nc%16; // Used to align nc with boundary of 16 - // Mask to mask out nibbles from packed bytes expanded to 512 bit length - const __m512i m4bexpanded = _mm512_set1_epi8(0x0F); - // Lookup table to convert signed nibbles to signed bytes expanded to 512 bit length - __m512i signextendlutexpanded = _mm512_inserti32x8(_mm512_castsi256_si512(signextendlut), signextendlut, 1); 
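For reference, the decode step these kernels label "4-bit -> 8-bit - Sign is maintained" comes down to a single byte shuffle: the low nibble of every byte is used as an index into a 16-entry table that has been replicated into both 128-bit lanes, and the high nibbles are handled the same way after a 4-bit shift. The sketch below is illustrative only and not part of the patch (the helper name decode_nibbles_signed_avx2 is made up); it assumes AVX2 and mirrors the signextendlut/m4b setup visible in ggml_gemv_q4_0_8x8_q8_0 above, while ggml_gemv_iq4_nl_8x8_q8_0 swaps the table for kvalues_iq4nl.

#include <immintrin.h>

// Hypothetical helper, not from the patch: sign-extend the low 4-bit value of
// every byte in `raw` to a signed 8-bit value, the same way the kernels above
// use signextendlut.
static inline __m256i decode_nibbles_signed_avx2(__m256i raw) {
    const __m256i m4b = _mm256_set1_epi8(0x0F);                    // keep only the low nibble of each byte
    __m256i lut = _mm256_castsi128_si256(
        _mm_set_epi8(-1, -2, -3, -4, -5, -6, -7, -8, 7, 6, 5, 4, 3, 2, 1, 0));
    lut = _mm256_permute2f128_si256(lut, lut, 0);                  // replicate the 16-entry table into both lanes
    return _mm256_shuffle_epi8(lut, _mm256_and_si256(raw, m4b));   // lut[nibble] for every byte
}

// The high nibbles go through the same path after a shift, e.g.
// decode_nibbles_signed_avx2(_mm256_srli_epi16(raw, 4)).

Two further notes on the constants that appear throughout: the _mm256_shuffle_epi32/_mm512_shuffle_epi32 immediates are ordinary _MM_SHUFFLE encodings (136 = _MM_SHUFFLE(2,0,2,0) and 221 = _MM_SHUFFLE(3,1,3,1) for the right-hand side, 160 = _MM_SHUFFLE(2,2,0,0) and 245 = _MM_SHUFFLE(3,3,1,1) for the left-hand side), which is what produces the dword layouts documented in the trailing comments; and each fmadd at the end of a block applies acc += float(int32_dot) * (row scale * column scale), i.e. the product of the per-block Q8_0 and 4-bit scales.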
- // Take group of four block_q8_0x4 structures at each pass of the loop and perform dot product operation - for (; y < anr / 4; y += 4) { + gemm_q4_b32_8x8_q8_0_lut_avx(n, s, bs, vx, vy, nr, nc, signextendlut); - const block_q8_0x4 * a_ptrs[4]; - - a_ptrs[0] = a_ptr_start + (y * nb); - for (int i = 0; i < 3; ++i) { - a_ptrs[i + 1] = a_ptrs[i] + nb; - } - - // Take group of two block_q4_0x8 structures at each pass of the loop and perform dot product operation - for (int64_t x = 0; x < anc / 8; x += 2) { - - const block_q4_0x8 * b_ptr_0 = b_ptr_start + ((x) * b_nb); - const block_q4_0x8 * b_ptr_1 = b_ptr_start + ((x + 1) * b_nb); - - // Master FP accumulators - __m512 acc_rows[16]; - for (int i = 0; i < 16; i++) { - acc_rows[i] = _mm512_setzero_ps(); - } - - for (int64_t b = 0; b < nb; b++) { - // Load the sixteen block_q4_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....BE,BF - const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs)); - const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 32)); - const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 64)); - const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 96)); - - const __m256i rhs_raw_mat_89AB_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs)); - const __m256i rhs_raw_mat_CDEF_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 32)); - const __m256i rhs_raw_mat_89AB_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 64)); - const __m256i rhs_raw_mat_CDEF_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 96)); - - // Save the values in the following vectors in the formats B0B1B4B5B8B9BCBD, B2B3B6B7BABBBEBF for further processing and storing of values - const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); - const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); - const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); - const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); - - const __m256i rhs_raw_mat_89CD_0 = _mm256_blend_epi32(rhs_raw_mat_89AB_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_0, requiredOrder), 240); - const __m256i rhs_raw_mat_ABEF_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_0, requiredOrder), rhs_raw_mat_CDEF_0, 240); - const __m256i rhs_raw_mat_89CD_1 = _mm256_blend_epi32(rhs_raw_mat_89AB_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_1, requiredOrder), 240); - const __m256i rhs_raw_mat_ABEF_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_1, requiredOrder), rhs_raw_mat_CDEF_1, 240); - - const __m512i rhs_raw_mat_014589CD_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_0), rhs_raw_mat_89CD_0, 1); - const __m512i rhs_raw_mat_2367ABEF_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_0), rhs_raw_mat_ABEF_0, 1); - const __m512i rhs_raw_mat_014589CD_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_1), rhs_raw_mat_89CD_1, 1); - const __m512i rhs_raw_mat_2367ABEF_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_1), rhs_raw_mat_ABEF_1, 1); - - // 4-bit -> 8-bit - Sign is maintained - 
const __m512i rhs_mat_014589CD_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_0, m4bexpanded)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) B8(0-7) B9(0-7) BC(0-7) BD(0-7) - const __m512i rhs_mat_2367ABEF_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_0, m4bexpanded)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) BA(0-7) BB(0-7) BE(0-7) BF(0-7) - - const __m512i rhs_mat_014589CD_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_1, m4bexpanded)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) B8(8-15) B9(8-15) BC(8-15) BD(8-15) - const __m512i rhs_mat_2367ABEF_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_1, m4bexpanded)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) BA(8-15) BB(8-15) BE(8-15) BF(8-15) - - const __m512i rhs_mat_014589CD_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_0, 4), m4bexpanded)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) B8(16-23) B9(16-23) BC(16-23) BD(16-23) - const __m512i rhs_mat_2367ABEF_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_0, 4), m4bexpanded)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) BA(16-23) BB(16-23) BE(16-23) BF(16-23) - - const __m512i rhs_mat_014589CD_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_1, 4), m4bexpanded)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) B8(24-31) B9(24-31) BC(24-31) BD(24-31) - const __m512i rhs_mat_2367ABEF_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_1, 4), m4bexpanded)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) BA(24-31) BB(24-31) BE(24-31) BF(24-31) - - // Shuffle pattern one - right side input - const __m512i rhs_mat_014589CD_0_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) B8(0-3) B9(0-3) B8(0-3) B9(0-3) BC(0-3) BD(0-3) BC(0-3) BD(0-3) - const __m512i rhs_mat_2367ABEF_0_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) BA(0-3) BB(0-3) BA(0-3) BB(0-3) BE(0-3) BF(0-3) BE(0-3) BF(0-3) - - const __m512i rhs_mat_014589CD_1_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) B8(8-11) B9(8-11) B8(8-11) B9(8-11) BC(8-11) BD(8-11) BC(8-11) BD(8-11) - const __m512i rhs_mat_2367ABEF_1_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) BA(8-11) BB(8-11) BA(8-11) BB(8-11) BE(8-11) BF(8-11) BE(8-11) BF(8-11) - - const __m512i rhs_mat_014589CD_2_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) B8(16-19) B9(16-19) B8(16-19) B9(16-19) BC(16-19) BD(16-19) BC(16-19) BD(16-19) - const __m512i rhs_mat_2367ABEF_2_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) BA(16-19) BB(16-19) BA(16-19) BB(16-19) BE(16-19) BF(16-19) BE(16-19) BF(16-19) - - const __m512i rhs_mat_014589CD_3_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) B8(24-27) B9(24-27) B8(24-27) 
B9(24-27) BC(24-27) BD(24-27) BC(24-27) BD(24-27) - const __m512i rhs_mat_2367ABEF_3_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) BA(24-27) BB(24-27) BA(24-27) BB(24-27) BE(24-27) BF(24-27) BE(24-27) BF(24-27) - - // Shuffle pattern two - right side input - - const __m512i rhs_mat_014589CD_0_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) B8(4-7) B9(4-7) B8(4-7) B9(4-7) BC(4-7) BD(4-7) BC(4-7) BD(4-7) - const __m512i rhs_mat_2367ABEF_0_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) BA(4-7) BB(4-7) BA(4-7) BB(4-7) BE(4-7) BF(4-7) BE(4-7) BF(4-7) - - const __m512i rhs_mat_014589CD_1_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) B8(12-15) B9(12-15) B8(12-15) B9(12-15) BC(12-15) BD(12-15) BC(12-15) BD(12-15) - const __m512i rhs_mat_2367ABEF_1_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) BA(12-15) BB(12-15) BA(12-15) BB(12-15) BE(12-15) BF(12-15) BE(12-15) BF(12-15) - - const __m512i rhs_mat_014589CD_2_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) B8(20-23) B9(20-23) B8(20-23) B9(20-23) BC(20-23) BD(20-23) BC(20-23) BD(20-23) - const __m512i rhs_mat_2367ABEF_2_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) BA(20-23) BB(20-23) BA(20-23) BB(20-23) BE(20-23) BF(20-23) BE(20-23) BF(20-23) - - const __m512i rhs_mat_014589CD_3_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) B8(28-31) B9(28-31) B8(28-31) B9(28-31) BC(28-31) BD(28-31) BC(28-31) BD(28-31) - const __m512i rhs_mat_2367ABEF_3_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) BA(28-31) BB(28-31) BA(28-31) BB(28-31) BE(28-31) BF(28-31) BE(28-31) BF(28-31) - - // Scale values - Load the weight scale values of two block_q4_0x8 - const __m512 col_scale_f32 = GGML_F32Cx8x2_LOAD(b_ptr_0[b].d, b_ptr_1[b].d); - - // Process LHS in pairs of rows - for (int rp = 0; rp < 4; rp++) { - - // Load the four block_q4_0 quantized values interleaved with each other in chunks of eight - A0,A1,A2,A3 - // Loaded as set of 128 bit vectors and repeated and stored into a 256 bit vector before again repeating into 512 bit vector - __m256i lhs_mat_ymm_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs))); - __m256i lhs_mat_ymm_01_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 0); - __m256i lhs_mat_ymm_23_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 17); - __m256i lhs_mat_ymm_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 32))); - __m256i lhs_mat_ymm_01_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 0); - __m256i lhs_mat_ymm_23_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 17); - __m256i lhs_mat_ymm_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 
64))); - __m256i lhs_mat_ymm_01_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 0); - __m256i lhs_mat_ymm_23_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 17); - __m256i lhs_mat_ymm_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 96))); - __m256i lhs_mat_ymm_01_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 0); - __m256i lhs_mat_ymm_23_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 17); - - __m512i lhs_mat_01_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_0), lhs_mat_ymm_01_0, 1); - __m512i lhs_mat_23_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_0), lhs_mat_ymm_23_0, 1); - __m512i lhs_mat_01_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_1), lhs_mat_ymm_01_1, 1); - __m512i lhs_mat_23_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_1), lhs_mat_ymm_23_1, 1); - __m512i lhs_mat_01_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_2), lhs_mat_ymm_01_2, 1); - __m512i lhs_mat_23_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_2), lhs_mat_ymm_23_2, 1); - __m512i lhs_mat_01_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_3), lhs_mat_ymm_01_3, 1); - __m512i lhs_mat_23_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_3), lhs_mat_ymm_23_3, 1); - - // Shuffle pattern one - left side input - - const __m512i lhs_mat_01_0_sp1 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) - const __m512i lhs_mat_23_0_sp1 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) - - const __m512i lhs_mat_01_1_sp1 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) - const __m512i lhs_mat_23_1_sp1 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) - - const __m512i lhs_mat_01_2_sp1 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) - const __m512i lhs_mat_23_2_sp1 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) - - const __m512i lhs_mat_01_3_sp1 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) - const __m512i lhs_mat_23_3_sp1 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) - - // Shuffle pattern two - left side input - - const __m512i lhs_mat_01_0_sp2 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) 
A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) - const __m512i lhs_mat_23_0_sp2 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) - - const __m512i lhs_mat_01_1_sp2 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) - const __m512i lhs_mat_23_1_sp2 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) - - const __m512i lhs_mat_01_2_sp2 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) - const __m512i lhs_mat_23_2_sp2 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) - - const __m512i lhs_mat_01_3_sp2 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) - const __m512i lhs_mat_23_3_sp2 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) - - // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane - // Resembles MMLAs into 2x2 matrices in ARM Version - const __m512i zero = _mm512_setzero_epi32(); - __m512i iacc_mat_00_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp1, rhs_mat_014589CD_3_sp1), lhs_mat_01_2_sp1, rhs_mat_014589CD_2_sp1), lhs_mat_01_1_sp1, rhs_mat_014589CD_1_sp1), lhs_mat_01_0_sp1, rhs_mat_014589CD_0_sp1); - __m512i iacc_mat_01_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp1, rhs_mat_2367ABEF_3_sp1), lhs_mat_01_2_sp1, rhs_mat_2367ABEF_2_sp1), lhs_mat_01_1_sp1, rhs_mat_2367ABEF_1_sp1), lhs_mat_01_0_sp1, rhs_mat_2367ABEF_0_sp1); - __m512i iacc_mat_10_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp1, rhs_mat_014589CD_3_sp1), lhs_mat_23_2_sp1, rhs_mat_014589CD_2_sp1), lhs_mat_23_1_sp1, rhs_mat_014589CD_1_sp1), lhs_mat_23_0_sp1, rhs_mat_014589CD_0_sp1); - __m512i iacc_mat_11_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp1, rhs_mat_2367ABEF_3_sp1), lhs_mat_23_2_sp1, rhs_mat_2367ABEF_2_sp1), lhs_mat_23_1_sp1, rhs_mat_2367ABEF_1_sp1), lhs_mat_23_0_sp1, rhs_mat_2367ABEF_0_sp1); - __m512i iacc_mat_00_sp2 = 
mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp2, rhs_mat_014589CD_3_sp2), lhs_mat_01_2_sp2, rhs_mat_014589CD_2_sp2), lhs_mat_01_1_sp2, rhs_mat_014589CD_1_sp2), lhs_mat_01_0_sp2, rhs_mat_014589CD_0_sp2); - __m512i iacc_mat_01_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp2, rhs_mat_2367ABEF_3_sp2), lhs_mat_01_2_sp2, rhs_mat_2367ABEF_2_sp2), lhs_mat_01_1_sp2, rhs_mat_2367ABEF_1_sp2), lhs_mat_01_0_sp2, rhs_mat_2367ABEF_0_sp2); - __m512i iacc_mat_10_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp2, rhs_mat_014589CD_3_sp2), lhs_mat_23_2_sp2, rhs_mat_014589CD_2_sp2), lhs_mat_23_1_sp2, rhs_mat_014589CD_1_sp2), lhs_mat_23_0_sp2, rhs_mat_014589CD_0_sp2); - __m512i iacc_mat_11_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp2, rhs_mat_2367ABEF_3_sp2), lhs_mat_23_2_sp2, rhs_mat_2367ABEF_2_sp2), lhs_mat_23_1_sp2, rhs_mat_2367ABEF_1_sp2), lhs_mat_23_0_sp2, rhs_mat_2367ABEF_0_sp2); - - // Output of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block - __m512i iacc_mat_00 = _mm512_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); - __m512i iacc_mat_01 = _mm512_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); - __m512i iacc_mat_10 = _mm512_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); - __m512i iacc_mat_11 = _mm512_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); - - - // Straighten out to make 4 row vectors - __m512i iacc_row_0 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_00, _mm512_shuffle_epi32(iacc_mat_01, (_MM_PERM_ENUM)78)); - __m512i iacc_row_1 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_00, (_MM_PERM_ENUM)78), iacc_mat_01); - __m512i iacc_row_2 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_10, _mm512_shuffle_epi32(iacc_mat_11, (_MM_PERM_ENUM)78)); - __m512i iacc_row_3 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_10, (_MM_PERM_ENUM)78), iacc_mat_11); - - // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes - const __m128i row_scale_f16 = _mm_shuffle_epi32(_mm_maskload_epi32((int const*)(a_ptrs[rp][b].d), loadMask), 68); - const __m512 row_scale_f32 = GGML_F32Cx16_REPEAT_LOAD(row_scale_f16); - - // Multiply with appropiate scales and accumulate - acc_rows[rp * 4] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_0), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[rp * 4]); - acc_rows[rp * 4 + 1] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_1), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[rp * 4 + 1]); - acc_rows[rp * 4 + 2] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_2), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[rp * 4 + 2]); - acc_rows[rp * 4 + 3] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_3), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[rp * 4 + 3]); - } - } - - // Store the accumulated values - for (int i = 0; i < 16; i++) { - _mm512_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), acc_rows[i]); - } - } - } - // Take a block_q8_0x4 structures at each pass of the loop and perform dot product operation - for (; y < nr / 4; y ++) { 
- - const block_q8_0x4 * a_ptr = a_ptr_start + (y * nb); - - // Take group of two block_q4_0x8 structures at each pass of the loop and perform dot product operation - for (int64_t x = 0; x < anc / 8; x += 2) { - - const block_q4_0x8 * b_ptr_0 = b_ptr_start + ((x) * b_nb); - const block_q4_0x8 * b_ptr_1 = b_ptr_start + ((x + 1) * b_nb); - - // Master FP accumulators - __m512 acc_rows[4]; - for (int i = 0; i < 4; i++) { - acc_rows[i] = _mm512_setzero_ps(); - } - - for (int64_t b = 0; b < nb; b++) { - // Load the sixteen block_q4_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....BE,BF - const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs)); - const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 32)); - const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 64)); - const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 96)); - - const __m256i rhs_raw_mat_89AB_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs)); - const __m256i rhs_raw_mat_CDEF_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 32)); - const __m256i rhs_raw_mat_89AB_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 64)); - const __m256i rhs_raw_mat_CDEF_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 96)); - - // Save the values in the following vectors in the formats B0B1B4B5, B2B3B6B7 for further processing and storing of valuess - const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); - const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); - const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); - const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); - - const __m256i rhs_raw_mat_89CD_0 = _mm256_blend_epi32(rhs_raw_mat_89AB_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_0, requiredOrder), 240); - const __m256i rhs_raw_mat_ABEF_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_0, requiredOrder), rhs_raw_mat_CDEF_0, 240); - const __m256i rhs_raw_mat_89CD_1 = _mm256_blend_epi32(rhs_raw_mat_89AB_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_1, requiredOrder), 240); - const __m256i rhs_raw_mat_ABEF_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_1, requiredOrder), rhs_raw_mat_CDEF_1, 240); - - const __m512i rhs_raw_mat_014589CD_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_0), rhs_raw_mat_89CD_0, 1); - const __m512i rhs_raw_mat_2367ABEF_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_0), rhs_raw_mat_ABEF_0, 1); - const __m512i rhs_raw_mat_014589CD_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_1), rhs_raw_mat_89CD_1, 1); - const __m512i rhs_raw_mat_2367ABEF_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_1), rhs_raw_mat_ABEF_1, 1); - - // 4-bit -> 8-bit - Sign is maintained - const __m512i rhs_mat_014589CD_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_0, m4bexpanded)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) B8(0-7) B9(0-7) BC(0-7) BD(0-7) - const __m512i rhs_mat_2367ABEF_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_0, 
m4bexpanded)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) BA(0-7) BB(0-7) BE(0-7) BF(0-7) - - const __m512i rhs_mat_014589CD_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_1, m4bexpanded)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) B8(8-15) B9(8-15) BC(8-15) BD(8-15) - const __m512i rhs_mat_2367ABEF_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_1, m4bexpanded)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) BA(8-15) BB(8-15) BE(8-15) BF(8-15) - - const __m512i rhs_mat_014589CD_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_0, 4), m4bexpanded)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) B8(16-23) B9(16-23) BC(16-23) BD(16-23) - const __m512i rhs_mat_2367ABEF_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_0, 4), m4bexpanded)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) BA(16-23) BB(16-23) BE(16-23) BF(16-23) - - const __m512i rhs_mat_014589CD_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_1, 4), m4bexpanded)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) B8(24-31) B9(24-31) BC(24-31) BD(24-31) - const __m512i rhs_mat_2367ABEF_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_1, 4), m4bexpanded)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) BA(24-31) BB(24-31) BE(24-31) BF(24-31) - - // Shuffle pattern one - right side input - const __m512i rhs_mat_014589CD_0_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) B8(0-3) B9(0-3) B8(0-3) B9(0-3) BC(0-3) BD(0-3) BC(0-3) BD(0-3) - const __m512i rhs_mat_2367ABEF_0_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) BA(0-3) BB(0-3) BA(0-3) BB(0-3) BE(0-3) BF(0-3) BE(0-3) BF(0-3) - - const __m512i rhs_mat_014589CD_1_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) B8(8-11) B9(8-11) B8(8-11) B9(8-11) BC(8-11) BD(8-11) BC(8-11) BD(8-11) - const __m512i rhs_mat_2367ABEF_1_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) BA(8-11) BB(8-11) BA(8-11) BB(8-11) BE(8-11) BF(8-11) BE(8-11) BF(8-11) - - const __m512i rhs_mat_014589CD_2_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) B8(16-19) B9(16-19) B8(16-19) B9(16-19) BC(16-19) BD(16-19) BC(16-19) BD(16-19) - const __m512i rhs_mat_2367ABEF_2_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) BA(16-19) BB(16-19) BA(16-19) BB(16-19) BE(16-19) BF(16-19) BE(16-19) BF(16-19) - - const __m512i rhs_mat_014589CD_3_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) B8(24-27) B9(24-27) B8(24-27) B9(24-27) BC(24-27) BD(24-27) BC(24-27) BD(24-27) - const __m512i rhs_mat_2367ABEF_3_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) BA(24-27) BB(24-27) BA(24-27) BB(24-27) BE(24-27) BF(24-27) BE(24-27) BF(24-27) - - // 
Shuffle pattern two - right side input - - const __m512i rhs_mat_014589CD_0_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) B8(4-7) B9(4-7) B8(4-7) B9(4-7) BC(4-7) BD(4-7) BC(4-7) BD(4-7) - const __m512i rhs_mat_2367ABEF_0_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) BA(4-7) BB(4-7) BA(4-7) BB(4-7) BE(4-7) BF(4-7) BE(4-7) BF(4-7) - - const __m512i rhs_mat_014589CD_1_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) B8(12-15) B9(12-15) B8(12-15) B9(12-15) BC(12-15) BD(12-15) BC(12-15) BD(12-15) - const __m512i rhs_mat_2367ABEF_1_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) BA(12-15) BB(12-15) BA(12-15) BB(12-15) BE(12-15) BF(12-15) BE(12-15) BF(12-15) - - const __m512i rhs_mat_014589CD_2_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) B8(20-23) B9(20-23) B8(20-23) B9(20-23) BC(20-23) BD(20-23) BC(20-23) BD(20-23) - const __m512i rhs_mat_2367ABEF_2_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) BA(20-23) BB(20-23) BA(20-23) BB(20-23) BE(20-23) BF(20-23) BE(20-23) BF(20-23) - - const __m512i rhs_mat_014589CD_3_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) B8(28-31) B9(28-31) B8(28-31) B9(28-31) BC(28-31) BD(28-31) BC(28-31) BD(28-31) - const __m512i rhs_mat_2367ABEF_3_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) BA(28-31) BB(28-31) BA(28-31) BB(28-31) BE(28-31) BF(28-31) BE(28-31) BF(28-31) - - - // Scale values - Load the weight scale values of two block_q4_0x8 - const __m512 col_scale_f32 = GGML_F32Cx8x2_LOAD(b_ptr_0[b].d, b_ptr_1[b].d); - - // Load the four block_q4_0 quantized values interleaved with each other in chunks of eight - A0,A1,A2,A3 - // Loaded as set of 128 bit vectors and repeated and stored into a 256 bit vector before again repeating into 512 bit vector - __m256i lhs_mat_ymm_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs))); - __m256i lhs_mat_ymm_01_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 0); - __m256i lhs_mat_ymm_23_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 17); - __m256i lhs_mat_ymm_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 32))); - __m256i lhs_mat_ymm_01_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 0); - __m256i lhs_mat_ymm_23_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 17); - __m256i lhs_mat_ymm_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 64))); - __m256i lhs_mat_ymm_01_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 0); - __m256i lhs_mat_ymm_23_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 17); - __m256i lhs_mat_ymm_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 96))); - __m256i lhs_mat_ymm_01_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 0); - __m256i 
lhs_mat_ymm_23_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 17); - - __m512i lhs_mat_01_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_0), lhs_mat_ymm_01_0, 1); - __m512i lhs_mat_23_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_0), lhs_mat_ymm_23_0, 1); - __m512i lhs_mat_01_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_1), lhs_mat_ymm_01_1, 1); - __m512i lhs_mat_23_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_1), lhs_mat_ymm_23_1, 1); - __m512i lhs_mat_01_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_2), lhs_mat_ymm_01_2, 1); - __m512i lhs_mat_23_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_2), lhs_mat_ymm_23_2, 1); - __m512i lhs_mat_01_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_3), lhs_mat_ymm_01_3, 1); - __m512i lhs_mat_23_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_3), lhs_mat_ymm_23_3, 1); - - // Shuffle pattern one - left side input - - const __m512i lhs_mat_01_0_sp1 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) - const __m512i lhs_mat_23_0_sp1 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) - - const __m512i lhs_mat_01_1_sp1 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) - const __m512i lhs_mat_23_1_sp1 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) - - const __m512i lhs_mat_01_2_sp1 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) - const __m512i lhs_mat_23_2_sp1 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) - - const __m512i lhs_mat_01_3_sp1 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) - const __m512i lhs_mat_23_3_sp1 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) - - // Shuffle pattern two - left side input - - const __m512i lhs_mat_01_0_sp2 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) - const __m512i lhs_mat_23_0_sp2 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) - - const __m512i lhs_mat_01_1_sp2 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)245); //A0(12-15) A0(12-15) A1(12-15) 
A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) - const __m512i lhs_mat_23_1_sp2 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) - - const __m512i lhs_mat_01_2_sp2 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) - const __m512i lhs_mat_23_2_sp2 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) - - const __m512i lhs_mat_01_3_sp2 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) - const __m512i lhs_mat_23_3_sp2 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) - - // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane - // Resembles MMLAs into 2x2 matrices in ARM Version - const __m512i zero = _mm512_setzero_epi32(); - __m512i iacc_mat_00_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp1, rhs_mat_014589CD_3_sp1), lhs_mat_01_2_sp1, rhs_mat_014589CD_2_sp1), lhs_mat_01_1_sp1, rhs_mat_014589CD_1_sp1), lhs_mat_01_0_sp1, rhs_mat_014589CD_0_sp1); - __m512i iacc_mat_01_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp1, rhs_mat_2367ABEF_3_sp1), lhs_mat_01_2_sp1, rhs_mat_2367ABEF_2_sp1), lhs_mat_01_1_sp1, rhs_mat_2367ABEF_1_sp1), lhs_mat_01_0_sp1, rhs_mat_2367ABEF_0_sp1); - __m512i iacc_mat_10_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp1, rhs_mat_014589CD_3_sp1), lhs_mat_23_2_sp1, rhs_mat_014589CD_2_sp1), lhs_mat_23_1_sp1, rhs_mat_014589CD_1_sp1), lhs_mat_23_0_sp1, rhs_mat_014589CD_0_sp1); - __m512i iacc_mat_11_sp1 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp1, rhs_mat_2367ABEF_3_sp1), lhs_mat_23_2_sp1, rhs_mat_2367ABEF_2_sp1), lhs_mat_23_1_sp1, rhs_mat_2367ABEF_1_sp1), lhs_mat_23_0_sp1, rhs_mat_2367ABEF_0_sp1); - __m512i iacc_mat_00_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp2, rhs_mat_014589CD_3_sp2), lhs_mat_01_2_sp2, rhs_mat_014589CD_2_sp2), lhs_mat_01_1_sp2, rhs_mat_014589CD_1_sp2), lhs_mat_01_0_sp2, rhs_mat_014589CD_0_sp2); - __m512i iacc_mat_01_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_01_3_sp2, rhs_mat_2367ABEF_3_sp2), lhs_mat_01_2_sp2, 
rhs_mat_2367ABEF_2_sp2), lhs_mat_01_1_sp2, rhs_mat_2367ABEF_1_sp2), lhs_mat_01_0_sp2, rhs_mat_2367ABEF_0_sp2); - __m512i iacc_mat_10_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp2, rhs_mat_014589CD_3_sp2), lhs_mat_23_2_sp2, rhs_mat_014589CD_2_sp2), lhs_mat_23_1_sp2, rhs_mat_014589CD_1_sp2), lhs_mat_23_0_sp2, rhs_mat_014589CD_0_sp2); - __m512i iacc_mat_11_sp2 = mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(mul_sum_i8_pairs_acc_int32x16(zero, lhs_mat_23_3_sp2, rhs_mat_2367ABEF_3_sp2), lhs_mat_23_2_sp2, rhs_mat_2367ABEF_2_sp2), lhs_mat_23_1_sp2, rhs_mat_2367ABEF_1_sp2), lhs_mat_23_0_sp2, rhs_mat_2367ABEF_0_sp2); - - // Output of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block - __m512i iacc_mat_00 = _mm512_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); - __m512i iacc_mat_01 = _mm512_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); - __m512i iacc_mat_10 = _mm512_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); - __m512i iacc_mat_11 = _mm512_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); - - - // Straighten out to make 4 row vectors - __m512i iacc_row_0 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_00, _mm512_shuffle_epi32(iacc_mat_01, (_MM_PERM_ENUM)78)); - __m512i iacc_row_1 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_00, (_MM_PERM_ENUM)78), iacc_mat_01); - __m512i iacc_row_2 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_10, _mm512_shuffle_epi32(iacc_mat_11, (_MM_PERM_ENUM)78)); - __m512i iacc_row_3 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_10, (_MM_PERM_ENUM)78), iacc_mat_11); - - // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes - const __m128i row_scale_f16 = _mm_shuffle_epi32(_mm_maskload_epi32((int const*)(a_ptr[b].d), loadMask), 68); - const __m512 row_scale_f32 = GGML_F32Cx16_REPEAT_LOAD(row_scale_f16); - - // Multiply with appropiate scales and accumulate - acc_rows[0] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_0), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[0]); - acc_rows[1] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_1), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[1]); - acc_rows[2] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_2), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[2]); - acc_rows[3] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_3), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[3]); - } - - // Store the accumulated values - for (int i = 0; i < 4; i++) { - _mm512_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), acc_rows[i]); - } - } - } - if (anc != nc) { - xstart = anc/8; - y = 0; - } - #endif // __AVX512F__ - - // Take group of four block_q8_0x4 structures at each pass of the loop and perform dot product operation - - for (; y < anr / 4; y += 4) { - const block_q8_0x4 * a_ptrs[4]; - - a_ptrs[0] = a_ptr_start + (y * nb); - for (int i = 0; i < 3; ++i) { - a_ptrs[i + 1] = a_ptrs[i] + nb; - } - - // Take group of eight block_q4_0x8 structures at each pass of the loop and perform dot product operation - for (int64_t x = xstart; x < nc / 8; x++) { - - const block_q4_0x8 * b_ptr = b_ptr_start + (x * b_nb); - - // Master FP accumulators - __m256 acc_rows[16]; - for (int i = 0; i < 16; i++) { - acc_rows[i] = _mm256_setzero_ps(); - } - - for 
(int64_t b = 0; b < nb; b++) { - // Load the eight block_q4_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....B6,B7 - const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs)); - const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 32)); - const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 64)); - const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 96)); - - // Save the values in the following vectors in the formats B0B1B4B5, B2B3B6B7 for further processing and storing of values - const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); - const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); - const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); - const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); - - // 4-bit -> 8-bit - Sign is maintained - const __m256i rhs_mat_0145_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_0, m4b)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) - const __m256i rhs_mat_2367_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_0, m4b)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) - - const __m256i rhs_mat_0145_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_1, m4b)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) - const __m256i rhs_mat_2367_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_1, m4b)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) - - const __m256i rhs_mat_0145_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_0, 4), m4b)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) - const __m256i rhs_mat_2367_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_0, 4), m4b)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) - - const __m256i rhs_mat_0145_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_1, 4), m4b)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) - const __m256i rhs_mat_2367_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_1, 4), m4b)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) - - // Shuffle pattern one - right side input - const __m256i rhs_mat_0145_0_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_0, 136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) - const __m256i rhs_mat_2367_0_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_0, 136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) - - const __m256i rhs_mat_0145_1_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_1, 136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) - const __m256i rhs_mat_2367_1_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_1, 136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) - - const __m256i rhs_mat_0145_2_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_2, 136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) - const __m256i rhs_mat_2367_2_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_2, 136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) - - const 
__m256i rhs_mat_0145_3_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_3, 136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) - const __m256i rhs_mat_2367_3_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_3, 136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) - - // Shuffle pattern two - right side input - - const __m256i rhs_mat_0145_0_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_0, 221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) - const __m256i rhs_mat_2367_0_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_0, 221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) - - const __m256i rhs_mat_0145_1_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_1, 221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) - const __m256i rhs_mat_2367_1_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_1, 221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) - - const __m256i rhs_mat_0145_2_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_2, 221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) - const __m256i rhs_mat_2367_2_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_2, 221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) - - const __m256i rhs_mat_0145_3_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_3, 221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) - const __m256i rhs_mat_2367_3_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_3, 221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) - - // Scale values - Load the wight scale values of block_q4_0x8 - const __m256 col_scale_f32 = GGML_F32Cx8_LOAD(b_ptr[b].d); - - // Process LHS in groups of four - for (int rp = 0; rp < 4; rp++) { - // Load the four block_q4_0 quantized values interleaved with each other in chunks of eight - A0,A1,A2,A3 - // Loaded as set of 128 bit vectors and repeated into a 256 bit vector - __m256i lhs_mat_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs))); - __m256i lhs_mat_01_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 0); - __m256i lhs_mat_23_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 17); - __m256i lhs_mat_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 32))); - __m256i lhs_mat_01_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 0); - __m256i lhs_mat_23_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 17); - __m256i lhs_mat_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 64))); - __m256i lhs_mat_01_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 0); - __m256i lhs_mat_23_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 17); - __m256i lhs_mat_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 96))); - __m256i lhs_mat_01_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 0); - __m256i lhs_mat_23_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 17); - - // Shuffle pattern one - left side input - const __m256i lhs_mat_01_0_sp1 = _mm256_shuffle_epi32(lhs_mat_01_0, 160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) - const __m256i lhs_mat_23_0_sp1 = _mm256_shuffle_epi32(lhs_mat_23_0, 160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) - - const __m256i lhs_mat_01_1_sp1 = _mm256_shuffle_epi32(lhs_mat_01_1, 160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) 
- const __m256i lhs_mat_23_1_sp1 = _mm256_shuffle_epi32(lhs_mat_23_1, 160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) - - const __m256i lhs_mat_01_2_sp1 = _mm256_shuffle_epi32(lhs_mat_01_2, 160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) - const __m256i lhs_mat_23_2_sp1 = _mm256_shuffle_epi32(lhs_mat_23_2, 160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) - - const __m256i lhs_mat_01_3_sp1 = _mm256_shuffle_epi32(lhs_mat_01_3, 160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) - const __m256i lhs_mat_23_3_sp1 = _mm256_shuffle_epi32(lhs_mat_23_3, 160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) - - // Shuffle pattern two - left side input - const __m256i lhs_mat_01_0_sp2 = _mm256_shuffle_epi32(lhs_mat_01_0, 245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) - const __m256i lhs_mat_23_0_sp2 = _mm256_shuffle_epi32(lhs_mat_23_0, 245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) - - const __m256i lhs_mat_01_1_sp2 = _mm256_shuffle_epi32(lhs_mat_01_1, 245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) - const __m256i lhs_mat_23_1_sp2 = _mm256_shuffle_epi32(lhs_mat_23_1, 245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) - - const __m256i lhs_mat_01_2_sp2 = _mm256_shuffle_epi32(lhs_mat_01_2, 245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) - const __m256i lhs_mat_23_2_sp2 = _mm256_shuffle_epi32(lhs_mat_23_2, 245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) - - const __m256i lhs_mat_01_3_sp2 = _mm256_shuffle_epi32(lhs_mat_01_3, 245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) - const __m256i lhs_mat_23_3_sp2 = _mm256_shuffle_epi32(lhs_mat_23_3, 245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) - - // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane - // Resembles MMLAs into 2x2 matrices in ARM Version - const __m256i zero = _mm256_setzero_si256(); - __m256i iacc_mat_00_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp1, rhs_mat_0145_3_sp1), lhs_mat_01_2_sp1, rhs_mat_0145_2_sp1), lhs_mat_01_1_sp1, rhs_mat_0145_1_sp1), lhs_mat_01_0_sp1, rhs_mat_0145_0_sp1); - __m256i iacc_mat_01_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp1, rhs_mat_2367_3_sp1), lhs_mat_01_2_sp1, rhs_mat_2367_2_sp1), lhs_mat_01_1_sp1, rhs_mat_2367_1_sp1), lhs_mat_01_0_sp1, rhs_mat_2367_0_sp1); - __m256i iacc_mat_10_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp1, rhs_mat_0145_3_sp1), lhs_mat_23_2_sp1, rhs_mat_0145_2_sp1), lhs_mat_23_1_sp1, rhs_mat_0145_1_sp1), lhs_mat_23_0_sp1, rhs_mat_0145_0_sp1); - __m256i iacc_mat_11_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp1, rhs_mat_2367_3_sp1), lhs_mat_23_2_sp1, rhs_mat_2367_2_sp1), lhs_mat_23_1_sp1, rhs_mat_2367_1_sp1), 
lhs_mat_23_0_sp1, rhs_mat_2367_0_sp1); - __m256i iacc_mat_00_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp2, rhs_mat_0145_3_sp2), lhs_mat_01_2_sp2, rhs_mat_0145_2_sp2), lhs_mat_01_1_sp2, rhs_mat_0145_1_sp2), lhs_mat_01_0_sp2, rhs_mat_0145_0_sp2); - __m256i iacc_mat_01_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp2, rhs_mat_2367_3_sp2), lhs_mat_01_2_sp2, rhs_mat_2367_2_sp2), lhs_mat_01_1_sp2, rhs_mat_2367_1_sp2), lhs_mat_01_0_sp2, rhs_mat_2367_0_sp2); - __m256i iacc_mat_10_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp2, rhs_mat_0145_3_sp2), lhs_mat_23_2_sp2, rhs_mat_0145_2_sp2), lhs_mat_23_1_sp2, rhs_mat_0145_1_sp2), lhs_mat_23_0_sp2, rhs_mat_0145_0_sp2); - __m256i iacc_mat_11_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp2, rhs_mat_2367_3_sp2), lhs_mat_23_2_sp2, rhs_mat_2367_2_sp2), lhs_mat_23_1_sp2, rhs_mat_2367_1_sp2), lhs_mat_23_0_sp2, rhs_mat_2367_0_sp2); - - // Output of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block - __m256i iacc_mat_00 = _mm256_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); - __m256i iacc_mat_01 = _mm256_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); - __m256i iacc_mat_10 = _mm256_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); - __m256i iacc_mat_11 = _mm256_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); - - // Straighten out to make 4 row vectors - __m256i iacc_row_0 = _mm256_blend_epi32(iacc_mat_00, _mm256_shuffle_epi32(iacc_mat_01, 78), 204); - __m256i iacc_row_1 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_00, 78), iacc_mat_01, 204); - __m256i iacc_row_2 = _mm256_blend_epi32(iacc_mat_10, _mm256_shuffle_epi32(iacc_mat_11, 78), 204); - __m256i iacc_row_3 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_10, 78), iacc_mat_11, 204); - - // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes - const __m256 row_scale_f32 = GGML_F32Cx8_REPEAT_LOAD(a_ptrs[rp][b].d, loadMask); - - // Multiply with appropiate scales and accumulate - acc_rows[rp * 4] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_0), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[rp * 4]); - acc_rows[rp * 4 + 1] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_1), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[rp * 4 + 1]); - acc_rows[rp * 4 + 2] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_2), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[rp * 4 + 2]); - acc_rows[rp * 4 + 3] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_3), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[rp * 4 + 3]); - } - } - - // Store the accumulated values - for (int i = 0; i < 16; i++) { - _mm256_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), acc_rows[i]); - } - } - } - - // Take a block_q8_0x4 structures at each pass of the loop and perform dot product operation - for (; y < nr / 4; y ++) { - - const block_q8_0x4 * a_ptr = a_ptr_start + (y * nb); - - // Load the eight block_q4_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....B6,B7 - for (int64_t x = xstart; x < nc / 8; 
x++) { - - const block_q4_0x8 * b_ptr = b_ptr_start + (x * b_nb); - - // Master FP accumulators - __m256 acc_rows[4]; - for (int i = 0; i < 4; i++) { - acc_rows[i] = _mm256_setzero_ps(); - } - - for (int64_t b = 0; b < nb; b++) { - // Load the eight block_q8_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....B6,B7 - const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs)); - const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 32)); - const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 64)); - const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 96)); - - // Save the values in the following vectors in the formats B0B1B4B5, B2B3B6B7 for further processing and storing of valuess - const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); - const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); - const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); - const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); - - // 4-bit -> 8-bit - Sign is maintained - const __m256i rhs_mat_0145_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_0, m4b)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) - const __m256i rhs_mat_2367_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_0, m4b)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) - - const __m256i rhs_mat_0145_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_1, m4b)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) - const __m256i rhs_mat_2367_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_1, m4b)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) - - const __m256i rhs_mat_0145_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_0, 4), m4b)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) - const __m256i rhs_mat_2367_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_0, 4), m4b)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) - - const __m256i rhs_mat_0145_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_1, 4), m4b)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) - const __m256i rhs_mat_2367_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_1, 4), m4b)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) - - // Shuffle pattern one - right side input - const __m256i rhs_mat_0145_0_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_0, 136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) - const __m256i rhs_mat_2367_0_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_0, 136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) - - const __m256i rhs_mat_0145_1_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_1, 136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) - const __m256i rhs_mat_2367_1_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_1, 136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) - - const __m256i rhs_mat_0145_2_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_2, 136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) 
B5(16-19) B4(16-19) B5(16-19) - const __m256i rhs_mat_2367_2_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_2, 136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) - - const __m256i rhs_mat_0145_3_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_3, 136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) - const __m256i rhs_mat_2367_3_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_3, 136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) - - // Shuffle pattern two - right side input - - const __m256i rhs_mat_0145_0_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_0, 221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) - const __m256i rhs_mat_2367_0_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_0, 221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) - - const __m256i rhs_mat_0145_1_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_1, 221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) - const __m256i rhs_mat_2367_1_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_1, 221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) - - const __m256i rhs_mat_0145_2_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_2, 221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) - const __m256i rhs_mat_2367_2_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_2, 221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) - - const __m256i rhs_mat_0145_3_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_3, 221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) - const __m256i rhs_mat_2367_3_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_3, 221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) - - // Scale values - Load the wight scale values of block_q4_0x8 - const __m256 col_scale_f32 = GGML_F32Cx8_LOAD(b_ptr[b].d); - - // Load the four block_q4_0 quantized values interleaved with each other in chunks of eight - A0,A1,A2,A3 - // Loaded as set of 128 bit vectors and repeated into a 256 bit vector - __m256i lhs_mat_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs))); - __m256i lhs_mat_01_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 0); - __m256i lhs_mat_23_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 17); - __m256i lhs_mat_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 32))); - __m256i lhs_mat_01_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 0); - __m256i lhs_mat_23_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 17); - __m256i lhs_mat_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 64))); - __m256i lhs_mat_01_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 0); - __m256i lhs_mat_23_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 17); - __m256i lhs_mat_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 96))); - __m256i lhs_mat_01_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 0); - __m256i lhs_mat_23_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 17); - - // Shuffle pattern one - left side input - - const __m256i lhs_mat_01_0_sp1 = _mm256_shuffle_epi32(lhs_mat_01_0, 160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) - const __m256i lhs_mat_23_0_sp1 = _mm256_shuffle_epi32(lhs_mat_23_0, 160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) - - const __m256i lhs_mat_01_1_sp1 = 
_mm256_shuffle_epi32(lhs_mat_01_1, 160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) - const __m256i lhs_mat_23_1_sp1 = _mm256_shuffle_epi32(lhs_mat_23_1, 160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) - - const __m256i lhs_mat_01_2_sp1 = _mm256_shuffle_epi32(lhs_mat_01_2, 160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) - const __m256i lhs_mat_23_2_sp1 = _mm256_shuffle_epi32(lhs_mat_23_2, 160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) - - const __m256i lhs_mat_01_3_sp1 = _mm256_shuffle_epi32(lhs_mat_01_3, 160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) - const __m256i lhs_mat_23_3_sp1 = _mm256_shuffle_epi32(lhs_mat_23_3, 160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) - - // Shuffle pattern two - left side input - - const __m256i lhs_mat_01_0_sp2 = _mm256_shuffle_epi32(lhs_mat_01_0, 245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) - const __m256i lhs_mat_23_0_sp2 = _mm256_shuffle_epi32(lhs_mat_23_0, 245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) - - const __m256i lhs_mat_01_1_sp2 = _mm256_shuffle_epi32(lhs_mat_01_1, 245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) - const __m256i lhs_mat_23_1_sp2 = _mm256_shuffle_epi32(lhs_mat_23_1, 245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) - - const __m256i lhs_mat_01_2_sp2 = _mm256_shuffle_epi32(lhs_mat_01_2, 245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) - const __m256i lhs_mat_23_2_sp2 = _mm256_shuffle_epi32(lhs_mat_23_2, 245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) - - const __m256i lhs_mat_01_3_sp2 = _mm256_shuffle_epi32(lhs_mat_01_3, 245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) - const __m256i lhs_mat_23_3_sp2 = _mm256_shuffle_epi32(lhs_mat_23_3, 245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) - - // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane - // Resembles MMLAs into 2x2 matrices in ARM Version - const __m256i zero = _mm256_setzero_si256(); - __m256i iacc_mat_00_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp1, rhs_mat_0145_3_sp1), lhs_mat_01_2_sp1, rhs_mat_0145_2_sp1), lhs_mat_01_1_sp1, rhs_mat_0145_1_sp1), lhs_mat_01_0_sp1, rhs_mat_0145_0_sp1); - __m256i iacc_mat_01_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp1, rhs_mat_2367_3_sp1), lhs_mat_01_2_sp1, rhs_mat_2367_2_sp1), lhs_mat_01_1_sp1, rhs_mat_2367_1_sp1), lhs_mat_01_0_sp1, rhs_mat_2367_0_sp1); - __m256i iacc_mat_10_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp1, rhs_mat_0145_3_sp1), lhs_mat_23_2_sp1, rhs_mat_0145_2_sp1), lhs_mat_23_1_sp1, rhs_mat_0145_1_sp1), lhs_mat_23_0_sp1, rhs_mat_0145_0_sp1); - __m256i iacc_mat_11_sp1 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, 
lhs_mat_23_3_sp1, rhs_mat_2367_3_sp1), lhs_mat_23_2_sp1, rhs_mat_2367_2_sp1), lhs_mat_23_1_sp1, rhs_mat_2367_1_sp1), lhs_mat_23_0_sp1, rhs_mat_2367_0_sp1); - __m256i iacc_mat_00_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp2, rhs_mat_0145_3_sp2), lhs_mat_01_2_sp2, rhs_mat_0145_2_sp2), lhs_mat_01_1_sp2, rhs_mat_0145_1_sp2), lhs_mat_01_0_sp2, rhs_mat_0145_0_sp2); - __m256i iacc_mat_01_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_01_3_sp2, rhs_mat_2367_3_sp2), lhs_mat_01_2_sp2, rhs_mat_2367_2_sp2), lhs_mat_01_1_sp2, rhs_mat_2367_1_sp2), lhs_mat_01_0_sp2, rhs_mat_2367_0_sp2); - __m256i iacc_mat_10_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp2, rhs_mat_0145_3_sp2), lhs_mat_23_2_sp2, rhs_mat_0145_2_sp2), lhs_mat_23_1_sp2, rhs_mat_0145_1_sp2), lhs_mat_23_0_sp2, rhs_mat_0145_0_sp2); - __m256i iacc_mat_11_sp2 = mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(mul_sum_i8_pairs_acc_int32x8(zero, lhs_mat_23_3_sp2, rhs_mat_2367_3_sp2), lhs_mat_23_2_sp2, rhs_mat_2367_2_sp2), lhs_mat_23_1_sp2, rhs_mat_2367_1_sp2), lhs_mat_23_0_sp2, rhs_mat_2367_0_sp2); - - // Output of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block - __m256i iacc_mat_00 = _mm256_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); - __m256i iacc_mat_01 = _mm256_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); - __m256i iacc_mat_10 = _mm256_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); - __m256i iacc_mat_11 = _mm256_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); - - - // Straighten out to make 4 row vectors - __m256i iacc_row_0 = _mm256_blend_epi32(iacc_mat_00, _mm256_shuffle_epi32(iacc_mat_01, 78), 204); - __m256i iacc_row_1 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_00, 78), iacc_mat_01, 204); - __m256i iacc_row_2 = _mm256_blend_epi32(iacc_mat_10, _mm256_shuffle_epi32(iacc_mat_11, 78), 204); - __m256i iacc_row_3 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_10, 78), iacc_mat_11, 204); - - // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes - const __m256 row_scale_f32 = GGML_F32Cx8_REPEAT_LOAD(a_ptr[b].d, loadMask); - - // Multiply with appropiate scales and accumulate - acc_rows[0] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_0), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[0]); - acc_rows[1] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_1), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[1]); - acc_rows[2] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_2), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[2]); - acc_rows[3] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_3), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[3]); - } - - // Store the accumulated values - for (int i = 0; i < 4; i++) { - _mm256_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), acc_rows[i]); - } - } - } return; } +#endif // defined(__AVX2__) || defined(__AVX512F__) -#endif // #if ! ((defined(_MSC_VER)) && ! 
defined(__clang__)) && defined(__aarch64__) ggml_gemm_q4_0_8x8_q8_0_generic(n, s, bs, vx, vy, nr, nc); } @@ -3364,6 +3408,21 @@ void ggml_gemm_q4_K_8x8_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const vo #endif } +void ggml_gemm_iq4_nl_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { +#if defined(__AVX2__) || defined(__AVX512F__) + { + __m256i signextendlut = _mm256_castsi128_si256(_mm_loadu_si128((const __m128i*)kvalues_iq4nl)); + signextendlut = _mm256_permute2f128_si256(signextendlut, signextendlut, 0); + + gemm_q4_b32_8x8_q8_0_lut_avx(n, s, bs, vx, vy, nr, nc, signextendlut); + + return; + } +#endif // defined(__AVX2__) || defined(__AVX512F__) + + ggml_gemm_iq4_nl_4x4_q8_0(n, s, bs, vx, vy, nr, nc); +} + void ggml_gemm_q2_K_8x8_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { const int qk = QK_K; const int nb = n / qk; diff --git a/ggml/src/ggml-cpu/ggml-cpu.c b/ggml/src/ggml-cpu/ggml-cpu.c index d89cd8f4ef..f6bea3df34 100644 --- a/ggml/src/ggml-cpu/ggml-cpu.c +++ b/ggml/src/ggml-cpu/ggml-cpu.c @@ -2022,6 +2022,11 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm ggml_compute_forward_opt_step_adamw(params, tensor); } break; + case GGML_OP_OPT_STEP_SGD: + { + ggml_compute_forward_opt_step_sgd(params, tensor); + } + break; case GGML_OP_NONE: { // nop @@ -2325,6 +2330,7 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { case GGML_OP_CROSS_ENTROPY_LOSS: case GGML_OP_CROSS_ENTROPY_LOSS_BACK: case GGML_OP_OPT_STEP_ADAMW: + case GGML_OP_OPT_STEP_SGD: { n_tasks = n_threads; } break; diff --git a/ggml/src/ggml-cpu/kleidiai/kleidiai.cpp b/ggml/src/ggml-cpu/kleidiai/kleidiai.cpp index 3a513a55d7..dff8fa244a 100644 --- a/ggml/src/ggml-cpu/kleidiai/kleidiai.cpp +++ b/ggml/src/ggml-cpu/kleidiai/kleidiai.cpp @@ -259,7 +259,10 @@ class tensor_traits : public ggml::cpu::tensor_traits { const int64_t m_start = 0; const int64_t n_step = static_cast(kernel->get_n_step()); - const int64_t num_threads = KAI_MIN(n / n_step, nth); + int64_t num_threads = KAI_MIN(n / n_step, nth); + if (num_threads <= 0) { + num_threads = 1; + } if (ith < num_threads) { const int64_t num_n_per_thread0 = round_down(n / num_threads, n_step); @@ -309,7 +312,8 @@ class tensor_traits : public ggml::cpu::tensor_traits { GGML_ASSERT(kernel); const int ith = params->ith; - const int nth = params->nth; + const int nth_raw = params->nth; + const int nth = nth_raw > 0 ? 
nth_raw : 1; const size_t k = ne00; const size_t m = ne11; @@ -327,9 +331,12 @@ class tensor_traits : public ggml::cpu::tensor_traits { const size_t num_n_per_thread = kai_roundup(kai_roundup(n, nth) / nth, n_step); const size_t n_start = ith * num_n_per_thread; - size_t n_to_process = num_n_per_thread; - if ((n_start + n_to_process) > n) { - n_to_process = n - n_start; + size_t n_to_process = 0; + if (n_start < n) { + n_to_process = num_n_per_thread; + if ((n_start + n_to_process) > n) { + n_to_process = n - n_start; + } } // Calculate number of columns to be processed per thread @@ -361,8 +368,10 @@ class tensor_traits : public ggml::cpu::tensor_traits { const void* lhs_ptr = (const void*)((const char *)lhs_packed + lhs_packed_offset); float *dst_ptr = reinterpret_cast(static_cast(dst->data) + dst_offset); - variant_call(kernel->run_kernel, m, n_to_process, k, QK4_0, lhs_ptr, rhs_ptr, dst_ptr, dst_stride, - sizeof(float), -FLT_MAX, FLT_MAX); + if (n_to_process > 0) { + variant_call(kernel->run_kernel, m, n_to_process, k, QK4_0, lhs_ptr, rhs_ptr, dst_ptr, dst_stride, + sizeof(float), -FLT_MAX, FLT_MAX); + } return true; } diff --git a/ggml/src/ggml-cpu/ops.cpp b/ggml/src/ggml-cpu/ops.cpp index 854f1c2b49..b72a2556a5 100644 --- a/ggml/src/ggml-cpu/ops.cpp +++ b/ggml/src/ggml-cpu/ops.cpp @@ -10330,6 +10330,7 @@ static void ggml_compute_forward_opt_step_adamw_f32( const int ir1 = MIN(ir0 + dr, nr); const float * adamw_params_ptr = ggml_get_data_f32(adamw_params); + const float alpha = adamw_params_ptr[0]; const float beta1 = adamw_params_ptr[1]; const float beta2 = adamw_params_ptr[2]; @@ -10337,7 +10338,7 @@ static void ggml_compute_forward_opt_step_adamw_f32( const float wd = adamw_params_ptr[4]; const float beta1h = adamw_params_ptr[5]; const float beta2h = adamw_params_ptr[6]; - + const float keep = 1.f - alpha * wd; for (int ir = ir0; ir < ir1; ++ir) { const int64_t i03 = ir/(ne02*ne01); const int64_t i02 = (ir - i03*ne02*ne01)/ne01; @@ -10360,7 +10361,7 @@ static void ggml_compute_forward_opt_step_adamw_f32( // The weight decay is applied independently of the Adam momenta m and v. // This is NOT equivalent to l2 regularization that adds w[i00]*w[i00] to the loss. 
// See: https://arxiv.org/pdf/1711.05101v3.pdf - w[i00] = w[i00]*(1.0f - alpha*wd) - alpha*mh/vh; + w[i00] = w[i00] * keep - alpha * mh / vh; } } } @@ -10382,3 +10383,63 @@ void ggml_compute_forward_opt_step_adamw( } } } + +static void ggml_compute_forward_opt_step_sgd_f32(const ggml_compute_params * params, ggml_tensor * dst) { + const ggml_tensor * src0 = dst->src[0]; + const ggml_tensor * src0_grad = dst->src[1]; + const ggml_tensor * sgd_params = dst->src[2]; + + GGML_ASSERT(ggml_are_same_shape(src0, src0_grad)); + GGML_ASSERT(ggml_nelements(sgd_params) == 2); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_UNARY_OP_LOCALS + GGML_ASSERT(nb00 == sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1) / nth; + + // row range for this thread + const int ir0 = dr * ith; + const int ir1 = MIN(ir0 + dr, nr); + + // using adamw param subset we care about - alpha, wd - could have a separate struct + const float * sgd_params_ptr = ggml_get_data_f32(sgd_params); + const float alpha = sgd_params_ptr[0]; + const float keep = 1.f - alpha * sgd_params_ptr[1]; + + for (int ir = ir0; ir < ir1; ++ir) { + const int64_t i03 = ir / (ne02 * ne01); + const int64_t i02 = (ir - i03 * ne02 * ne01) / ne01; + const int64_t i01 = (ir - i03 * ne02 * ne01 - i02 * ne01); + + const size_t offset = i03 * nb03 + i02 * nb02 + i01 * nb01; + + float * w = (float *) ((char *) src0->data + offset); // weight + const float * g = (const float *) ((const char *) src0_grad->data + offset); // grad + + for (int i00 = 0; i00 < ne00; ++i00) { + w[i00] = w[i00] * keep - alpha * g[i00]; + } + } +} + +void ggml_compute_forward_opt_step_sgd(const ggml_compute_params * params, ggml_tensor * dst) { + const ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_opt_step_sgd_f32(params, dst); + } + break; + default: + { + GGML_ABORT("fatal error - sgd is F32 only"); + } + } +} diff --git a/ggml/src/ggml-cpu/ops.h b/ggml/src/ggml-cpu/ops.h index f154afb462..82ea79eaa5 100644 --- a/ggml/src/ggml-cpu/ops.h +++ b/ggml/src/ggml-cpu/ops.h @@ -107,7 +107,7 @@ void ggml_compute_forward_cross_entropy_loss(const struct ggml_compute_params * void ggml_compute_forward_cross_entropy_loss_back(const struct ggml_compute_params * params, struct ggml_tensor * dst); void ggml_compute_forward_opt_step_adamw(const struct ggml_compute_params * params, struct ggml_tensor * dst); void ggml_compute_forward_mul_mat(const struct ggml_compute_params * params, struct ggml_tensor * dst); - +void ggml_compute_forward_opt_step_sgd(const struct ggml_compute_params * params, struct ggml_tensor * dst); #ifdef __cplusplus } #endif diff --git a/ggml/src/ggml-cpu/repack.cpp b/ggml/src/ggml-cpu/repack.cpp index 2583aefae4..f531d21e23 100644 --- a/ggml/src/ggml-cpu/repack.cpp +++ b/ggml/src/ggml-cpu/repack.cpp @@ -206,8 +206,9 @@ void ggml_gemv_q4_0_4x4_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, const int ncols_interleaved = 4; const int blocklen = 4; - assert (n % qk == 0); - assert (nc % ncols_interleaved == 0); + assert(nr == 1); + assert(n % qk == 0); + assert(nc % ncols_interleaved == 0); UNUSED(s); UNUSED(bs); @@ -307,30 +308,28 @@ void ggml_gemv_q4_0_8x8_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, UNUSED(ncols_interleaved); UNUSED(blocklen); - { - float sumf[8]; - int sumi; + float sumf[8]; + int sumi; - const block_q8_0 * a_ptr = (const block_q8_0 *) vy; - for (int x = 0; x < nc / ncols_interleaved; x++) { - 
const block_q4_0x8 * b_ptr = (const block_q4_0x8 *) vx + (x * nb); + const block_q8_0 * a_ptr = (const block_q8_0 *) vy; + for (int x = 0; x < nc / ncols_interleaved; x++) { + const block_q4_0x8 * b_ptr = (const block_q4_0x8 *) vx + (x * nb); - for (int j = 0; j < ncols_interleaved; j++) sumf[j] = 0.0; - for (int l = 0; l < nb; l++) { - for (int k = 0; k < (qk / (2 * blocklen)); k++) { - for (int j = 0; j < ncols_interleaved; j++) { - sumi = 0; - for (int i = 0; i < blocklen; ++i) { - const int v0 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] << 4); - const int v1 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0xF0); - sumi += ((v0 * a_ptr[l].qs[k * blocklen + i]) + (v1 * a_ptr[l].qs[k * blocklen + i + qk / 2])) >> 4; - } - sumf[j] += sumi * GGML_CPU_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_CPU_FP16_TO_FP32(a_ptr[l].d); + for (int j = 0; j < ncols_interleaved; j++) sumf[j] = 0.0; + for (int l = 0; l < nb; l++) { + for (int k = 0; k < (qk / (2 * blocklen)); k++) { + for (int j = 0; j < ncols_interleaved; j++) { + sumi = 0; + for (int i = 0; i < blocklen; ++i) { + const int v0 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] << 4); + const int v1 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0xF0); + sumi += ((v0 * a_ptr[l].qs[k * blocklen + i]) + (v1 * a_ptr[l].qs[k * blocklen + i + qk / 2])) >> 4; } + sumf[j] += sumi * GGML_CPU_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_CPU_FP16_TO_FP32(a_ptr[l].d); } } - for (int j = 0; j < ncols_interleaved; j++) s[x * ncols_interleaved + j] = sumf[j]; } + for (int j = 0; j < ncols_interleaved; j++) s[x * ncols_interleaved + j] = sumf[j]; } } @@ -494,43 +493,73 @@ void ggml_gemv_iq4_nl_4x4_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs const int ncols_interleaved = 4; const int blocklen = 4; - assert (n % qk == 0); - assert (nc % ncols_interleaved == 0); + assert(nr == 1); + assert(n % qk == 0); + assert(nc % ncols_interleaved == 0); - UNUSED(s); UNUSED(bs); - UNUSED(vx); - UNUSED(vy); UNUSED(nr); - UNUSED(nc); - UNUSED(nb); - UNUSED(ncols_interleaved); - UNUSED(blocklen); - { - float sumf[4]; - int sumi; + float sumf[4]; + int sumi; - const block_q8_0 * a_ptr = (const block_q8_0 *) vy; - for (int x = 0; x < nc / ncols_interleaved; x++) { - const block_iq4_nlx4 * b_ptr = (const block_iq4_nlx4 *) vx + (x * nb); + const block_q8_0 * a_ptr = (const block_q8_0 *) vy; + for (int x = 0; x < nc / ncols_interleaved; x++) { + const block_iq4_nlx4 * b_ptr = (const block_iq4_nlx4 *) vx + (x * nb); - for (int j = 0; j < ncols_interleaved; j++) sumf[j] = 0.0; - for (int l = 0; l < nb; l++) { - for (int k = 0; k < (qk / (2 * blocklen)); k++) { - for (int j = 0; j < ncols_interleaved; j++) { - sumi = 0; - for (int i = 0; i < blocklen; ++i) { - const int v0 = kvalues_iq4nl[b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0x0F]; - const int v1 = kvalues_iq4nl[b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] >> 4]; - sumi += ((v0 * a_ptr[l].qs[k * blocklen + i]) + (v1 * a_ptr[l].qs[k * blocklen + i + qk / 2])); - } - sumf[j] += sumi * GGML_CPU_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_CPU_FP16_TO_FP32(a_ptr[l].d); + for (int j = 0; j < ncols_interleaved; j++) sumf[j] = 0.0; + for (int l = 0; l < nb; l++) { + for (int k = 0; k < (qk / (2 * blocklen)); k++) { + for (int j = 0; j < ncols_interleaved; j++) { + sumi = 0; + for (int i = 0; i < blocklen; ++i) { + const int v0 = kvalues_iq4nl[b_ptr[l].qs[k * ncols_interleaved * blocklen 
+ j * blocklen + i] & 0x0F]; + const int v1 = kvalues_iq4nl[b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] >> 4]; + sumi += ((v0 * a_ptr[l].qs[k * blocklen + i]) + (v1 * a_ptr[l].qs[k * blocklen + i + qk / 2])); } + sumf[j] += sumi * GGML_CPU_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_CPU_FP16_TO_FP32(a_ptr[l].d); } } - for (int j = 0; j < ncols_interleaved; j++) s[x * ncols_interleaved + j] = sumf[j]; } + for (int j = 0; j < ncols_interleaved; j++) s[x * ncols_interleaved + j] = sumf[j]; + } +} + +void ggml_gemv_iq4_nl_8x8_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { + const int qk = QK8_0; + const int nb = n / qk; + const int ncols_interleaved = 8; + const int blocklen = 8; + + assert(nr == 1); + assert(n % qk == 0); + assert(nc % ncols_interleaved == 0); + + UNUSED(bs); + UNUSED(nr); + + float sumf[8]; + int sumi; + + const block_q8_0 * a_ptr = (const block_q8_0 *) vy; + for (int x = 0; x < nc / ncols_interleaved; x++) { + const block_iq4_nlx8 * b_ptr = (const block_iq4_nlx8 *) vx + (x * nb); + + for (int j = 0; j < ncols_interleaved; j++) sumf[j] = 0.0; + for (int l = 0; l < nb; l++) { + for (int k = 0; k < (qk / (2 * blocklen)); k++) { + for (int j = 0; j < ncols_interleaved; j++) { + sumi = 0; + for (int i = 0; i < blocklen; ++i) { + const int v0 = kvalues_iq4nl[b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0x0F]; + const int v1 = kvalues_iq4nl[b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] >> 4]; + sumi += ((v0 * a_ptr[l].qs[k * blocklen + i]) + (v1 * a_ptr[l].qs[k * blocklen + i + qk / 2])); + } + sumf[j] += sumi * GGML_CPU_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_CPU_FP16_TO_FP32(a_ptr[l].d); + } + } + } + for (int j = 0; j < ncols_interleaved; j++) s[x * ncols_interleaved + j] = sumf[j]; } } @@ -934,6 +963,50 @@ void ggml_gemm_iq4_nl_4x4_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs } } +void ggml_gemm_iq4_nl_8x8_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { + const int qk = QK8_0; + const int nb = n / qk; + const int ncols_interleaved = 8; + const int blocklen = 8; + + assert(n % qk == 0); + assert(nr % 4 == 0); + assert(nc % ncols_interleaved == 0); + + float sumf[4][8]; + int sumi; + + for (int y = 0; y < nr / 4; y++) { + const block_q8_0x4 * a_ptr = (const block_q8_0x4 *) vy + (y * nb); + for (int x = 0; x < nc / ncols_interleaved; x++) { + const block_iq4_nlx8 * b_ptr = (const block_iq4_nlx8 *) vx + (x * nb); + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) sumf[m][j] = 0.0; + } + for (int l = 0; l < nb; l++) { + for (int k = 0; k < (qk / (2 * blocklen)); k++) { + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) { + sumi = 0; + for (int i = 0; i < blocklen; ++i) { + const int v0 = kvalues_iq4nl[b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0x0F]; + const int v1 = kvalues_iq4nl[b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] >> 4]; + sumi += ((v0 * a_ptr[l].qs[k * 4 * blocklen + m * blocklen + i]) + + (v1 * a_ptr[l].qs[k * 4 * blocklen + m * blocklen + i + qk / 2 * 4])); + } + sumf[m][j] += sumi * GGML_CPU_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_CPU_FP16_TO_FP32(a_ptr[l].d[m]); + } + } + } + } + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) + s[(y * 4 + m) * bs + x * ncols_interleaved + j] = sumf[m][j]; + } + } + } +} + } // 
extern "C" static block_q4_0x4 make_block_q4_0x4(block_q4_0 * in, unsigned int blck_size_interleave) { @@ -1285,15 +1358,16 @@ static block_iq4_nlx4 make_block_iq4_nlx4(block_iq4_nl * in, unsigned int blck_s static int repack_iq4_nl_to_iq4_nl_4_bl(struct ggml_tensor * t, int interleave_block, const void * GGML_RESTRICT data, size_t data_size) { GGML_ASSERT(t->type == GGML_TYPE_IQ4_NL); - //GGML_ASSERT(interleave_block == 4 || interleave_block == 8); GGML_ASSERT(interleave_block == 4); - block_iq4_nlx4 * dst = (block_iq4_nlx4 *)t->data; - const block_iq4_nl * src = (const block_iq4_nl *)data; + const block_iq4_nl * src = (const block_iq4_nl *)data; + block_iq4_nlx4 * dst = ( block_iq4_nlx4 *)t->data; + block_iq4_nl dst_tmp[4]; + int nrow = ggml_nrows(t); int nrows_interleaved = 4; - int nblocks = t->ne[0] / QK4_0; + int nblocks = t->ne[0] / QK4_NL; GGML_ASSERT(data_size == nrow * nblocks * sizeof(block_iq4_nl)); @@ -1315,6 +1389,63 @@ static int repack_iq4_nl_to_iq4_nl_4_bl(struct ggml_tensor * t, int interleave_b GGML_UNUSED(data_size); } +static block_iq4_nlx8 make_block_iq4_nlx8(block_iq4_nl * in, unsigned int blck_size_interleave) { + block_iq4_nlx8 out; + + for (int i = 0; i < 8; i++) { + out.d[i] = in[i].d; + } + + const int end = QK4_NL * 4 / blck_size_interleave; + + if (blck_size_interleave == 8) { + for (int i = 0; i < end; ++i) { + int src_id = i % 8; + int src_offset = (i / 8) * blck_size_interleave; + int dst_offset = i * blck_size_interleave; + + memcpy(&out.qs[dst_offset], &in[src_id].qs[src_offset], sizeof(uint64_t)); + } + } else { + GGML_ASSERT(false); + } + + return out; +} + +static int repack_iq4_nl_to_iq4_nl_8_bl(struct ggml_tensor * t, int interleave_block, const void * GGML_RESTRICT data, size_t data_size) { + GGML_ASSERT(t->type == GGML_TYPE_IQ4_NL); + GGML_ASSERT(interleave_block == 8); + + const block_iq4_nl * src = (const block_iq4_nl *)data; + block_iq4_nlx8 * dst = ( block_iq4_nlx8 *)t->data; + + block_iq4_nl dst_tmp[8]; + + int nrow = ggml_nrows(t); + int nrows_interleaved = 8; + int nblocks = t->ne[0] / QK4_NL; + + GGML_ASSERT(data_size == nrow * nblocks * sizeof(block_iq4_nl)); + + if (t->ne[1] % nrows_interleaved != 0) { + return -1; + } + + for (int b = 0; b < nrow; b += nrows_interleaved) { + for (int64_t x = 0; x < nblocks; x++) { + for (int i = 0; i < nrows_interleaved; i++) { + dst_tmp[i] = src[x + i * nblocks]; + } + *dst++ = make_block_iq4_nlx8(dst_tmp, interleave_block); + } + src += nrows_interleaved * nblocks; + } + return 0; + + GGML_UNUSED(data_size); +} + namespace ggml::cpu::repack { // repack template @@ -1350,6 +1481,10 @@ template <> int repack(struct ggml_tensor * t, const void * // return repack_iq4_nl_to_iq4_nl_4_bl(t, 8, data, data_size); //} +template <> int repack(struct ggml_tensor * t, const void * data, size_t data_size) { + return repack_iq4_nl_to_iq4_nl_8_bl(t, 8, data, data_size); +} + // gemv template void gemv(int, float *, size_t, const void *, const void *, int, int); @@ -1378,6 +1513,10 @@ template <> void gemv(int n, float * s, size ggml_gemv_iq4_nl_4x4_q8_0(n, s, bs, vx, vy, nr, nc); } +template <> void gemv(int n, float * s, size_t bs, const void * vx, const void * vy, int nr, int nc) { + ggml_gemv_iq4_nl_8x8_q8_0(n, s, bs, vx, vy, nr, nc); +} + // gemm template void gemm(int, float *, size_t, const void *, const void *, int, int); @@ -1406,6 +1545,10 @@ template <> void gemm(int n, float * s, size ggml_gemm_iq4_nl_4x4_q8_0(n, s, bs, vx, vy, nr, nc); } +template <> void gemm(int n, float * s, size_t bs, const void * vx, 
const void * vy, int nr, int nc) { + ggml_gemm_iq4_nl_8x8_q8_0(n, s, bs, vx, vy, nr, nc); +} + class tensor_traits_base : public ggml::cpu::tensor_traits { public: virtual int repack(struct ggml_tensor * t, const void * data, size_t data_size) = 0; @@ -1680,6 +1823,7 @@ static const ggml::cpu::tensor_traits * ggml_repack_get_optimal_repack_type(cons // instance for IQ4 static const ggml::cpu::repack::tensor_traits iq4_nl_4x4_q8_0; + static const ggml::cpu::repack::tensor_traits iq4_nl_8x8_q8_0; if (cur->type == GGML_TYPE_Q4_0) { if (ggml_cpu_has_avx2() || (ggml_cpu_has_sve() && ggml_cpu_has_matmul_int8() && ggml_cpu_get_sve_cnt() == QK8_0)) { @@ -1710,6 +1854,11 @@ static const ggml::cpu::tensor_traits * ggml_repack_get_optimal_repack_type(cons } } } else if (cur->type == GGML_TYPE_IQ4_NL) { + if (ggml_cpu_has_avx2()) { + if (cur->ne[1] % 8 == 0) { + return &iq4_nl_8x8_q8_0; + } + } if (ggml_cpu_has_neon() && ggml_cpu_has_dotprod()) { if (cur->ne[1] % 4 == 0) { return &iq4_nl_4x4_q8_0; diff --git a/ggml/src/ggml-cpu/repack.h b/ggml/src/ggml-cpu/repack.h index cd322e7438..cb32b503d3 100644 --- a/ggml/src/ggml-cpu/repack.h +++ b/ggml/src/ggml-cpu/repack.h @@ -67,6 +67,13 @@ struct block_iq4_nlx4 { static_assert(sizeof(block_iq4_nlx4) == 4 * sizeof(ggml_half) + QK4_NL * 2, "wrong iq4_nlx4 block size/padding"); +struct block_iq4_nlx8 { + ggml_half d[8]; // deltas for 8 iq4_nl blocks + uint8_t qs[QK4_NL * 4]; // nibbles / quants for 8 iq4_nl blocks +}; + +static_assert(sizeof(block_iq4_nlx8) == 8 * sizeof(ggml_half) + QK4_NL * 4, "wrong iq4_nlx8 block size/padding"); + #if defined(__cplusplus) extern "C" { #endif @@ -80,12 +87,14 @@ void ggml_gemv_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const vo void ggml_gemv_q4_K_8x8_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemv_q2_K_8x8_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemv_iq4_nl_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); +void ggml_gemv_iq4_nl_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_q4_0_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_q4_0_4x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_q4_K_8x8_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_q2_K_8x8_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_iq4_nl_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); +void ggml_gemm_iq4_nl_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); // Native implementations void ggml_quantize_mat_q8_0_4x4_generic(const float * GGML_RESTRICT x, void * GGML_RESTRICT vy, int64_t k); @@ -97,12 
+106,14 @@ void ggml_gemv_q4_0_8x8_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, void ggml_gemv_q4_K_8x8_q8_K_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemv_q2_K_8x8_q8_K_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemv_iq4_nl_4x4_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); +void ggml_gemv_iq4_nl_8x8_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_q4_0_4x4_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_q4_0_4x8_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_q4_0_8x8_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_q4_K_8x8_q8_K_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_q2_K_8x8_q8_K_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); void ggml_gemm_iq4_nl_4x4_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); +void ggml_gemm_iq4_nl_8x8_q8_0_generic(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); #if defined(__cplusplus) } // extern "C" diff --git a/ggml/src/ggml-cuda/CMakeLists.txt b/ggml/src/ggml-cuda/CMakeLists.txt index 98ed29bc9c..bce07ac362 100644 --- a/ggml/src/ggml-cuda/CMakeLists.txt +++ b/ggml/src/ggml-cuda/CMakeLists.txt @@ -120,6 +120,10 @@ if (CUDAToolkit_FOUND) set(CUDA_FLAGS -use_fast_math -extended-lambda) + if (GGML_CUDA_DEBUG) + list(APPEND CUDA_FLAGS -lineinfo) + endif() + if (CUDAToolkit_VERSION VERSION_GREATER_EQUAL "12.8") # Options are: # - none (not recommended) diff --git a/ggml/src/ggml-cuda/common.cuh b/ggml/src/ggml-cuda/common.cuh index 2e5d48797f..76ace816ff 100644 --- a/ggml/src/ggml-cuda/common.cuh +++ b/ggml/src/ggml-cuda/common.cuh @@ -78,6 +78,8 @@ #define GGML_CUDA_CC_IS_CDNA3(cc) (cc >= GGML_CUDA_CC_CDNA3 && cc < GGML_CUDA_CC_RDNA1) // Moore Threads +#define MUSART_HMASK 40300 // MUSA rc4.3, min. ver. 
for half2 -> uint mask comparisons + #define GGML_CUDA_CC_QY1 (GGML_CUDA_CC_OFFSET_MTHREADS + 0x210) // MTT S80, MTT S3000 #define GGML_CUDA_CC_QY2 (GGML_CUDA_CC_OFFSET_MTHREADS + 0x220) // MTT S4000 #define GGML_CUDA_CC_NG (GGML_CUDA_CC_OFFSET_MTHREADS + 0x310) // TBD @@ -87,6 +89,10 @@ #define GGML_CUDA_CC_IS_QY2(cc) (cc >= GGML_CUDA_CC_QY2 && cc < GGML_CUDA_CC_NG) #define GGML_CUDA_CC_IS_NG(cc) (cc >= GGML_CUDA_CC_NG) +#if !defined(GGML_USE_HIP) && !defined(GGML_USE_MUSA) && CUDART_VERSION >= 11070 +# define GGML_CUDA_USE_CUB +#endif // !defined(GGML_USE_HIP) && !defined(GGML_USE_MUSA) && CUDART_VERSION >= 11070 + #ifdef __CUDA_ARCH_LIST__ constexpr bool ggml_cuda_has_arch_impl(int) { return false; @@ -312,11 +318,11 @@ static bool turing_mma_available(const int cc) { } static bool ampere_mma_available(const int cc) { - return cc < GGML_CUDA_CC_OFFSET_AMD && ggml_cuda_highest_compiled_arch(cc) >= GGML_CUDA_CC_AMPERE; + return GGML_CUDA_CC_IS_NVIDIA(cc) && ggml_cuda_highest_compiled_arch(cc) >= GGML_CUDA_CC_AMPERE; } static bool cp_async_available(const int cc) { - return cc < GGML_CUDA_CC_OFFSET_AMD && ggml_cuda_highest_compiled_arch(cc) >= GGML_CUDA_CC_AMPERE; + return GGML_CUDA_CC_IS_NVIDIA(cc) && ggml_cuda_highest_compiled_arch(cc) >= GGML_CUDA_CC_AMPERE; } static constexpr __device__ int ggml_cuda_get_physical_warp_size() { @@ -420,26 +426,6 @@ static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { #endif // FP16_AVAILABLE } -// Row reduction kernel template - compute sum (norm=false) or mean (norm=true) -template -static __global__ void reduce_rows_f32(const float * x, float * dst, const int ncols) { - const int row = blockIdx.x; - const int col = threadIdx.x; - - float sum = 0.0f; - for (int i = col; i < ncols; i += blockDim.x) { - sum += x[row * ncols + i]; - } - - sum = warp_reduce_sum(sum); - - if (col != 0) { - return; - } - - dst[row] = norm ? 
sum / ncols : sum; -} - template static __device__ __forceinline__ int warp_reduce_all(int x) { #ifdef GGML_USE_HIP @@ -480,25 +466,21 @@ static __device__ __forceinline__ half ggml_cuda_hmax(const half a, const half b } static __device__ __forceinline__ half2 ggml_cuda_hmax2(const half2 a, const half2 b) { -#if defined(GGML_USE_HIP) && HIP_VERSION >= 50700000 +#if defined(GGML_USE_HIP) return half2(__hmax(a.x, b.x), __hmax(a.y, b.y)); -#elif !defined(GGML_USE_HIP) && CUDART_VERSION >= CUDART_HMAX +#elif CUDART_VERSION >= CUDART_HMAX return __hmax2(a, b); -#elif !defined(GGML_USE_HIP) +#else half2 ret; reinterpret_cast(ret.x) = __float2half(fmaxf( __low2float(a), __low2float(b))); reinterpret_cast(ret.y) = __float2half(fmaxf(__high2float(a), __high2float(b))); return ret; -#else - GGML_UNUSED(a); - GGML_UNUSED(b); - NO_DEVICE_CODE; #endif } template static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { -#if !defined(GGML_USE_HIP) && __CUDA_ARCH__ >= GGML_CUDA_CC_PASCAL || (defined(GGML_USE_HIP) && HIP_VERSION >= 50700000) +#if !defined(GGML_USE_HIP) && __CUDA_ARCH__ >= GGML_CUDA_CC_PASCAL || defined(GGML_USE_HIP) #pragma unroll for (int offset = width/2; offset > 0; offset >>= 1) { x = ggml_cuda_hmax2(x, __shfl_xor_sync(0xffffffff, x, offset, width)); @@ -507,16 +489,17 @@ static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { #else GGML_UNUSED(x); NO_DEVICE_CODE; -#endif // !defined(GGML_USE_HIP) && __CUDA_ARCH__ >= GGML_CUDA_CC_PASCAL || (defined(GGML_USE_HIP) && HIP_VERSION >= 50700000) +#endif // !defined(GGML_USE_HIP) && __CUDA_ARCH__ >= GGML_CUDA_CC_PASCAL || defined(GGML_USE_HIP) } -#if CUDART_VERSION < CUDART_HMASK +#if (defined(CUDART_VERSION) && CUDART_VERSION < CUDART_HMASK) || defined(GGML_USE_HIP) || \ + (defined(MUSART_VERSION) && MUSART_VERSION < MUSART_HMASK) static __device__ __forceinline__ uint32_t __hgt2_mask(const half2 a, const half2 b) { const uint32_t mask_low = 0x0000FFFF * (float( __low2half(a)) > float( __low2half(b))); const uint32_t mask_high = 0xFFFF0000 * (float(__high2half(a)) > float(__high2half(b))); return mask_low | mask_high; } -#endif // CUDART_VERSION < CUDART_HMASK +#endif // (defined(CUDART_VERSION) && CUDART_VERSION < CUDART_HMASK) || defined(GGML_USE_HIP) || (defined(MUSART_VERSION) && MUSART_VERSION < MUSART_HMASK) static __device__ __forceinline__ int ggml_cuda_dp4a(const int a, const int b, int c) { #if defined(GGML_USE_HIP) diff --git a/ggml/src/ggml-cuda/convert.cu b/ggml/src/ggml-cuda/convert.cu index e3beddbc1b..8f0efdcc12 100644 --- a/ggml/src/ggml-cuda/convert.cu +++ b/ggml/src/ggml-cuda/convert.cu @@ -31,8 +31,8 @@ static __global__ void dequantize_block(const void * __restrict__ vx, dst_t * __ dequantize_kernel(vx, ib, iqs, v); const int64_t iy0 = ((i03*ne02 + i02)*ne01 + i01)*ne00 + iybs + iqs; - y[iy0 + 0] = float(v.x); - y[iy0 + y_offset] = float(v.y); + y[iy0 + 0] = ggml_cuda_cast(v.x); + y[iy0 + y_offset] = ggml_cuda_cast(v.y); } template @@ -630,7 +630,7 @@ static __global__ void convert_unary( const int64_t ix = i03*s03 + i02*s02 + i01*s01 + i00; const int64_t iy = ((i03*ne02 + i02)*ne01 + i01)*ne00 + i00; - y[iy] = float(x[ix]); + y[iy] = ggml_cuda_cast(x[ix]); } template diff --git a/ggml/src/ggml-cuda/convert.cuh b/ggml/src/ggml-cuda/convert.cuh index f04214be17..c62e8a1b10 100644 --- a/ggml/src/ggml-cuda/convert.cuh +++ b/ggml/src/ggml-cuda/convert.cuh @@ -29,3 +29,16 @@ typedef to_t_nc_cuda_t to_bf16_nc_cuda_t; to_fp32_nc_cuda_t ggml_get_to_fp32_nc_cuda(ggml_type type); to_fp16_nc_cuda_t 
ggml_get_to_fp16_nc_cuda(ggml_type type); to_bf16_nc_cuda_t ggml_get_to_bf16_nc_cuda(ggml_type type); + +template + __host__ __device__ inline dst_t ggml_cuda_cast(src_t x) { + if constexpr (std::is_same_v) { + return x; + } else if constexpr(std::is_same_v) { + return __float2bfloat16(float(x)); + } else if constexpr(std::is_same_v) { + return __bfloat162float(x); + } else { + return float(x); + } +} diff --git a/ggml/src/ggml-cuda/cpy-utils.cuh b/ggml/src/ggml-cuda/cpy-utils.cuh index 410c12b7ba..e621cb9811 100644 --- a/ggml/src/ggml-cuda/cpy-utils.cuh +++ b/ggml/src/ggml-cuda/cpy-utils.cuh @@ -1,15 +1,7 @@ #pragma once #include "ggml-common.h" - -template -static __device__ __forceinline__ void convert_flt(const src_t * src, dst_t * dst) { - if constexpr (std::is_same_v) { - *dst = *src; - } else { - *dst = float(*src); - } -} +#include "convert.cuh" static __device__ __forceinline__ int best_index_int8(int n, const int8_t * val, float x) { if (x <= val[0]) return 0; @@ -221,5 +213,5 @@ static __device__ void cpy_blck_f32_iq4_nl(const char * cxi, char * cdsti) { template static __device__ void cpy_1_flt(const char * cxi, char * cdsti) { - convert_flt((const src_t *)cxi, (dst_t *)cdsti); + *(dst_t *) cdsti = ggml_cuda_cast(*(const src_t *) cxi); } diff --git a/ggml/src/ggml-cuda/fattn-common.cuh b/ggml/src/ggml-cuda/fattn-common.cuh index e46f0e2081..d4ed938391 100644 --- a/ggml/src/ggml-cuda/fattn-common.cuh +++ b/ggml/src/ggml-cuda/fattn-common.cuh @@ -539,11 +539,15 @@ static __global__ void flash_attn_mask_to_KV_max( all_inf = warp_reduce_all(all_inf); if (!all_inf) { - KV_max_sj += FATTN_KQ_STRIDE; break; } } + // If the break in the loop was not triggered, KV_max_sj is now -FATTN_KQ_STRIDE. + // If the break was triggered it's the lower edge of the tile with the first non-masked values. + // In either case, walk back the decrementation by FATTN_KQ_STRIDE. 
+ KV_max_sj += FATTN_KQ_STRIDE; + if (threadIdx.x != 0) { return; } diff --git a/ggml/src/ggml-cuda/fattn-wmma-f16.cu b/ggml/src/ggml-cuda/fattn-wmma-f16.cu index fdc4d17da2..6bc7943ccd 100644 --- a/ggml/src/ggml-cuda/fattn-wmma-f16.cu +++ b/ggml/src/ggml-cuda/fattn-wmma-f16.cu @@ -15,7 +15,6 @@ namespace wmma = mtmusa::wmma; namespace wmma = nvcuda::wmma; #endif // GGML_USE_MUSA #elif defined(GGML_HIP_ROCWMMA_FATTN) && defined(FP16_MMA_AVAILABLE) -#undef HIP_ENABLE_WARP_SYNC_BUILTINS // conflicts with rocWMMA headers #include namespace wmma = rocwmma; #endif // !defined(GGML_USE_HIP) diff --git a/ggml/src/ggml-cuda/getrows.cu b/ggml/src/ggml-cuda/getrows.cu index f77b2629a1..68d3254fbe 100644 --- a/ggml/src/ggml-cuda/getrows.cu +++ b/ggml/src/ggml-cuda/getrows.cu @@ -1,5 +1,6 @@ #include "getrows.cuh" #include "dequantize.cuh" +#include "convert.cuh" template static __global__ void k_get_rows( @@ -34,8 +35,8 @@ static __global__ void k_get_rows( dfloat2 v; dequantize_kernel(src0_row, ib, iqs, v); - dst_row[iybs + iqs + 0] = float(v.x); - dst_row[iybs + iqs + y_offset] = float(v.y); + dst_row[iybs + iqs + 0] = ggml_cuda_cast(v.x); + dst_row[iybs + iqs + y_offset] = ggml_cuda_cast(v.y); } template @@ -62,7 +63,7 @@ static __global__ void k_get_rows_float( dst_t * dst_row = dst + i10*s1 + i11*s2 + i12*s3; const src0_t * src0_row = (const src0_t *)((const char *) src0 + i01*nb01 + i11*nb02 + i12*nb03); - dst_row[i00] = float(src0_row[i00]); + dst_row[i00] = ggml_cuda_cast(src0_row[i00]); } template diff --git a/ggml/src/ggml-cuda/ggml-cuda.cu b/ggml/src/ggml-cuda/ggml-cuda.cu index d9110491ec..d6402a8daa 100644 --- a/ggml/src/ggml-cuda/ggml-cuda.cu +++ b/ggml/src/ggml-cuda/ggml-cuda.cu @@ -28,6 +28,7 @@ #include "ggml-cuda/mmvq.cuh" #include "ggml-cuda/norm.cuh" #include "ggml-cuda/opt-step-adamw.cuh" +#include "ggml-cuda/opt-step-sgd.cuh" #include "ggml-cuda/out-prod.cuh" #include "ggml-cuda/pad.cuh" #include "ggml-cuda/pool2d.cuh" @@ -180,30 +181,6 @@ static int ggml_cuda_parse_id(char devName[]) { #endif // defined(GGML_USE_HIP) static ggml_cuda_device_info ggml_cuda_init() { -#if defined(GGML_USE_HIP) - // Workaround for a rocBLAS bug when using multiple graphics cards: - // https://github.com/ROCmSoftwarePlatform/rocBLAS/issues/1346 - { - int major_version = 0; - size_t version_length = 0; - if (rocblas_get_version_string_size(&version_length) == rocblas_status_success) { - std::vector version(version_length+1, '\0'); - if (rocblas_get_version_string(version.data(), version.size()) == rocblas_status_success) { - version.resize(::strlen(version.data())); - int parsed_value = 0; - if (std::from_chars(version.data(), version.data() + version.size(), parsed_value).ec == std::errc()) { - major_version = parsed_value; - } - } - } - if (major_version < 4) { - GGML_LOG_DEBUG(GGML_CUDA_NAME " calling rocblas_initialize as a workaround for a rocBLAS bug\n"); - rocblas_initialize(); - CUDA_CHECK(cudaDeviceSynchronize()); - } - } -#endif - ggml_cuda_device_info info = {}; cudaError_t err = cudaGetDeviceCount(&info.device_count); @@ -2503,6 +2480,9 @@ static bool ggml_cuda_compute_forward(ggml_backend_cuda_context & ctx, struct gg case GGML_OP_OPT_STEP_ADAMW: ggml_cuda_opt_step_adamw(ctx, dst); break; + case GGML_OP_OPT_STEP_SGD: + ggml_cuda_opt_step_sgd(ctx, dst); + break; default: return false; } @@ -3560,6 +3540,7 @@ static bool ggml_backend_cuda_device_supports_op(ggml_backend_dev_t dev, const g case GGML_OP_CROSS_ENTROPY_LOSS: case GGML_OP_CROSS_ENTROPY_LOSS_BACK: case GGML_OP_OPT_STEP_ADAMW: + 
case GGML_OP_OPT_STEP_SGD: return true; default: return false; diff --git a/ggml/src/ggml-cuda/mean.cu b/ggml/src/ggml-cuda/mean.cu index 4b238a3998..347abc1866 100644 --- a/ggml/src/ggml-cuda/mean.cu +++ b/ggml/src/ggml-cuda/mean.cu @@ -1,4 +1,14 @@ #include "mean.cuh" +#include "reduce_rows.cuh" + +#ifdef GGML_CUDA_USE_CUB +#include +using namespace cub; +#endif // GGML_CUDA_USE_CUB + +template __global__ void divide_by_count(T * result, size_t count) { + *result /= static_cast(count); +} void ggml_cuda_op_mean(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { const ggml_tensor * src0 = dst->src[0]; @@ -13,7 +23,51 @@ void ggml_cuda_op_mean(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { const int64_t ncols = src0->ne[0]; const int64_t nrows = ggml_nrows(src0); - const dim3 block_dims(WARP_SIZE, 1, 1); +// Special case for reducing vectors +#ifdef GGML_CUDA_USE_CUB +#ifdef USE_CUDA_GRAPH + cudaStreamCaptureStatus iscapturing; + CUDA_CHECK(cudaStreamIsCapturing(stream, &iscapturing)); +#endif // USE_CUDA_GRAPH + if ((nrows == 1) && +#ifdef USE_CUDA_GRAPH + // CUDA_GRAPHS_DISABLED + ((ncols > 65536) && + ((ctx.cuda_graph->instance == nullptr) && (iscapturing == cudaStreamCaptureStatusNone) || + ctx.cuda_graph->disable_due_to_gpu_arch || ctx.cuda_graph->disable_due_to_too_many_updates || + ctx.cuda_graph->disable_due_to_failed_graph_capture)) || + // CUDA_GRAPHS ENABLED + ((ncols > 32768) && + !((ctx.cuda_graph->instance == nullptr) && (iscapturing == cudaStreamCaptureStatusNone) || + ctx.cuda_graph->disable_due_to_gpu_arch || ctx.cuda_graph->disable_due_to_too_many_updates || + ctx.cuda_graph->disable_due_to_failed_graph_capture))) { +#else + (ncols > 65536)) { +#endif // USE_CUDA_GRAPH + // Single row - use device-wide reduction + size_t tmp_size = 0; + ggml_cuda_pool & pool = ctx.pool(); + + DeviceReduce::Sum(nullptr, tmp_size, src0_d, dst_d, ncols, stream); + + ggml_cuda_pool_alloc tmp_alloc(pool, tmp_size); + DeviceReduce::Sum(tmp_alloc.ptr, tmp_size, src0_d, dst_d, ncols, stream); + + // Divide by ncols + divide_by_count<<<1, 1, 0, stream>>>(dst_d, ncols); + return; + } +#endif // GGML_CUDA_USE_CUB + const dim3 block_nums(nrows, 1, 1); - reduce_rows_f32<<>>(src0_d, dst_d, ncols); + + const int id = ggml_cuda_get_device(); + const int nsm = ggml_cuda_info().devices[id].nsm; + if ((nrows / nsm) < 2) { + const dim3 block_dims(512, 1, 1); + reduce_rows_f32<<>>(src0_d, dst_d, ncols); + } else { + const dim3 block_dims(ncols < 1024 ? 
32 : 128, 1, 1); + reduce_rows_f32<<>>(src0_d, dst_d, ncols); + } } diff --git a/ggml/src/ggml-cuda/mmvf.cu b/ggml/src/ggml-cuda/mmvf.cu index 1ad4bc75ba..16100b6804 100644 --- a/ggml/src/ggml-cuda/mmvf.cu +++ b/ggml/src/ggml-cuda/mmvf.cu @@ -1,5 +1,6 @@ #include "ggml.h" #include "common.cuh" +#include "convert.cuh" #include "mmvf.cuh" template @@ -93,8 +94,8 @@ static __global__ void mul_mat_vec_f( #pragma unroll for (int j = 0; j < ncols_dst; ++j) { const float2 tmpy = y2[j*stride_col_y2 + col2]; - sumf[j] += float(reinterpret_cast(&tmpx)[0]) * tmpy.x; - sumf[j] += float(reinterpret_cast(&tmpx)[1]) * tmpy.y; + sumf[j] += ggml_cuda_cast(reinterpret_cast(&tmpx)[0]) * tmpy.x; + sumf[j] += ggml_cuda_cast(reinterpret_cast(&tmpx)[1]) * tmpy.y; } } } else { diff --git a/ggml/src/ggml-cuda/opt-step-sgd.cu b/ggml/src/ggml-cuda/opt-step-sgd.cu new file mode 100644 index 0000000000..460b16de44 --- /dev/null +++ b/ggml/src/ggml-cuda/opt-step-sgd.cu @@ -0,0 +1,49 @@ +#include "ggml-impl.h" +#include "opt-step-sgd.cuh" + +#include + +static __global__ void opt_step_sgd_f32( + float * __restrict__ x, const float * __restrict__ g, + const float * __restrict__ pars, const int64_t k) { + + const int64_t i = (int64_t) blockIdx.x*blockDim.x + threadIdx.x; + + if (i >= k) { + return; + } + x[i] = x[i] * (1.0f - pars[0] * pars[1]) - pars[0] * g[i]; +} + +static void opt_step_sgd_f32_cuda( + float * x, const float * g, const float * __restrict__ pars, const int64_t k, cudaStream_t stream) { + + const dim3 block_dims(CUDA_OPT_STEP_SGD_BLOCK_SIZE, 1, 1); + const dim3 block_nums((k + CUDA_OPT_STEP_SGD_BLOCK_SIZE - 1) / CUDA_OPT_STEP_SGD_BLOCK_SIZE, 1, 1); + opt_step_sgd_f32<<>>(x, g, pars, k); +} + +void ggml_cuda_opt_step_sgd(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { + const ggml_tensor * src0 = dst->src[0]; + const ggml_tensor * src0_grad = dst->src[1]; + const ggml_tensor * params = dst->src[2]; + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT(src0_grad->type == GGML_TYPE_F32); + GGML_ASSERT(params->type == GGML_TYPE_F32); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(ggml_is_contiguous(src0_grad)); + GGML_ASSERT(ggml_is_contiguous(params)); + GGML_ASSERT(ggml_are_same_shape(src0, src0_grad)); + GGML_ASSERT(ggml_nelements(params) == 2); + + float * src0_d = (float *) src0->data; + const float * src0_grad_d = (const float *) src0_grad->data; + const float * params_d = (const float *) params->data; + + cudaStream_t stream = ctx.stream(); + + const int64_t ne = ggml_nelements(src0); + + opt_step_sgd_f32_cuda(src0_d, src0_grad_d, params_d, ne, stream); +} diff --git a/ggml/src/ggml-cuda/opt-step-sgd.cuh b/ggml/src/ggml-cuda/opt-step-sgd.cuh new file mode 100644 index 0000000000..f97ab7d9be --- /dev/null +++ b/ggml/src/ggml-cuda/opt-step-sgd.cuh @@ -0,0 +1,5 @@ +#include "common.cuh" + +#define CUDA_OPT_STEP_SGD_BLOCK_SIZE 256 + +void ggml_cuda_opt_step_sgd(ggml_backend_cuda_context & ctx, ggml_tensor * dst); diff --git a/ggml/src/ggml-cuda/reduce_rows.cuh b/ggml/src/ggml-cuda/reduce_rows.cuh new file mode 100644 index 0000000000..6bee204136 --- /dev/null +++ b/ggml/src/ggml-cuda/reduce_rows.cuh @@ -0,0 +1,53 @@ +#include "common.cuh" + +// Row reduction kernel template - compute sum (norm=false) or mean (norm=true) +template +static __global__ void reduce_rows_f32(const float * __restrict__ x, float * __restrict__ dst, const int ncols) { + const int row = blockIdx.x; + const int col = threadIdx.x; + + float sum = 0.0f; + const int num_unroll = 8; + float temp[num_unroll]; + float 
sum_temp[num_unroll] = { 0.0f }; + for (int i = col; i < ncols;) { + for (int j = 0; j < num_unroll; ++j) { + if (i < ncols) { + temp[j] = x[row * ncols + i]; + } else { + temp[j] = 0; + } + i += blockDim.x; + } + for (int j = 0; j < num_unroll; ++j) { + sum_temp[j] += temp[j]; + } + } + for (int j = 0; j < num_unroll; ++j) { + sum += sum_temp[j]; + } + + // sum up partial sums + sum = warp_reduce_sum(sum); + if (blockDim.x > WARP_SIZE) { + assert((blockDim.x <= 1024) && (blockDim.x % WARP_SIZE) == 0); + __shared__ float s_sum[32]; + const int warp_id = threadIdx.x / WARP_SIZE; + const int lane_id = threadIdx.x % WARP_SIZE; + if (lane_id == 0) { + s_sum[warp_id] = sum; + } + __syncthreads(); + sum = 0.0f; + if (lane_id < (blockDim.x / WARP_SIZE)) { + sum = s_sum[lane_id]; + } + sum = warp_reduce_sum(sum); + } + + if (col != 0) { + return; + } + + dst[row] = norm ? sum / ncols : sum; +} diff --git a/ggml/src/ggml-cuda/set-rows.cu b/ggml/src/ggml-cuda/set-rows.cu index 0798343645..b4115a43c2 100644 --- a/ggml/src/ggml-cuda/set-rows.cu +++ b/ggml/src/ggml-cuda/set-rows.cu @@ -3,11 +3,6 @@ typedef void (*set_rows_kernel_t)(const char * src, char * dst); -template -__device__ __forceinline__ void set_rows_1(const src_t * src_f, dst_t * dst_f) { - convert_flt(src_f, dst_f); -} - // Generic quantized set_rows kernel template template static __global__ void k_set_rows_quant( @@ -117,9 +112,7 @@ static __global__ void k_set_rows( const src_t * src0_row = src0 + i01*s01 + i02*s02 + i03*s03; dst_t * dst_row_ptr = dst + dst_row*s1 + i02*s2 + i03*s3; - const src_t* src_elem = src0_row + i00; - dst_t* dst_elem = dst_row_ptr + i00; - set_rows_1(src_elem, dst_elem); + dst_row_ptr[i00] = ggml_cuda_cast(src0_row[i00]); GGML_UNUSED(ne10); GGML_UNUSED(ne13); diff --git a/ggml/src/ggml-cuda/sum.cu b/ggml/src/ggml-cuda/sum.cu index eb3d7cdba9..c56257b440 100644 --- a/ggml/src/ggml-cuda/sum.cu +++ b/ggml/src/ggml-cuda/sum.cu @@ -1,19 +1,15 @@ -#if !defined(GGML_USE_HIP) && !defined(GGML_USE_MUSA) && CUDART_VERSION >= 11070 -#define USE_CUB -#endif // !defined(GGML_USE_HIP) && !defined(GGML_USE_MUSA) && CUDART_VERSION >= 11070 +#include "sum.cuh" +#include "sumrows.cuh" -#ifdef USE_CUB +#ifdef GGML_CUDA_USE_CUB #include using namespace cub; -#endif // USE_CUB - -#include "sumrows.cuh" -#include "sum.cuh" +#endif // GGML_CUDA_USE_CUB #include void sum_f32_cuda(ggml_cuda_pool & pool, const float * x, float * dst, const int64_t ne, cudaStream_t stream) { -#ifdef USE_CUB +#ifdef GGML_CUDA_USE_CUB size_t tmp_size = 0; DeviceReduce::Sum(nullptr, tmp_size, x, dst, ne, stream); ggml_cuda_pool_alloc tmp_alloc(pool, tmp_size); @@ -23,7 +19,7 @@ void sum_f32_cuda(ggml_cuda_pool & pool, const float * x, float * dst, const int // For AMD there is rocPRIM which could be used as a drop-in replacement via hipcub but this would require C++11 -> C++14. 
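The new reduce_rows_f32 kernel above replaces the old single-accumulator loop with a manually unrolled walk over the row: each thread keeps eight independent partial sums and only folds them together just before the warp/shared-memory reduction, which hides more memory latency per thread. A minimal host-side sketch of that accumulation pattern follows; it is illustrative only, the names row_partial_sum and stride are not part of ggml, stride is assumed to be positive, and on the GPU the returned value would still go through warp_reduce_sum.

    #include <cstddef>

    // Illustrative C++ analogue of the per-thread accumulation in reduce_rows_f32:
    // stride over the row, keep num_unroll independent partial sums, fold at the end.
    static float row_partial_sum(const float * x, size_t ncols, size_t start, size_t stride) {
        constexpr int num_unroll = 8;
        float sums[num_unroll] = { 0.0f };
        for (size_t i = start; i < ncols; ) {
            for (int j = 0; j < num_unroll; ++j) {
                if (i < ncols) {
                    sums[j] += x[i]; // out-of-range iterations contribute nothing
                }
                i += stride;         // on the GPU this step is blockDim.x
            }
        }
        float sum = 0.0f;
        for (int j = 0; j < num_unroll; ++j) {
            sum += sums[j];
        }
        return sum;
    }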
sum_rows_f32_cuda(x, dst, ne, 1, stream); GGML_UNUSED(pool); -#endif // USE_CUB +#endif // GGML_CUDA_USE_CUB } void ggml_cuda_op_sum(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { diff --git a/ggml/src/ggml-cuda/sumrows.cu b/ggml/src/ggml-cuda/sumrows.cu index 2eee08fa07..4025771aad 100644 --- a/ggml/src/ggml-cuda/sumrows.cu +++ b/ggml/src/ggml-cuda/sumrows.cu @@ -1,9 +1,17 @@ +#include "reduce_rows.cuh" #include "sumrows.cuh" void sum_rows_f32_cuda(const float * x, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - const dim3 block_dims(WARP_SIZE, 1, 1); + const int id = ggml_cuda_get_device(); + const int nsm = ggml_cuda_info().devices[id].nsm; const dim3 block_nums(nrows, 1, 1); - reduce_rows_f32<<>>(x, dst, ncols); + if ((nrows / nsm) < 2) { + const dim3 block_dims(512, 1, 1); + reduce_rows_f32<<>>(x, dst, ncols); + } else { + const dim3 block_dims(ncols < 1024 ? 32 : 128, 1, 1); + reduce_rows_f32<<>>(x, dst, ncols); + } } void ggml_cuda_op_sum_rows(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { @@ -19,8 +27,17 @@ void ggml_cuda_op_sum_rows(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { const int64_t ncols = src0->ne[0]; const int64_t nrows = ggml_nrows(src0); - const dim3 block_dims(WARP_SIZE, 1, 1); const dim3 block_nums(nrows, 1, 1); - reduce_rows_f32<<>>(src0_d, dst_d, ncols); + const int id = ggml_cuda_get_device(); + const int nsm = ggml_cuda_info().devices[id].nsm; + if ((nrows / nsm) < 2) { + // Increase num threads to 512 for small nrows to better hide the latency + const dim3 block_dims(512, 1, 1); + reduce_rows_f32<<>>(src0_d, dst_d, ncols); + } else { + // Enough active SMs to hide latency, use smaller blocks to allow better scheduling + const dim3 block_dims(ncols < 1024 ? 32 : 128, 1, 1); + reduce_rows_f32<<>>(src0_d, dst_d, ncols); + } } diff --git a/ggml/src/ggml-cuda/vendors/hip.h b/ggml/src/ggml-cuda/vendors/hip.h index c31f319232..6e9c67aca0 100644 --- a/ggml/src/ggml-cuda/vendors/hip.h +++ b/ggml/src/ggml-cuda/vendors/hip.h @@ -1,12 +1,10 @@ #pragma once -#define HIP_ENABLE_WARP_SYNC_BUILTINS 1 +#define HIP_DISABLE_WARP_SYNC_BUILTINS 1 #include #include #include -#include -// for rocblas_initialize() -#include "rocblas/rocblas.h" +#include #define CUBLAS_GEMM_DEFAULT HIPBLAS_GEMM_DEFAULT #define CUBLAS_GEMM_DEFAULT_TENSOR_OP HIPBLAS_GEMM_DEFAULT @@ -137,7 +135,7 @@ #define CUBLAS_STATUS_INTERNAL_ERROR HIPBLAS_STATUS_INTERNAL_ERROR #define CUBLAS_STATUS_NOT_SUPPORTED HIPBLAS_STATUS_NOT_SUPPORTED -#if HIP_VERSION >= 70000000 +#if HIP_VERSION >= 60500000 #define CUBLAS_COMPUTE_16F HIPBLAS_COMPUTE_16F #define CUBLAS_COMPUTE_32F HIPBLAS_COMPUTE_32F #define CUBLAS_COMPUTE_32F_FAST_16F HIPBLAS_COMPUTE_32F_FAST_16F @@ -149,7 +147,7 @@ #define CUBLAS_COMPUTE_32F_FAST_16F HIPBLAS_R_32F #define cublasComputeType_t hipblasDatatype_t #define cudaDataType_t hipblasDatatype_t -#endif // HIP_VERSION >= 7000000 +#endif // HIP_VERSION >= 6050000 #if !defined(__HIP_PLATFORM_AMD__) #error "The HIP backend supports only AMD targets" @@ -181,8 +179,7 @@ #define RDNA4 #endif -#if defined(__gfx1100__) || defined(__gfx1101__) || defined(__gfx1102__) || defined(__gfx1103__) || \ - defined(__gfx1150__) || defined(__gfx1151__) +#if defined(__GFX11__) #define RDNA3 #endif @@ -199,8 +196,8 @@ #define __has_builtin(x) 0 #endif -typedef hip_bfloat16 nv_bfloat16; -typedef short2 nv_bfloat162; // FIXME there is no 2x BF16 type being defined in bfloat16.h, ad-hoc compilation fix +typedef __hip_bfloat16 nv_bfloat16; +typedef __hip_bfloat162 nv_bfloat162; 
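Both sum_rows_f32_cuda and the mean path above now pick their launch configuration from the same heuristic: when the row count is small relative to the number of SMs, a large 512-thread block is used to hide latency within each row; otherwise smaller blocks (32 threads for short rows, 128 for long ones) give the scheduler more blocks to distribute. A small sketch of that choice, using the hypothetical helper name pick_block_dim, is:

    // Hedged sketch of the shared launch heuristic; pick_block_dim is an
    // illustrative name, while the thresholds (2 rows per SM, 1024 columns)
    // come from the diff above.
    static int pick_block_dim(long long nrows, int nsm, long long ncols) {
        if (nsm > 0 && nrows / nsm < 2) {
            return 512;                    // few rows: maximize threads per row
        }
        return ncols < 1024 ? 32 : 128;    // many rows: smaller blocks, better scheduling
    }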
typedef int8_t int8x4_t __attribute__((ext_vector_type(4))); typedef uint8_t uint8x4_t __attribute__((ext_vector_type(4))); @@ -251,17 +248,3 @@ static __device__ __forceinline__ unsigned int __vcmpne4(unsigned int a, unsigne } return c; } - -#if HIP_VERSION < 50600000 -// __shfl_xor() for half2 was added in ROCm 5.6 -static __device__ __forceinline__ half2 __shfl_xor(half2 var, int laneMask, int width) { - typedef union half2_b32 { - half2 val; - int b32; - } half2_b32_t; - half2_b32_t tmp; - tmp.val = var; - tmp.b32 = __shfl_xor(tmp.b32, laneMask, width); - return tmp.val; -} -#endif // HIP_VERSION < 50600000 diff --git a/ggml/src/ggml-hip/CMakeLists.txt b/ggml/src/ggml-hip/CMakeLists.txt index 852de97346..d327b90cce 100644 --- a/ggml/src/ggml-hip/CMakeLists.txt +++ b/ggml/src/ggml-hip/CMakeLists.txt @@ -46,8 +46,8 @@ if (GGML_HIP_ROCWMMA_FATTN) endif() endif() -if (${hip_VERSION} VERSION_LESS 5.5) - message(FATAL_ERROR "At least ROCM/HIP V5.5 is required") +if (${hip_VERSION} VERSION_LESS 6.1) + message(FATAL_ERROR "At least ROCM/HIP V6.1 is required") endif() message(STATUS "HIP and hipBLAS found") diff --git a/ggml/src/ggml-opencl/CMakeLists.txt b/ggml/src/ggml-opencl/CMakeLists.txt index d8290faa46..9a7ccbcff0 100644 --- a/ggml/src/ggml-opencl/CMakeLists.txt +++ b/ggml/src/ggml-opencl/CMakeLists.txt @@ -82,7 +82,9 @@ set(GGML_OPENCL_KERNELS mul_mv_q4_0_f32_1d_8x_flat mul_mv_q4_0_f32_1d_16x_flat mul_mv_q6_k + mul_mv_mxfp4_f32 mul_mv_id_q4_0_f32_8x_flat + mul_mv_id_mxfp4_f32 mul_mm_f32_f32_l4_lm mul_mm_f16_f32_l4_lm mul @@ -110,6 +112,9 @@ set(GGML_OPENCL_KERNELS mul_mat_f16_f32 conv2d conv2d_f16_f32 + flash_attn_f32_f16 + flash_attn_f16 + flash_attn_f32 ) foreach (K ${GGML_OPENCL_KERNELS}) diff --git a/ggml/src/ggml-opencl/ggml-opencl.cpp b/ggml/src/ggml-opencl/ggml-opencl.cpp index 8ba1e00df7..df27501361 100644 --- a/ggml/src/ggml-opencl/ggml-opencl.cpp +++ b/ggml/src/ggml-opencl/ggml-opencl.cpp @@ -25,6 +25,7 @@ #include #include #include +#include #include #include #include @@ -332,6 +333,7 @@ struct ggml_backend_opencl_context { cl_int alignment; size_t max_alloc_size; + size_t max_workgroup_size; bool fp16_support; bool has_vector_subgroup_broadcast; bool disable_fusion; @@ -365,6 +367,7 @@ struct ggml_backend_opencl_context { cl_program program_mul_mv_q4_0_f32_1d_8x_flat; cl_program program_mul_mv_q4_0_f32_1d_16x_flat; cl_program program_mul_mv_q6_K; + cl_program program_mul_mv_mxfp4_f32; cl_program program_mul_mv_f16_f16; cl_program program_mul_mv_f16_f32_1row; cl_program program_mul_mv_f16_f32_l4; @@ -398,6 +401,7 @@ struct ggml_backend_opencl_context { cl_program program_conv_2d_f16_f32; cl_program program_tsembd; cl_program program_mul_mv_id_q4_0_f32_8x_flat; + cl_program program_mul_mv_id_mxfp4_f32; cl_program program_mul_mm_f32_f32_l4_lm; cl_program program_mul_mm_f16_f32_l4_lm; @@ -422,6 +426,14 @@ struct ggml_backend_opencl_context { cl_kernel kernel_diag_mask_inf, kernel_diag_mask_inf_8; cl_kernel kernel_soft_max, kernel_soft_max_4; cl_kernel kernel_soft_max_f16, kernel_soft_max_4_f16; + std::map, cl_kernel> kernels_flash_attn_f16; + std::map, cl_kernel> kernels_flash_attn_f16_q1; + std::map, cl_kernel> kernels_flash_attn_f32; + std::map, cl_kernel> kernels_flash_attn_f32_q1; + std::map, cl_kernel> kernels_flash_attn_f32_f16; + std::map, cl_kernel> kernels_flash_attn_f32_f16_q1; + std::map, int> kernels_flash_attn_bm; + std::map, int> kernels_flash_attn_bn; cl_kernel kernel_get_rows_f32, kernel_get_rows_f16, kernel_get_rows_q4_0; cl_kernel kernel_set_rows_f32, 
kernel_set_rows_f16; cl_kernel kernel_rope_norm_f32, kernel_rope_norm_f16, kernel_rope_neox_f32, kernel_rope_neox_f16; @@ -439,6 +451,7 @@ struct ggml_backend_opencl_context { cl_kernel kernel_convert_block_q4_0_noshuffle; cl_kernel kernel_mul_mat_q4_0_f32_1d_8x_flat, kernel_mul_mat_q4_0_f32_1d_16x_flat; cl_kernel kernel_mul_mv_q6_K_f32; + cl_kernel kernel_mul_mv_mxfp4_f32; cl_kernel kernel_im2col_f32, kernel_im2col_f16; cl_kernel kernel_argsort_f32_i32; cl_kernel kernel_sum_rows_f32; @@ -455,6 +468,7 @@ struct ggml_backend_opencl_context { cl_kernel kernel_conv_2d_f16_f32; cl_kernel kernel_timestep_embedding; cl_kernel kernel_mul_mv_id_q4_0_f32_8x_flat; + cl_kernel kernel_mul_mv_id_mxfp4_f32; cl_kernel kernel_mul_mm_f32_f32_l4_lm; cl_kernel kernel_mul_mm_f16_f32_l4_lm; @@ -577,6 +591,7 @@ struct ggml_backend_opencl_context { cl_kernel kernel_transpose_32; cl_kernel kernel_transpose_32_16; cl_kernel kernel_transpose_16; + cl_kernel kernel_transpose_16_4x1; cl_mem A_s_d_max; // max scale buffer size for transpose cl_mem A_q_d_max; // max weight buffer size for transpose @@ -971,6 +986,22 @@ static void load_cl_kernels(ggml_backend_opencl_context *backend_ctx, ggml_cl_ve GGML_LOG_CONT("."); } + // mul_mv_mxfp4_f32 + { +#ifdef GGML_OPENCL_EMBED_KERNELS + const std::string kernel_src { + #include "mul_mv_mxfp4_f32.cl.h" + }; +#else + const std::string kernel_src = read_file("mul_mv_mxfp4_f32.cl"); +#endif + backend_ctx->program_mul_mv_mxfp4_f32 = + build_program_from_source(backend_ctx->context, backend_ctx->device, kernel_src.c_str(), compile_opts); + + CL_CHECK((backend_ctx->kernel_mul_mv_mxfp4_f32 = clCreateKernel(backend_ctx->program_mul_mv_mxfp4_f32, "kernel_mul_mv_mxfp4_f32", &err), err)); + GGML_LOG_CONT("."); + } + // mul_mv_f16_f16 { #ifdef GGML_OPENCL_EMBED_KERNELS @@ -1287,6 +1318,73 @@ static void load_cl_kernels(ggml_backend_opencl_context *backend_ctx, ggml_cl_ve GGML_LOG_CONT("."); } + // flash_attn + { + #ifdef GGML_OPENCL_EMBED_KERNELS + const std::string kernel_src_f16 { + #include "flash_attn_f16.cl.h" + }; + const std::string kernel_src_f32 { + #include "flash_attn_f32.cl.h" + }; + const std::string kernel_src_f32_f16 { + #include "flash_attn_f32_f16.cl.h" + }; + #else + const std::string kernel_src_f16 = read_file("flash_attn_f16.cl"); + const std::string kernel_src_f32 = read_file("flash_attn_f32.cl"); + const std::string kernel_src_f32_f16 = read_file("flash_attn_f32_f16.cl"); + #endif + + if (!kernel_src_f16.empty() && !kernel_src_f32.empty() && !kernel_src_f32_f16.empty()) { + const struct { int dk; int dv; int bm; int bn; } fa_dims[] = { + { 64, 64, 64, 64}, { 80, 80, 64, 32}, { 96, 96, 64, 32}, + {112, 112, 32, 32}, {128, 128, 32, 32}, {192, 128, 16, 16}, + {192, 192, 16, 16}, {256, 256, 16, 16}, + }; + + for (size_t i = 0; i < sizeof(fa_dims)/sizeof(fa_dims[0]); ++i) { + const int dk = fa_dims[i].dk; + const int dv = fa_dims[i].dv; + const int bm = fa_dims[i].bm; + const int bn = fa_dims[i].bn; + std::string OPTS = compile_opts + + " -D DK=" + std::to_string(dk) + + " -D DV=" + std::to_string(dv) + + " -D BLOCK_M=" + std::to_string(bm) + + " -D BLOCK_N=" + std::to_string(bn); + + cl_program prog_f16 = build_program_from_source(backend_ctx->context, backend_ctx->device, kernel_src_f16.c_str(), OPTS); + cl_kernel k_f16, k_f16_q1; + CL_CHECK((k_f16 = clCreateKernel(prog_f16, "flash_attn_f16", &err), err)); + CL_CHECK((k_f16_q1 = clCreateKernel(prog_f16, "flash_attn_f16_q1", &err), err)); + backend_ctx->kernels_flash_attn_f16[{dk, dv}] = k_f16; + 
backend_ctx->kernels_flash_attn_f16_q1[{dk, dv}] = k_f16_q1; + CL_CHECK(clReleaseProgram(prog_f16)); + + cl_program prog_f32 = build_program_from_source(backend_ctx->context, backend_ctx->device, kernel_src_f32.c_str(), OPTS); + cl_kernel k_f32, k_f32_q1; + CL_CHECK((k_f32 = clCreateKernel(prog_f32, "flash_attn_f32", &err), err)); + CL_CHECK((k_f32_q1 = clCreateKernel(prog_f32, "flash_attn_f32_q1", &err), err)); + backend_ctx->kernels_flash_attn_f32[{dk, dv}] = k_f32; + backend_ctx->kernels_flash_attn_f32_q1[{dk, dv}] = k_f32_q1; + CL_CHECK(clReleaseProgram(prog_f32)); + + cl_program prog_f32_f16 = build_program_from_source(backend_ctx->context, backend_ctx->device, kernel_src_f32_f16.c_str(), OPTS); + cl_kernel k_f32_f16, k_f32_f16_q1; + CL_CHECK((k_f32_f16 = clCreateKernel(prog_f32_f16, "flash_attn_f32_f16", &err), err)); + CL_CHECK((k_f32_f16_q1 = clCreateKernel(prog_f32_f16, "flash_attn_f32_f16_q1", &err), err)); + backend_ctx->kernels_flash_attn_f32_f16[{dk, dv}] = k_f32_f16; + backend_ctx->kernels_flash_attn_f32_f16_q1[{dk, dv}] = k_f32_f16_q1; + CL_CHECK(clReleaseProgram(prog_f32_f16)); + + backend_ctx->kernels_flash_attn_bm[{dk, dv}] = bm; + backend_ctx->kernels_flash_attn_bn[{dk, dv}] = bn; + } + GGML_LOG_CONT("."); + } + } + // argsort { #ifdef GGML_OPENCL_EMBED_KERNELS @@ -1611,6 +1709,22 @@ static void load_cl_kernels(ggml_backend_opencl_context *backend_ctx, ggml_cl_ve GGML_LOG_CONT("."); } + // mul_mv_id_mxfp4_f32 + { +#ifdef GGML_OPENCL_EMBED_KERNELS + const std::string kernel_src { + #include "mul_mv_id_mxfp4_f32.cl.h" + }; +#else + const std::string kernel_src = read_file("mul_mv_id_mxfp4_f32.cl"); +#endif + backend_ctx->program_mul_mv_id_mxfp4_f32 = + build_program_from_source(backend_ctx->context, backend_ctx->device, kernel_src.c_str(), compile_opts); + + CL_CHECK((backend_ctx->kernel_mul_mv_id_mxfp4_f32 = clCreateKernel(backend_ctx->program_mul_mv_id_mxfp4_f32, "kernel_mul_mv_id_mxfp4_f32", &err), err)); + GGML_LOG_CONT("."); + } + // Adreno kernels #ifdef GGML_OPENCL_USE_ADRENO_KERNELS // transpose @@ -1628,6 +1742,7 @@ static void load_cl_kernels(ggml_backend_opencl_context *backend_ctx, ggml_cl_ve CL_CHECK((backend_ctx->kernel_transpose_32_16 = clCreateKernel(backend_ctx->program_transpose, "kernel_transpose_32_16", &err), err)); CL_CHECK((backend_ctx->kernel_transpose_32 = clCreateKernel(backend_ctx->program_transpose, "kernel_transpose_32", &err), err)); CL_CHECK((backend_ctx->kernel_transpose_16 = clCreateKernel(backend_ctx->program_transpose, "kernel_transpose_16", &err), err)); + CL_CHECK((backend_ctx->kernel_transpose_16_4x1 = clCreateKernel(backend_ctx->program_transpose, "kernel_transpose_16_4x1", &err), err)); GGML_LOG_CONT("."); } @@ -2104,6 +2219,9 @@ static ggml_backend_opencl_context * ggml_cl2_init(ggml_backend_dev_t dev) { clGetDeviceInfo(device, CL_DEVICE_MAX_MEM_ALLOC_SIZE, sizeof(size_t), &backend_ctx->max_alloc_size, NULL); GGML_LOG_INFO("ggml_opencl: max mem alloc size: %zu MB\n", backend_ctx->max_alloc_size/1024/1024); + clGetDeviceInfo(device, CL_DEVICE_MAX_WORK_GROUP_SIZE, sizeof(size_t), &backend_ctx->max_workgroup_size, NULL); + GGML_LOG_INFO("ggml_opencl: device max workgroup size: %lu\n", backend_ctx->max_workgroup_size); + // Check SVM. 
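The flash-attention programs above are compiled once per supported {DK, DV} pair, with BLOCK_M/BLOCK_N baked in through -D options, and the resulting kernels are stored in maps keyed by that pair, so both the supports_op check and dispatch reduce to a lookup by head size. A rough sketch of that cache pattern, using stand-in types rather than the real cl_kernel handles, is:

    #include <map>
    #include <utility>

    struct fa_kernel_stub { /* stand-in for cl_kernel */ };

    // Hypothetical cache keyed by {DK, DV}; mirrors the kernels_flash_attn_* maps.
    static std::map<std::pair<int, int>, fa_kernel_stub> fa_kernels;

    // Returns nullptr for unsupported head sizes, letting the caller fall back.
    static const fa_kernel_stub * get_fa_kernel(int dk, int dv) {
        auto it = fa_kernels.find({dk, dv});
        return it == fa_kernels.end() ? nullptr : &it->second;
    }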
cl_device_svm_capabilities svm_caps; CL_CHECK(clGetDeviceInfo(device, CL_DEVICE_SVM_CAPABILITIES, sizeof(cl_device_svm_capabilities), &svm_caps, 0)); @@ -2419,7 +2537,8 @@ static ggml_status ggml_backend_opencl_graph_compute(ggml_backend_t backend, ggm } static bool ggml_opencl_supports_op(ggml_backend_dev_t dev, const struct ggml_tensor * op) { - GGML_UNUSED(dev); + ggml_backend_opencl_device_context * dev_ctx = (ggml_backend_opencl_device_context *)dev->context; + ggml_backend_opencl_context * backend_ctx = dev_ctx->backend_ctx; switch (op->op) { case GGML_OP_NONE: @@ -2481,6 +2600,13 @@ static bool ggml_opencl_supports_op(ggml_backend_dev_t dev, const struct ggml_te case GGML_OP_SCALE: return op->src[0]->type == GGML_TYPE_F32 && ggml_is_contiguous(op->src[0]); case GGML_OP_ADD: + if (op->type == GGML_TYPE_F16) { + const bool src0_ok = op->src[0]->type == GGML_TYPE_F16 || op->src[0]->type == GGML_TYPE_F32; + const bool src1_ok = op->src[1]->type == GGML_TYPE_F16 || op->src[1]->type == GGML_TYPE_F32; + if (src0_ok && src1_ok) { + return true; + } + } case GGML_OP_MUL: case GGML_OP_DIV: case GGML_OP_SUB: @@ -2545,13 +2671,14 @@ static bool ggml_opencl_supports_op(ggml_backend_dev_t dev, const struct ggml_te return true; } else if (op->src[0]->type == GGML_TYPE_F32) { return op->src[1]->type == GGML_TYPE_F32; - } else if (op->src[0]->type == GGML_TYPE_Q4_0 || + } else if (op->src[0]->type == GGML_TYPE_Q4_0 || op->src[0]->type == GGML_TYPE_MXFP4 || op->src[0]->type == GGML_TYPE_Q6_K) { return op->src[1]->type == GGML_TYPE_F32 && ggml_is_contiguous(op->src[0]) && ggml_is_contiguous(op->src[1]); } return false; case GGML_OP_MUL_MAT_ID: - if (op->src[0]->type == GGML_TYPE_Q4_0) { + if (op->src[0]->type == GGML_TYPE_Q4_0 || + op->src[0]->type == GGML_TYPE_MXFP4) { if (op->src[1]->type == GGML_TYPE_F32) { return ggml_is_contiguous(op->src[0]) && ggml_is_contiguous(op->src[1]); } @@ -2586,10 +2713,58 @@ static bool ggml_opencl_supports_op(ggml_backend_dev_t dev, const struct ggml_te } case GGML_OP_IM2COL: return true; - case GGML_OP_ARGSORT: - return op->src[0]->type == GGML_TYPE_F32; + case GGML_OP_ARGSORT: { + cl_kernel kernel = backend_ctx->kernel_argsort_f32_i32; + int max_workgroup_size = backend_ctx->get_kernel_workgroup_size(kernel); + + int cols = 1; + while (cols < op->ne[0]) { + cols *= 2; + } + + return cols <= max_workgroup_size && op->src[0]->type == GGML_TYPE_F32; + } case GGML_OP_SUM_ROWS: return op->src[0]->type == GGML_TYPE_F32 && ggml_is_contiguous(op->src[0]); + case GGML_OP_FLASH_ATTN_EXT: + { + if (op->src[4]) { + return false; + } + + const ggml_tensor * q = op->src[0]; + const ggml_tensor * k = op->src[1]; + const ggml_tensor * v = op->src[2]; + + const int dk = q->ne[0]; + const int dv = v->ne[0]; + + const struct { int dk; int dv; } supported_dims[] = { + { 64, 64}, { 80, 80}, { 96, 96}, + {112, 112}, {128, 128}, {192, 128}, + {192, 192}, {256, 256}, + }; + + bool dims_supported = false; + for (size_t i = 0; i < sizeof(supported_dims)/sizeof(supported_dims[0]); ++i) { + if (supported_dims[i].dk == dk && supported_dims[i].dv == dv) { + dims_supported = true; + break; + } + } + if (!dims_supported) { + return false; + } + + const bool is_f32_f32 = q->type == GGML_TYPE_F32 && k->type == GGML_TYPE_F32 && + v->type == GGML_TYPE_F32 && op->type == GGML_TYPE_F32; + const bool is_f16_f16 = q->type == GGML_TYPE_F16 && k->type == GGML_TYPE_F16 && + v->type == GGML_TYPE_F16 && op->type == GGML_TYPE_F16; + const bool is_f32_f16 = q->type == GGML_TYPE_F32 && k->type == GGML_TYPE_F16 && 
+ v->type == GGML_TYPE_F16 && op->type == GGML_TYPE_F32; + + return is_f32_f32 || is_f16_f16 || is_f32_f16; + } default: return false; } @@ -2937,7 +3112,10 @@ static void ggml_backend_opencl_buffer_set_tensor(ggml_backend_buffer_t buffer, // cl_mem qT_d = clCreateBuffer(context, CL_MEM_READ_WRITE, q_size_bytes, NULL, &err); CL_CHECK(err); - // size_t d_size_bytes = M * (K / 32) / 2 * sizeof(float); + bool K_tile_trans = true; + if ((K / 32) % 4 != 0){ + K_tile_trans =false; + } size_t d_size_bytes = M * (K / 32) * 2; region.origin = 0; region.size = d_size_bytes; @@ -2978,10 +3156,15 @@ static void ggml_backend_opencl_buffer_set_tensor(ggml_backend_buffer_t buffer, qT_d_image1D = clCreateImage(context, 0, &img_fmt_1d, &img_desc_1d, NULL, &err); CL_CHECK(err); - img_fmt_1d = { CL_RGBA, CL_HALF_FLOAT }; memset(&img_desc_1d, 0, sizeof(img_desc_1d)); + if (K_tile_trans) { + img_fmt_1d = { CL_RGBA, CL_HALF_FLOAT }; + img_desc_1d.image_width = M * K / 32 / 4; + } else { + img_fmt_1d = { CL_R, CL_HALF_FLOAT }; + img_desc_1d.image_width = M * K / 32; + } img_desc_1d.image_type = CL_MEM_OBJECT_IMAGE1D_BUFFER; - img_desc_1d.image_width = M * K / 32 / 4; img_desc_1d.buffer = extra->d; d_d_image1D = clCreateImage(context, 0, &img_fmt_1d, &img_desc_1d, NULL, &err); CL_CHECK(err); @@ -3017,6 +3200,10 @@ static void ggml_backend_opencl_buffer_set_tensor(ggml_backend_buffer_t buffer, int width_s = K / 32 / 4; kernel = backend_ctx->kernel_transpose_16; + if (!K_tile_trans) { + kernel = backend_ctx->kernel_transpose_16_4x1; + width_s = K / 32; + } CL_CHECK(clSetKernelArg(kernel, 0, sizeof(cl_mem), &d_d_image1D)); CL_CHECK(clSetKernelArg(kernel, 1, sizeof(cl_mem), &dT_d_image1D)); CL_CHECK(clSetKernelArg(kernel, 2, sizeof(int), &height_s)); @@ -3717,34 +3904,30 @@ static void ggml_cl_add(ggml_backend_t backend, const ggml_tensor * src0, const GGML_ASSERT(dst); GGML_ASSERT(dst->extra); - GGML_ASSERT(src0->type == src1->type); - GGML_ASSERT(src0->type == dst->type); - GGML_ASSERT(src0->type == GGML_TYPE_F32 || src0->type == GGML_TYPE_F16); - - const int ne00 = src0->ne[0]; - const int ne01 = src0->ne[1]; - const int ne02 = src0->ne[2]; - const int ne03 = src0->ne[3]; + const int ne00 = src0->ne[0]; + const int ne01 = src0->ne[1]; + const int ne02 = src0->ne[2]; + const int ne03 = src0->ne[3]; const cl_ulong nb00 = src0->nb[0]; const cl_ulong nb01 = src0->nb[1]; const cl_ulong nb02 = src0->nb[2]; const cl_ulong nb03 = src0->nb[3]; - const int ne10 = src1->ne[0]; - const int ne11 = src1->ne[1]; - const int ne12 = src1->ne[2]; - const int ne13 = src1->ne[3]; UNUSED(ne13); + const int ne10 = src1->ne[0]; + const int ne11 = src1->ne[1]; + const int ne12 = src1->ne[2]; + const int ne13 = src1->ne[3]; const cl_ulong nb10 = src1->nb[0]; const cl_ulong nb11 = src1->nb[1]; const cl_ulong nb12 = src1->nb[2]; - const cl_ulong nb13 = src1->nb[3]; UNUSED(nb13); + const cl_ulong nb13 = src1->nb[3]; - const int ne0 = dst->ne[0]; - const int ne1 = dst->ne[1]; - const int ne2 = dst->ne[2]; - const int ne3 = dst->ne[3]; + const int ne0 = dst->ne[0]; + const int ne1 = dst->ne[1]; + const int ne2 = dst->ne[2]; + const int ne3 = dst->ne[3]; const cl_ulong nb0 = dst->nb[0]; const cl_ulong nb1 = dst->nb[1]; @@ -3761,68 +3944,114 @@ static void ggml_cl_add(ggml_backend_t backend, const ggml_tensor * src0, const cl_ulong offset1 = extra1->offset + src1->view_offs; cl_ulong offsetd = extrad->offset + dst->view_offs; - bool bcast_row = false; cl_kernel kernel; - if (ggml_nelements(src1) == ne10 && ggml_is_contiguous(src1) && ne00 % 4 == 
0 && ne10 % 4 == 0) { + const bool bcast_row = ggml_nelements(src1) == ne10 && ggml_is_contiguous(src1) && ne00 % 4 == 0 && ne10 % 4 == 0; + + if (bcast_row) { GGML_ASSERT(ggml_is_contiguous(src0)); - - // src1 is a row GGML_ASSERT(ne11 == 1); + } - bcast_row = true; - int ne = ne00 / 4; - - if (src0->type == GGML_TYPE_F32) { + if (dst->type == GGML_TYPE_F32) { + GGML_ASSERT(src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32); + if (bcast_row) { kernel = backend_ctx->kernel_add_row; + const int ne = ne00 / 4; + CL_CHECK(clSetKernelArg(kernel, 0, sizeof(cl_mem), &extra0->data_device)); + CL_CHECK(clSetKernelArg(kernel, 1, sizeof(cl_ulong), &offset0)); + CL_CHECK(clSetKernelArg(kernel, 2, sizeof(cl_mem), &extra1->data_device)); + CL_CHECK(clSetKernelArg(kernel, 3, sizeof(cl_ulong), &offset1)); + CL_CHECK(clSetKernelArg(kernel, 4, sizeof(cl_mem), &extrad->data_device)); + CL_CHECK(clSetKernelArg(kernel, 5, sizeof(cl_ulong), &offsetd)); + CL_CHECK(clSetKernelArg(kernel, 6, sizeof(int), &ne)); } else { - kernel = backend_ctx->kernel_add_row_f16; - } - - CL_CHECK(clSetKernelArg(kernel, 0, sizeof(cl_mem), &extra0->data_device)); - CL_CHECK(clSetKernelArg(kernel, 1, sizeof(cl_ulong), &offset0)); - CL_CHECK(clSetKernelArg(kernel, 2, sizeof(cl_mem), &extra1->data_device)); - CL_CHECK(clSetKernelArg(kernel, 3, sizeof(cl_ulong), &offset1)); - CL_CHECK(clSetKernelArg(kernel, 4, sizeof(cl_mem), &extrad->data_device)); - CL_CHECK(clSetKernelArg(kernel, 5, sizeof(cl_ulong), &offsetd)); - CL_CHECK(clSetKernelArg(kernel, 6, sizeof(int), &ne)); - } else { - if (src0->type == GGML_TYPE_F32) { kernel = backend_ctx->kernel_add; + CL_CHECK(clSetKernelArg(kernel, 0, sizeof(cl_mem), &extra0->data_device)); + CL_CHECK(clSetKernelArg(kernel, 1, sizeof(cl_ulong), &offset0)); + CL_CHECK(clSetKernelArg(kernel, 2, sizeof(cl_mem), &extra1->data_device)); + CL_CHECK(clSetKernelArg(kernel, 3, sizeof(cl_ulong), &offset1)); + CL_CHECK(clSetKernelArg(kernel, 4, sizeof(cl_mem), &extrad->data_device)); + CL_CHECK(clSetKernelArg(kernel, 5, sizeof(cl_ulong), &offsetd)); + CL_CHECK(clSetKernelArg(kernel, 6, sizeof(int), &ne00)); + CL_CHECK(clSetKernelArg(kernel, 7, sizeof(int), &ne01)); + CL_CHECK(clSetKernelArg(kernel, 8, sizeof(int), &ne02)); + CL_CHECK(clSetKernelArg(kernel, 9, sizeof(int), &ne03)); + CL_CHECK(clSetKernelArg(kernel, 10, sizeof(cl_ulong), &nb00)); + CL_CHECK(clSetKernelArg(kernel, 11, sizeof(cl_ulong), &nb01)); + CL_CHECK(clSetKernelArg(kernel, 12, sizeof(cl_ulong), &nb02)); + CL_CHECK(clSetKernelArg(kernel, 13, sizeof(cl_ulong), &nb03)); + CL_CHECK(clSetKernelArg(kernel, 14, sizeof(int), &ne10)); + CL_CHECK(clSetKernelArg(kernel, 15, sizeof(int), &ne11)); + CL_CHECK(clSetKernelArg(kernel, 16, sizeof(int), &ne12)); + CL_CHECK(clSetKernelArg(kernel, 17, sizeof(int), &ne13)); + CL_CHECK(clSetKernelArg(kernel, 18, sizeof(cl_ulong), &nb10)); + CL_CHECK(clSetKernelArg(kernel, 19, sizeof(cl_ulong), &nb11)); + CL_CHECK(clSetKernelArg(kernel, 20, sizeof(cl_ulong), &nb12)); + CL_CHECK(clSetKernelArg(kernel, 21, sizeof(cl_ulong), &nb13)); + CL_CHECK(clSetKernelArg(kernel, 22, sizeof(int), &ne0)); + CL_CHECK(clSetKernelArg(kernel, 23, sizeof(int), &ne1)); + CL_CHECK(clSetKernelArg(kernel, 24, sizeof(int), &ne2)); + CL_CHECK(clSetKernelArg(kernel, 25, sizeof(int), &ne3)); + CL_CHECK(clSetKernelArg(kernel, 26, sizeof(cl_ulong), &nb0)); + CL_CHECK(clSetKernelArg(kernel, 27, sizeof(cl_ulong), &nb1)); + CL_CHECK(clSetKernelArg(kernel, 28, sizeof(cl_ulong), &nb2)); + CL_CHECK(clSetKernelArg(kernel, 29, sizeof(cl_ulong), 
&nb3)); + } + } else if (dst->type == GGML_TYPE_F16) { + GGML_ASSERT(src0->type == GGML_TYPE_F16 || src0->type == GGML_TYPE_F32); + GGML_ASSERT(src1->type == GGML_TYPE_F16 || src1->type == GGML_TYPE_F32); + const int type_src0 = (src0->type == GGML_TYPE_F32); + const int type_src1 = (src1->type == GGML_TYPE_F32); + if (bcast_row) { + kernel = backend_ctx->kernel_add_row_f16; + const int ne = ne00 / 4; + CL_CHECK(clSetKernelArg(kernel, 0, sizeof(cl_mem), &extra0->data_device)); + CL_CHECK(clSetKernelArg(kernel, 1, sizeof(cl_ulong), &offset0)); + CL_CHECK(clSetKernelArg(kernel, 2, sizeof(cl_mem), &extra1->data_device)); + CL_CHECK(clSetKernelArg(kernel, 3, sizeof(cl_ulong), &offset1)); + CL_CHECK(clSetKernelArg(kernel, 4, sizeof(cl_mem), &extrad->data_device)); + CL_CHECK(clSetKernelArg(kernel, 5, sizeof(cl_ulong), &offsetd)); + CL_CHECK(clSetKernelArg(kernel, 6, sizeof(int), &ne)); + CL_CHECK(clSetKernelArg(kernel, 7, sizeof(int), &type_src0)); + CL_CHECK(clSetKernelArg(kernel, 8, sizeof(int), &type_src1)); } else { kernel = backend_ctx->kernel_add_f16; + CL_CHECK(clSetKernelArg(kernel, 0, sizeof(cl_mem), &extra0->data_device)); + CL_CHECK(clSetKernelArg(kernel, 1, sizeof(cl_ulong), &offset0)); + CL_CHECK(clSetKernelArg(kernel, 2, sizeof(cl_mem), &extra1->data_device)); + CL_CHECK(clSetKernelArg(kernel, 3, sizeof(cl_ulong), &offset1)); + CL_CHECK(clSetKernelArg(kernel, 4, sizeof(cl_mem), &extrad->data_device)); + CL_CHECK(clSetKernelArg(kernel, 5, sizeof(cl_ulong), &offsetd)); + CL_CHECK(clSetKernelArg(kernel, 6, sizeof(int), &ne00)); + CL_CHECK(clSetKernelArg(kernel, 7, sizeof(int), &ne01)); + CL_CHECK(clSetKernelArg(kernel, 8, sizeof(int), &ne02)); + CL_CHECK(clSetKernelArg(kernel, 9, sizeof(int), &ne03)); + CL_CHECK(clSetKernelArg(kernel, 10, sizeof(cl_ulong), &nb00)); + CL_CHECK(clSetKernelArg(kernel, 11, sizeof(cl_ulong), &nb01)); + CL_CHECK(clSetKernelArg(kernel, 12, sizeof(cl_ulong), &nb02)); + CL_CHECK(clSetKernelArg(kernel, 13, sizeof(cl_ulong), &nb03)); + CL_CHECK(clSetKernelArg(kernel, 14, sizeof(int), &ne10)); + CL_CHECK(clSetKernelArg(kernel, 15, sizeof(int), &ne11)); + CL_CHECK(clSetKernelArg(kernel, 16, sizeof(int), &ne12)); + CL_CHECK(clSetKernelArg(kernel, 17, sizeof(int), &ne13)); + CL_CHECK(clSetKernelArg(kernel, 18, sizeof(cl_ulong), &nb10)); + CL_CHECK(clSetKernelArg(kernel, 19, sizeof(cl_ulong), &nb11)); + CL_CHECK(clSetKernelArg(kernel, 20, sizeof(cl_ulong), &nb12)); + CL_CHECK(clSetKernelArg(kernel, 21, sizeof(cl_ulong), &nb13)); + CL_CHECK(clSetKernelArg(kernel, 22, sizeof(int), &ne0)); + CL_CHECK(clSetKernelArg(kernel, 23, sizeof(int), &ne1)); + CL_CHECK(clSetKernelArg(kernel, 24, sizeof(int), &ne2)); + CL_CHECK(clSetKernelArg(kernel, 25, sizeof(int), &ne3)); + CL_CHECK(clSetKernelArg(kernel, 26, sizeof(cl_ulong), &nb0)); + CL_CHECK(clSetKernelArg(kernel, 27, sizeof(cl_ulong), &nb1)); + CL_CHECK(clSetKernelArg(kernel, 28, sizeof(cl_ulong), &nb2)); + CL_CHECK(clSetKernelArg(kernel, 29, sizeof(cl_ulong), &nb3)); + CL_CHECK(clSetKernelArg(kernel, 30, sizeof(int), &type_src0)); + CL_CHECK(clSetKernelArg(kernel, 31, sizeof(int), &type_src1)); } - - CL_CHECK(clSetKernelArg(kernel, 0, sizeof(cl_mem), &extra0->data_device)); - CL_CHECK(clSetKernelArg(kernel, 1, sizeof(cl_ulong), &offset0)); - CL_CHECK(clSetKernelArg(kernel, 2, sizeof(cl_mem), &extra1->data_device)); - CL_CHECK(clSetKernelArg(kernel, 3, sizeof(cl_ulong), &offset1)); - CL_CHECK(clSetKernelArg(kernel, 4, sizeof(cl_mem), &extrad->data_device)); - CL_CHECK(clSetKernelArg(kernel, 5, sizeof(cl_ulong), &offsetd)); 
- CL_CHECK(clSetKernelArg(kernel, 6, sizeof(int), &ne00)); - CL_CHECK(clSetKernelArg(kernel, 7, sizeof(int), &ne01)); - CL_CHECK(clSetKernelArg(kernel, 8, sizeof(int), &ne02)); - CL_CHECK(clSetKernelArg(kernel, 9, sizeof(int), &ne03)); - CL_CHECK(clSetKernelArg(kernel, 10, sizeof(cl_ulong), &nb00)); - CL_CHECK(clSetKernelArg(kernel, 11, sizeof(cl_ulong), &nb01)); - CL_CHECK(clSetKernelArg(kernel, 12, sizeof(cl_ulong), &nb02)); - CL_CHECK(clSetKernelArg(kernel, 13, sizeof(cl_ulong), &nb03)); - CL_CHECK(clSetKernelArg(kernel, 14, sizeof(int), &ne10)); - CL_CHECK(clSetKernelArg(kernel, 15, sizeof(int), &ne11)); - CL_CHECK(clSetKernelArg(kernel, 16, sizeof(int), &ne12)); - CL_CHECK(clSetKernelArg(kernel, 17, sizeof(int), &ne13)); - CL_CHECK(clSetKernelArg(kernel, 18, sizeof(cl_ulong), &nb10)); - CL_CHECK(clSetKernelArg(kernel, 19, sizeof(cl_ulong), &nb11)); - CL_CHECK(clSetKernelArg(kernel, 20, sizeof(cl_ulong), &nb12)); - CL_CHECK(clSetKernelArg(kernel, 21, sizeof(cl_ulong), &nb13)); - CL_CHECK(clSetKernelArg(kernel, 22, sizeof(int), &ne0)); - CL_CHECK(clSetKernelArg(kernel, 23, sizeof(int), &ne1)); - CL_CHECK(clSetKernelArg(kernel, 24, sizeof(int), &ne2)); - CL_CHECK(clSetKernelArg(kernel, 25, sizeof(int), &ne3)); - CL_CHECK(clSetKernelArg(kernel, 26, sizeof(cl_ulong), &nb0)); - CL_CHECK(clSetKernelArg(kernel, 27, sizeof(cl_ulong), &nb1)); - CL_CHECK(clSetKernelArg(kernel, 28, sizeof(cl_ulong), &nb2)); - CL_CHECK(clSetKernelArg(kernel, 29, sizeof(cl_ulong), &nb3)); + } else { + GGML_ASSERT(false && "unsupported data types for add"); } if (bcast_row) { @@ -3832,13 +4061,13 @@ static void ggml_cl_add(ggml_backend_t backend, const ggml_tensor * src0, const size_t * local_work_size_ptr = local_work_size; if (n % 64 != 0 && !backend_ctx->non_uniform_workgroups) { - local_work_size_ptr = nullptr; // Let driver choose the work-group sizes. 
+ local_work_size_ptr = nullptr; } - backend_ctx->enqueue_ndrange_kernel(kernel, 3, global_work_size, local_work_size_ptr, dst); + backend_ctx->enqueue_ndrange_kernel(kernel, 1, global_work_size, local_work_size_ptr, dst); } else { unsigned int nth = MIN(64, ne0); - size_t global_work_size[] = {ne01*nth, (size_t)ne02, (size_t)ne03}; + size_t global_work_size[] = {(size_t)ne01*nth, (size_t)ne02, (size_t)ne03}; size_t local_work_size[] = {nth, 1, 1}; backend_ctx->enqueue_ndrange_kernel(kernel, 3, global_work_size, local_work_size, dst); @@ -5351,6 +5580,133 @@ static void ggml_cl_timestep_embedding(ggml_backend_t backend, const ggml_tensor backend_ctx->enqueue_ndrange_kernel(kernel, 3, global_work_size, NULL, dst); } +static void ggml_cl_flash_attn(ggml_backend_t backend, const ggml_tensor * q, const ggml_tensor * k, ggml_tensor * dst) { + const ggml_tensor * v = dst->src[2]; + const ggml_tensor * mask = dst->src[3]; + GGML_ASSERT(q->extra); + GGML_ASSERT(k->extra); + GGML_ASSERT(v->extra); + GGML_ASSERT(dst->extra); + if (mask) { + GGML_ASSERT(mask->extra); + } + + ggml_backend_opencl_context *backend_ctx = (ggml_backend_opencl_context *)backend->context; + + const int n_q = q->ne[1]; + const int n_kv = k->ne[1]; + const int d_head_q = q->ne[0]; + const int d_head_v = v->ne[0]; + const int n_head = q->ne[2]; + const int n_head_kv = k->ne[2]; + const int n_batch = q->ne[3]; + + cl_kernel kernel = NULL; + + const bool is_f16 = q->type == GGML_TYPE_F16; + const bool is_mixed = q->type == GGML_TYPE_F32 && k->type == GGML_TYPE_F16; + const std::pair dk_dv = {d_head_q, d_head_v}; + + if (n_q == 1) { + if (is_mixed) { + kernel = backend_ctx->kernels_flash_attn_f32_f16_q1.at(dk_dv); + } else if (is_f16) { + kernel = backend_ctx->kernels_flash_attn_f16_q1.at(dk_dv); + } else { + kernel = backend_ctx->kernels_flash_attn_f32_q1.at(dk_dv); + } + } else { + if (is_mixed) { + kernel = backend_ctx->kernels_flash_attn_f32_f16.at(dk_dv); + } else if (is_f16) { + kernel = backend_ctx->kernels_flash_attn_f16.at(dk_dv); + } else { + kernel = backend_ctx->kernels_flash_attn_f32.at(dk_dv); + } + } + GGML_ASSERT(kernel != NULL); + + ggml_tensor_extra_cl * extra_q = (ggml_tensor_extra_cl *)q->extra; + ggml_tensor_extra_cl * extra_k = (ggml_tensor_extra_cl *)k->extra; + ggml_tensor_extra_cl * extra_v = (ggml_tensor_extra_cl *)v->extra; + ggml_tensor_extra_cl * extra_o = (ggml_tensor_extra_cl *)dst->extra; + ggml_tensor_extra_cl * extra_mask = mask ? (ggml_tensor_extra_cl *)mask->extra : NULL; + + cl_ulong offset_q = extra_q->offset + q->view_offs; + cl_ulong offset_k = extra_k->offset + k->view_offs; + cl_ulong offset_v = extra_v->offset + v->view_offs; + cl_ulong offset_o = extra_o->offset + dst->view_offs; + cl_mem mask_buffer = extra_mask ? extra_mask->data_device : NULL; + cl_ulong offset_mask = extra_mask ? extra_mask->offset + mask->view_offs : 0; + + const cl_ulong q_nb1 = q->nb[1], q_nb2 = q->nb[2], q_nb3 = q->nb[3]; + const cl_ulong k_nb1 = k->nb[1], k_nb2 = k->nb[2], k_nb3 = k->nb[3]; + const cl_ulong v_nb1 = v->nb[1], v_nb2 = v->nb[2], v_nb3 = v->nb[3]; + const cl_ulong o_nb1 = dst->nb[1], o_nb2 = dst->nb[2], o_nb3 = dst->nb[3]; + const cl_ulong mask_nb1 = mask ? mask->nb[1] : 0; + const cl_ulong mask_nb2 = mask ? mask->nb[2] : 0; + const cl_ulong mask_nb3 = mask ? mask->nb[3] : 0; + const int mask_ne2 = mask ? mask->ne[2] : 0; + const int mask_ne3 = mask ? 
mask->ne[3] : 0; + + float scale, max_bias, logit_softcap; + const float * params = (const float *)dst->op_params; + scale = params[0]; + max_bias = params[1]; + logit_softcap = params[2]; + + const int is_causal = (mask == NULL && n_q > 1 && n_q == n_kv); + + const int n_head_log2_val = n_head > 0 ? 1u << (int)floorf(log2f((float)n_head)) : 0; + const float n_head_log2_f = n_head_log2_val > 0 ? (float)n_head_log2_val : 1.0f; + const float m0 = powf(2.0f, -(max_bias) / n_head_log2_f); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2_f); + + CL_CHECK(clSetKernelArg(kernel, 0, sizeof(cl_mem), &extra_q->data_device)); + CL_CHECK(clSetKernelArg(kernel, 1, sizeof(cl_ulong), &offset_q)); + CL_CHECK(clSetKernelArg(kernel, 2, sizeof(cl_mem), &extra_k->data_device)); + CL_CHECK(clSetKernelArg(kernel, 3, sizeof(cl_ulong), &offset_k)); + CL_CHECK(clSetKernelArg(kernel, 4, sizeof(cl_mem), &extra_v->data_device)); + CL_CHECK(clSetKernelArg(kernel, 5, sizeof(cl_ulong), &offset_v)); + CL_CHECK(clSetKernelArg(kernel, 6, sizeof(cl_mem), &extra_o->data_device)); + CL_CHECK(clSetKernelArg(kernel, 7, sizeof(cl_ulong), &offset_o)); + CL_CHECK(clSetKernelArg(kernel, 8, sizeof(float), &scale)); + CL_CHECK(clSetKernelArg(kernel, 9, sizeof(int), &n_q)); + CL_CHECK(clSetKernelArg(kernel, 10, sizeof(int), &n_kv)); + CL_CHECK(clSetKernelArg(kernel, 11, sizeof(int), &is_causal)); + CL_CHECK(clSetKernelArg(kernel, 12, sizeof(int), &n_head)); + CL_CHECK(clSetKernelArg(kernel, 13, sizeof(cl_ulong), &q_nb1)); CL_CHECK(clSetKernelArg(kernel, 14, sizeof(cl_ulong), &q_nb2)); CL_CHECK(clSetKernelArg(kernel, 15, sizeof(cl_ulong), &q_nb3)); + CL_CHECK(clSetKernelArg(kernel, 16, sizeof(cl_ulong), &k_nb1)); CL_CHECK(clSetKernelArg(kernel, 17, sizeof(cl_ulong), &k_nb2)); CL_CHECK(clSetKernelArg(kernel, 18, sizeof(cl_ulong), &k_nb3)); + CL_CHECK(clSetKernelArg(kernel, 19, sizeof(cl_ulong), &v_nb1)); CL_CHECK(clSetKernelArg(kernel, 20, sizeof(cl_ulong), &v_nb2)); CL_CHECK(clSetKernelArg(kernel, 21, sizeof(cl_ulong), &v_nb3)); + CL_CHECK(clSetKernelArg(kernel, 22, sizeof(cl_ulong), &o_nb1)); CL_CHECK(clSetKernelArg(kernel, 23, sizeof(cl_ulong), &o_nb2)); CL_CHECK(clSetKernelArg(kernel, 24, sizeof(cl_ulong), &o_nb3)); + CL_CHECK(clSetKernelArg(kernel, 25, sizeof(float), &max_bias)); + CL_CHECK(clSetKernelArg(kernel, 26, sizeof(float), &m0)); + CL_CHECK(clSetKernelArg(kernel, 27, sizeof(float), &m1)); + CL_CHECK(clSetKernelArg(kernel, 28, sizeof(int), &n_head_log2_val)); + CL_CHECK(clSetKernelArg(kernel, 29, sizeof(float), &logit_softcap)); + CL_CHECK(clSetKernelArg(kernel, 30, sizeof(int), &n_head_kv)); + CL_CHECK(clSetKernelArg(kernel, 31, sizeof(cl_mem), &mask_buffer)); + CL_CHECK(clSetKernelArg(kernel, 32, sizeof(cl_ulong), &offset_mask)); + CL_CHECK(clSetKernelArg(kernel, 33, sizeof(cl_ulong), &mask_nb1)); + CL_CHECK(clSetKernelArg(kernel, 34, sizeof(cl_ulong), &mask_nb2)); + CL_CHECK(clSetKernelArg(kernel, 35, sizeof(cl_ulong), &mask_nb3)); + CL_CHECK(clSetKernelArg(kernel, 36, sizeof(int), &mask_ne2)); + CL_CHECK(clSetKernelArg(kernel, 37, sizeof(int), &mask_ne3)); + + if (n_q == 1) { + const size_t wg_size = 64; + size_t local_work_size[] = { wg_size, 1 }; + size_t global_work_size[] = { wg_size, (size_t)(n_head * n_batch) }; + backend_ctx->enqueue_ndrange_kernel(kernel, 2, global_work_size, local_work_size, dst); + } else { + const int block_m = backend_ctx->kernels_flash_attn_bm.at(dk_dv); + const size_t wg_size = block_m; + size_t local_work_size[] = { wg_size, 1 }; + size_t global_work_size[] = { (size_t)((n_q 
+ block_m - 1) / block_m) * wg_size, (size_t)(n_head * n_batch) }; + backend_ctx->enqueue_ndrange_kernel(kernel, 2, global_work_size, local_work_size, dst); + } +} + static void ggml_cl_mul_mat_f16_f32_tiled(ggml_backend_t backend, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_backend_opencl_context *backend_ctx = (ggml_backend_opencl_context *)backend->context; @@ -6205,11 +6561,47 @@ static void ggml_cl_mul_mat(ggml_backend_t backend, const ggml_tensor * src0, co CL_CHECK(clSetKernelArg(kernel, 13, sizeof(int), &r2)); CL_CHECK(clSetKernelArg(kernel, 14, sizeof(int), &r3)); break; + case GGML_TYPE_MXFP4: { + kernel = backend_ctx->kernel_mul_mv_mxfp4_f32; + + if (backend_ctx->gpu_family == INTEL) { + nth0 = 16; + nth1 = 2; + ndst = nth1*2; + } else if (backend_ctx->gpu_family == ADRENO) { + nth0 = 64; + nth1 = 2; + ndst = nth1*2; + } else { + GGML_ASSERT(false && "TODO: Unknown GPU"); + } + + CL_CHECK(clSetKernelArg(kernel, 0, sizeof(cl_mem), &extra0->data_device)); + CL_CHECK(clSetKernelArg(kernel, 1, sizeof(cl_ulong), &offset0)); + CL_CHECK(clSetKernelArg(kernel, 2, sizeof(cl_mem), &extra1->data_device)); + CL_CHECK(clSetKernelArg(kernel, 3, sizeof(cl_ulong), &offset1)); + CL_CHECK(clSetKernelArg(kernel, 4, sizeof(cl_mem), &extrad->data_device)); + CL_CHECK(clSetKernelArg(kernel, 5, sizeof(cl_ulong), &offsetd)); + CL_CHECK(clSetKernelArg(kernel, 6, sizeof(int), &ne00)); + CL_CHECK(clSetKernelArg(kernel, 7, sizeof(cl_ulong), &nb01)); + CL_CHECK(clSetKernelArg(kernel, 8, sizeof(cl_ulong), &nb02)); + CL_CHECK(clSetKernelArg(kernel, 9, sizeof(cl_ulong), &nb03)); + CL_CHECK(clSetKernelArg(kernel, 10, sizeof(int), &ne12)); + CL_CHECK(clSetKernelArg(kernel, 11, sizeof(cl_ulong), &nb11)); + CL_CHECK(clSetKernelArg(kernel, 12, sizeof(cl_ulong), &nb12)); + CL_CHECK(clSetKernelArg(kernel, 13, sizeof(cl_ulong), &nb13)); + CL_CHECK(clSetKernelArg(kernel, 14, sizeof(int), &ne0)); + CL_CHECK(clSetKernelArg(kernel, 15, sizeof(int), &ne1)); + CL_CHECK(clSetKernelArg(kernel, 16, sizeof(int), &r2)); + CL_CHECK(clSetKernelArg(kernel, 17, sizeof(int), &r3)); + CL_CHECK(clSetKernelArg(kernel, 18, sizeof(float)*nth0,nullptr)); + break; + } default: GGML_ASSERT(false && "not implemented"); } - if (src0t == GGML_TYPE_Q4_0 || + if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_MXFP4 || src0t == GGML_TYPE_Q4_1 || src0t == GGML_TYPE_Q8_0 || src0t == GGML_TYPE_Q2_K) { @@ -6258,10 +6650,12 @@ static void ggml_cl_mul_mat_id(ggml_backend_t backend, const ggml_tensor * src0, ggml_backend_opencl_context *backend_ctx = (ggml_backend_opencl_context *)backend->context; + ggml_tensor_extra_cl * extra0 = (ggml_tensor_extra_cl *)src0->extra; ggml_tensor_extra_cl * extra1 = (ggml_tensor_extra_cl *)src1->extra; ggml_tensor_extra_cl * extra2 = (ggml_tensor_extra_cl *)src2->extra; ggml_tensor_extra_cl * extrad = (ggml_tensor_extra_cl *)dst->extra; + cl_ulong offset0 = extra0->offset + src0->view_offs; cl_ulong offset1 = extra1->offset + src1->view_offs; cl_ulong offset2 = extra2->offset + src2->view_offs; cl_ulong offsetd = extrad->offset + dst->view_offs; @@ -6276,7 +6670,9 @@ static void ggml_cl_mul_mat_id(ggml_backend_t backend, const ggml_tensor * src0, const int ne03 = src0->ne[3]; const cl_ulong nb00 = src0->nb[0]; + const cl_ulong nb01 = src0->nb[1]; const cl_ulong nb02 = src0->nb[2]; + const cl_ulong nb03 = src0->nb[3]; const int ne10 = src1->ne[0]; const int ne11 = src1->ne[1]; @@ -6285,6 +6681,7 @@ static void ggml_cl_mul_mat_id(ggml_backend_t backend, const ggml_tensor * src0, const 
cl_ulong nb11 = src1->nb[1]; const cl_ulong nb12 = src1->nb[2]; + const cl_ulong nb13 = src1->nb[3]; const int ne20 = src2->ne[0]; const int ne21 = src2->ne[1]; @@ -6352,6 +6749,49 @@ static void ggml_cl_mul_mat_id(ggml_backend_t backend, const ggml_tensor * src0, break; } + case GGML_TYPE_MXFP4: { + kernel = backend_ctx->kernel_mul_mv_id_mxfp4_f32; + + if (backend_ctx->gpu_family == INTEL) { + sgs = 16; + nsg = 2; + ndst = 2; + } else if (backend_ctx->gpu_family == ADRENO) { + sgs = 64; + nsg = 2; + ndst = 2; + } else { + GGML_ASSERT(false && "TODO: Unknown GPU"); + } + + CL_CHECK(clSetKernelArg(kernel, 0, sizeof(cl_mem), &extra0->data_device)); + CL_CHECK(clSetKernelArg(kernel, 1, sizeof(cl_ulong), &offset0)); + CL_CHECK(clSetKernelArg(kernel, 2, sizeof(cl_mem), &extra1->data_device)); + CL_CHECK(clSetKernelArg(kernel, 3, sizeof(cl_ulong), &offset1)); + CL_CHECK(clSetKernelArg(kernel, 4, sizeof(cl_mem), &extra2->data_device)); + CL_CHECK(clSetKernelArg(kernel, 5, sizeof(cl_ulong), &offset2)); + CL_CHECK(clSetKernelArg(kernel, 6, sizeof(cl_mem), &extrad->data_device)); + CL_CHECK(clSetKernelArg(kernel, 7, sizeof(cl_ulong), &offsetd)); + CL_CHECK(clSetKernelArg(kernel, 8, sizeof(int), &ne00)); + CL_CHECK(clSetKernelArg(kernel, 9, sizeof(cl_ulong), &nb01)); + CL_CHECK(clSetKernelArg(kernel, 10, sizeof(cl_ulong), &nb02)); + CL_CHECK(clSetKernelArg(kernel, 11, sizeof(cl_ulong), &nb03)); + CL_CHECK(clSetKernelArg(kernel, 12, sizeof(int), &ne11)); + CL_CHECK(clSetKernelArg(kernel, 13, sizeof(int), &ne12)); + CL_CHECK(clSetKernelArg(kernel, 14, sizeof(cl_ulong), &nb11)); + CL_CHECK(clSetKernelArg(kernel, 15, sizeof(cl_ulong), &nb12)); + CL_CHECK(clSetKernelArg(kernel, 16, sizeof(cl_ulong), &nb13)); + CL_CHECK(clSetKernelArg(kernel, 17, sizeof(int), &ne20)); + CL_CHECK(clSetKernelArg(kernel, 18, sizeof(int), &ne21)); + CL_CHECK(clSetKernelArg(kernel, 19, sizeof(cl_ulong), &nb21)); + CL_CHECK(clSetKernelArg(kernel, 20, sizeof(int), &ne0)); + CL_CHECK(clSetKernelArg(kernel, 21, sizeof(int), &ne1)); + CL_CHECK(clSetKernelArg(kernel, 22, sizeof(int), &r2)); + CL_CHECK(clSetKernelArg(kernel, 23, sizeof(int), &r3)); + CL_CHECK(clSetKernelArg(kernel, 24, sizeof(float)*sgs,nullptr)); + + break; + } default: GGML_ASSERT(false && "not implemented");; } @@ -7423,6 +7863,12 @@ bool ggml_cl_compute_forward(ggml_backend_t backend, struct ggml_tensor * tensor } func = ggml_cl_sum_rows; break; + case GGML_OP_FLASH_ATTN_EXT: + if (!any_on_device) { + return false; + } + ggml_cl_flash_attn(backend, tensor->src[0], tensor->src[1], tensor); + return true; default: return false; } diff --git a/ggml/src/ggml-opencl/kernels/add.cl b/ggml/src/ggml-opencl/kernels/add.cl index 8bc926c889..509bf17344 100644 --- a/ggml/src/ggml-opencl/kernels/add.cl +++ b/ggml/src/ggml-opencl/kernels/add.cl @@ -112,7 +112,9 @@ kernel void kernel_add_f16( ulong nb0, ulong nb1, ulong nb2, - ulong nb3 + ulong nb3, + int type_src0, + int type_src1 ) { src0 = src0 + offset0; src1 = src1 + offset1; @@ -132,25 +134,57 @@ kernel void kernel_add_f16( for (int i0 = get_local_id(0); i0 < ne0; i0 += get_local_size(0)) { const int i10 = i0 % ne10; - *((global half *)(dst_ptr + i0*nb0)) = *((global half *)(src0_ptr + i0*nb00)) + *((global half *)(src1_ptr + i10*nb10)); + + half v0, v1; + if (type_src0 == 1) { + v0 = convert_half(*((global float *)(src0_ptr + i0*nb00))); + } else { + v0 = *((global half *)(src0_ptr + i0*nb00)); + } + + if (type_src1 == 1) { + v1 = convert_half(*((global float *)(src1_ptr + i10*nb10))); + } else { + v1 = *((global half 
*)(src1_ptr + i10*nb10)); + } + + *((global half *)(dst_ptr + i0*nb0)) = v0 + v1; } } kernel void kernel_add_row_f16( - global half4 * src0, + global char * src0, ulong offset0, - global half4 * src1, + global char * src1, ulong offset1, global half4 * dst, ulong offsetd, - int ne + int ne, + int type_src0, + int type_src1 ) { - src0 = (global half4*)((global char*)src0 + offset0); - src1 = (global half4*)((global char*)src1 + offset1); dst = (global half4*)((global char*)dst + offsetd); // This performs better than using %. uint gid = get_global_id(0); uint idx1 = gid - (gid/ne)*ne; // get_global_id(0) % ne - dst[gid] = src0[gid] + src1[idx1]; + + half4 v0, v1; + if (type_src0 == 1) { + global float4* src0_f32 = (global float4*)((global char*)src0 + offset0); + v0 = convert_half4(src0_f32[gid]); + } else { + global half4* src0_f16 = (global half4*)((global char*)src0 + offset0); + v0 = src0_f16[gid]; + } + + if (type_src1 == 1) { + global float4* src1_f32 = (global float4*)((global char*)src1 + offset1); + v1 = convert_half4(src1_f32[idx1]); + } else { + global half4* src1_f16 = (global half4*)((global char*)src1 + offset1); + v1 = src1_f16[idx1]; + } + + dst[gid] = v0 + v1; } diff --git a/ggml/src/ggml-opencl/kernels/flash_attn_f16.cl b/ggml/src/ggml-opencl/kernels/flash_attn_f16.cl new file mode 100644 index 0000000000..fea06867e1 --- /dev/null +++ b/ggml/src/ggml-opencl/kernels/flash_attn_f16.cl @@ -0,0 +1,343 @@ +#pragma OPENCL EXTENSION cl_khr_fp16 : enable + +#define ACC_TYPE float +#define ACC_TYPE4 float4 +#define DATA_TYPE half +#define DATA_TYPE4 half4 +#define CONVERT_ACC4(x) convert_float4(x) +#define CONVERT_DATA4(x) convert_half4(x) + +#define DK_VEC (DK/4) +#define DV_VEC (DV/4) +#define WG_SIZE (BLOCK_M) +#define Q1_WG_SIZE 64 + +inline float get_alibi_slope( + const float max_bias, const uint h, const uint n_head_log2, const float m0, const float m1 +) { + if (max_bias <= 0.0f) { + return 1.0f; + } + const float base = h < n_head_log2 ? m0 : m1; + const int exph = h < n_head_log2 ? 
h + 1 : 2*(h - n_head_log2) + 1; + + return pow(base, exph); +} +__kernel void flash_attn_f16( + const global void * q_void, ulong q_offset, + const global void * k_void, ulong k_offset, + const global void * v_void, ulong v_offset, + global void * o_void, ulong o_offset, + const float scale, + const int n_q, + const int n_kv, + const int is_causal, + const int n_head, + const ulong q_nb1, const ulong q_nb2, const ulong q_nb3, + const ulong k_nb1, const ulong k_nb2, const ulong k_nb3, + const ulong v_nb1, const ulong v_nb2, const ulong v_nb3, + const ulong o_nb1, const ulong o_nb2, const ulong o_nb3, + const float max_bias, + const float m0, + const float m1, + const int n_head_log2, + const float logit_softcap, + const int n_head_kv, + const global void* mask_void, + const ulong mask_offset, + const ulong mask_nb1, + const ulong mask_nb2, + const ulong mask_nb3, + const int mask_ne2, + const int mask_ne3 +) { + const int tid = get_local_id(0); + const int block_q_idx = get_group_id(0); + const int head_batch_idx = get_global_id(1); + + const int my_query_row = block_q_idx * BLOCK_M + tid; + + const int batch_idx = head_batch_idx / n_head; + const int head_idx = head_batch_idx % n_head; + + const int gqa_ratio = n_head / n_head_kv; + const int head_kv_idx = head_idx / gqa_ratio; + + const global char* q_base = (const global char*)q_void + q_offset; + const global char* k_base = (const global char*)k_void + k_offset; + const global char* v_base = (const global char*)v_void + v_offset; + global char* o_base = (global char*)o_void + o_offset; + + const global char* mask_base = NULL; + if (mask_void != NULL) { + const int mask_head_idx = head_idx % mask_ne2; + const int mask_batch_idx = batch_idx % mask_ne3; + mask_base = (const global char*)mask_void + mask_offset + mask_batch_idx * mask_nb3 + mask_head_idx * mask_nb2; + } + + ACC_TYPE4 q_priv[DK_VEC]; + if (my_query_row < n_q) { + const ulong q_row_offset = batch_idx * q_nb3 + head_idx * q_nb2 + my_query_row * q_nb1; + const global DATA_TYPE4* q_ptr = (const global DATA_TYPE4*)(q_base + q_row_offset); + #pragma unroll + for (int i = 0; i < DK_VEC; ++i) { + q_priv[i] = CONVERT_ACC4(q_ptr[i]); + } + } + + ACC_TYPE4 o_acc[DV_VEC]; + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_acc[i] = (ACC_TYPE4)(0.0f); + } + ACC_TYPE m_i = -INFINITY; + ACC_TYPE l_i = 0.0f; + + float slope = get_alibi_slope(max_bias, head_idx, n_head_log2, m0, m1); + + __local DATA_TYPE4 l_k[BLOCK_N][DK_VEC]; + __local DATA_TYPE4 l_v[BLOCK_N][DV_VEC]; + + for (int k_start = 0; k_start < n_kv; k_start += BLOCK_N) { + for (int i = tid; i < BLOCK_N * DK_VEC; i += WG_SIZE) { + const int row = i / DK_VEC; + const int col = i % DK_VEC; + const int k_row_idx = k_start + row; + if (k_row_idx < n_kv) { + const ulong k_row_offset = batch_idx * k_nb3 + head_kv_idx * k_nb2 + k_row_idx * k_nb1; + l_k[row][col] = ((__global DATA_TYPE4*)(k_base + k_row_offset))[col]; + } + } + for (int i = tid; i < BLOCK_N * DV_VEC; i += WG_SIZE) { + const int row = i / DV_VEC; + const int col = i % DV_VEC; + const int v_row_idx = k_start + row; + if (v_row_idx < n_kv) { + const ulong v_row_offset = batch_idx * v_nb3 + head_kv_idx * v_nb2 + v_row_idx * v_nb1; + l_v[row][col] = ((__global DATA_TYPE4*)(v_base + v_row_offset))[col]; + } + } + barrier(CLK_LOCAL_MEM_FENCE); + + if (my_query_row >= n_q) { + continue; + } + + for (int j = 0; j < BLOCK_N; j += 2) { + const int k_row0 = k_start + j; + const int k_row1 = k_start + j + 1; + + ACC_TYPE4 dot_acc0 = (ACC_TYPE4)(0.0f); + ACC_TYPE4 dot_acc1 = 
(ACC_TYPE4)(0.0f); + #pragma unroll + for (int k = 0; k < DK_VEC; k++) { + dot_acc0 = mad(q_priv[k], CONVERT_ACC4(l_k[j][k]), dot_acc0); + dot_acc1 = mad(q_priv[k], CONVERT_ACC4(l_k[j+1][k]), dot_acc1); + } + ACC_TYPE score0 = (dot_acc0.s0 + dot_acc0.s1 + dot_acc0.s2 + dot_acc0.s3) * scale; + ACC_TYPE score1 = (dot_acc1.s0 + dot_acc1.s1 + dot_acc1.s2 + dot_acc1.s3) * scale; + + if (is_causal) { + if (k_row0 > (n_kv - n_q + my_query_row)) score0 = -INFINITY; + if (k_row1 > (n_kv - n_q + my_query_row)) score1 = -INFINITY; + } + + if (k_row0 >= n_kv) score0 = -INFINITY; + if (k_row1 >= n_kv) score1 = -INFINITY; + + if (mask_base != NULL) { + const global DATA_TYPE* mask_ptr = (const global DATA_TYPE*)(mask_base + my_query_row * mask_nb1); + if (k_row0 < n_kv) score0 += slope * (ACC_TYPE)mask_ptr[k_row0]; + if (k_row1 < n_kv) score1 += slope * (ACC_TYPE)mask_ptr[k_row1]; + } + + if (logit_softcap > 0.0f) { + score0 = logit_softcap * tanh(score0 / logit_softcap); + score1 = logit_softcap * tanh(score1 / logit_softcap); + } + + const ACC_TYPE m_new = max(m_i, max(score0, score1)); + const ACC_TYPE p0 = exp(score0 - m_new); + const ACC_TYPE p1 = exp(score1 - m_new); + const ACC_TYPE scale_prev = exp(m_i - m_new); + + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_acc[i] = o_acc[i] * scale_prev + p0 * CONVERT_ACC4(l_v[j][i]) + p1 * CONVERT_ACC4(l_v[j+1][i]); + } + l_i = l_i * scale_prev + p0 + p1; + m_i = m_new; + } + } + + if (my_query_row < n_q) { + const ulong o_row_offset = batch_idx * o_nb3 + my_query_row * o_nb2 + head_idx * o_nb1; + global DATA_TYPE4 *o_row = (global DATA_TYPE4 *)(o_base + o_row_offset); + if (l_i > 0.0f) { + const ACC_TYPE l_inv = 1.0f / l_i; + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_row[i] = CONVERT_DATA4(o_acc[i] * l_inv); + } + } else { + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_row[i] = (DATA_TYPE4)(0.0f); + } + } + } +} + +__kernel void flash_attn_f16_q1( + const global void * q_void, ulong q_offset, + const global void * k_void, ulong k_offset, + const global void * v_void, ulong v_offset, + global void * o_void, ulong o_offset, + const float scale, + const int n_q, + const int n_kv, + const int is_causal, + const int n_head, + const ulong q_nb1, const ulong q_nb2, const ulong q_nb3, + const ulong k_nb1, const ulong k_nb2, const ulong k_nb3, + const ulong v_nb1, const ulong v_nb2, const ulong v_nb3, + const ulong o_nb1, const ulong o_nb2, const ulong o_nb3, + const float max_bias, + const float m0, + const float m1, + const int n_head_log2, + const float logit_softcap, + const int n_head_kv, + const global void* mask_void, + const ulong mask_offset, + const ulong mask_nb1, + const ulong mask_nb2, + const ulong mask_nb3, + const int mask_ne2, + const int mask_ne3 +) { + const int tid = get_local_id(0); + const int head_batch_idx = get_global_id(1); + + const int batch_idx = head_batch_idx / n_head; + const int head_idx = head_batch_idx % n_head; + + const int gqa_ratio = n_head / n_head_kv; + const int head_kv_idx = head_idx / gqa_ratio; + + const global char* q_base = (const global char*)q_void + q_offset; + const global char* k_base = (const global char*)k_void + k_offset; + const global char* v_base = (const global char*)v_void + v_offset; + global char* o_base = (global char*)o_void + o_offset; + + const global char* mask_base = NULL; + if (mask_void != NULL) { + const int mask_head_idx = head_idx % mask_ne2; + const int mask_batch_idx = batch_idx % mask_ne3; + mask_base = (const global char*)mask_void + mask_offset + 
mask_batch_idx * mask_nb3 + mask_head_idx * mask_nb2; + } + + ACC_TYPE4 q_priv[DK_VEC]; + const ulong q_row_offset = batch_idx * q_nb3 + head_idx * q_nb2; + const global DATA_TYPE4* q_ptr = (const global DATA_TYPE4*)(q_base + q_row_offset); + #pragma unroll + for (int i = 0; i < DK_VEC; ++i) { + q_priv[i] = CONVERT_ACC4(q_ptr[i]); + } + + float slope = get_alibi_slope(max_bias, head_idx, n_head_log2, m0, m1); + + ACC_TYPE m_i = -INFINITY; + for (int k_idx = tid; k_idx < n_kv; k_idx += Q1_WG_SIZE) { + const ulong k_row_offset = batch_idx * k_nb3 + head_kv_idx * k_nb2 + k_idx * k_nb1; + const global DATA_TYPE4* k_ptr = (const global DATA_TYPE4*)(k_base + k_row_offset); + ACC_TYPE4 dot_acc = (ACC_TYPE4)(0.0f); + #pragma unroll + for (int k = 0; k < DK_VEC; k++) { + dot_acc = mad(q_priv[k], CONVERT_ACC4(k_ptr[k]), dot_acc); + } + ACC_TYPE score = (dot_acc.s0 + dot_acc.s1 + dot_acc.s2 + dot_acc.s3) * scale; + if (mask_base != NULL) { + const global DATA_TYPE* mask_ptr = (const global DATA_TYPE*)(mask_base); + score += slope * (ACC_TYPE)mask_ptr[k_idx]; + } + if (logit_softcap > 0.0f) { + score = logit_softcap * tanh(score / logit_softcap); + } + m_i = max(m_i, score); + } + + __local ACC_TYPE local_m[Q1_WG_SIZE]; + local_m[tid] = m_i; + barrier(CLK_LOCAL_MEM_FENCE); + #pragma unroll + for (int s = Q1_WG_SIZE / 2; s > 0; s >>= 1) { + if (tid < s) local_m[tid] = max(local_m[tid], local_m[tid + s]); + barrier(CLK_LOCAL_MEM_FENCE); + } + const ACC_TYPE m_final = local_m[0]; + + ACC_TYPE4 o_acc[DV_VEC]; + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) o_acc[i] = (ACC_TYPE4)(0.0f); + ACC_TYPE l_i = 0.0f; + + for (int k_idx = tid; k_idx < n_kv; k_idx += Q1_WG_SIZE) { + const ulong k_row_offset = batch_idx * k_nb3 + head_kv_idx * k_nb2 + k_idx * k_nb1; + const ulong v_row_offset = batch_idx * v_nb3 + head_kv_idx * v_nb2 + k_idx * v_nb1; + const global DATA_TYPE4* k_ptr = (const global DATA_TYPE4*)(k_base + k_row_offset); + const global DATA_TYPE4* v_ptr = (const global DATA_TYPE4*)(v_base + v_row_offset); + ACC_TYPE4 dot_acc = (ACC_TYPE4)(0.0f); + #pragma unroll + for (int k = 0; k < DK_VEC; k++) { + dot_acc = mad(q_priv[k], CONVERT_ACC4(k_ptr[k]), dot_acc); + } + ACC_TYPE score = (dot_acc.s0 + dot_acc.s1 + dot_acc.s2 + dot_acc.s3) * scale; + if (mask_base != NULL) { + const global DATA_TYPE* mask_ptr = (const global DATA_TYPE*)(mask_base); + score += slope * (ACC_TYPE)mask_ptr[k_idx]; + } + if (logit_softcap > 0.0f) { + score = logit_softcap * tanh(score / logit_softcap); + } + const ACC_TYPE p = exp(score - m_final); + l_i += p; + #pragma unroll + for (int i = 0; i < DV_VEC; i++) { + o_acc[i] = mad(p, CONVERT_ACC4(v_ptr[i]), o_acc[i]); + } + } + + __local ACC_TYPE local_l[Q1_WG_SIZE]; + __local ACC_TYPE4 local_o_comp[Q1_WG_SIZE]; + local_l[tid] = l_i; + barrier(CLK_LOCAL_MEM_FENCE); + #pragma unroll + for (int s = Q1_WG_SIZE / 2; s > 0; s >>= 1) { + if (tid < s) local_l[tid] += local_l[tid + s]; + barrier(CLK_LOCAL_MEM_FENCE); + } + + const ulong o_row_offset = batch_idx * o_nb3 + head_idx * o_nb1; + global DATA_TYPE4 *o_row = (global DATA_TYPE4 *)(o_base + o_row_offset); + const ACC_TYPE l_final = local_l[0]; + + if (l_final > 0.0f) { + const ACC_TYPE l_inv = 1.0f / l_final; + for (int i = 0; i < DV_VEC; i++) { + local_o_comp[tid] = o_acc[i]; + barrier(CLK_LOCAL_MEM_FENCE); + #pragma unroll + for (int s = Q1_WG_SIZE / 2; s > 0; s >>= 1) { + if (tid < s) local_o_comp[tid] += local_o_comp[tid + s]; + barrier(CLK_LOCAL_MEM_FENCE); + } + if (tid == 0) { + o_row[i] = CONVERT_DATA4(local_o_comp[0] * 
l_inv); + } + } + } else if (tid == 0) { + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) o_row[i] = (DATA_TYPE4)(0.0f); + } +} diff --git a/ggml/src/ggml-opencl/kernels/flash_attn_f32.cl b/ggml/src/ggml-opencl/kernels/flash_attn_f32.cl new file mode 100644 index 0000000000..2d657327d6 --- /dev/null +++ b/ggml/src/ggml-opencl/kernels/flash_attn_f32.cl @@ -0,0 +1,343 @@ +#pragma OPENCL EXTENSION cl_khr_fp16 : enable + +#define ACC_TYPE float +#define ACC_TYPE4 float4 +#define DATA_TYPE float +#define DATA_TYPE4 float4 +#define CONVERT_ACC4(x) (x) +#define CONVERT_DATA4(x) (x) + +#define DK_VEC (DK/4) +#define DV_VEC (DV/4) +#define WG_SIZE (BLOCK_M) +#define Q1_WG_SIZE 64 + +inline float get_alibi_slope( + const float max_bias, const uint h, const uint n_head_log2, const float m0, const float m1 +) { + if (max_bias <= 0.0f) { + return 1.0f; + } + const float base = h < n_head_log2 ? m0 : m1; + const int exph = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + return pow(base, exph); +} +__kernel void flash_attn_f32( + const global void * q_void, ulong q_offset, + const global void * k_void, ulong k_offset, + const global void * v_void, ulong v_offset, + global void * o_void, ulong o_offset, + const float scale, + const int n_q, + const int n_kv, + const int is_causal, + const int n_head, + const ulong q_nb1, const ulong q_nb2, const ulong q_nb3, + const ulong k_nb1, const ulong k_nb2, const ulong k_nb3, + const ulong v_nb1, const ulong v_nb2, const ulong v_nb3, + const ulong o_nb1, const ulong o_nb2, const ulong o_nb3, + const float max_bias, + const float m0, + const float m1, + const int n_head_log2, + const float logit_softcap, + const int n_head_kv, + const global void* mask_void, + const ulong mask_offset, + const ulong mask_nb1, + const ulong mask_nb2, + const ulong mask_nb3, + const int mask_ne2, + const int mask_ne3 +) { + const int tid = get_local_id(0); + const int block_q_idx = get_group_id(0); + const int head_batch_idx = get_global_id(1); + + const int my_query_row = block_q_idx * BLOCK_M + tid; + + const int batch_idx = head_batch_idx / n_head; + const int head_idx = head_batch_idx % n_head; + + const int gqa_ratio = n_head / n_head_kv; + const int head_kv_idx = head_idx / gqa_ratio; + + const global char* q_base = (const global char*)q_void + q_offset; + const global char* k_base = (const global char*)k_void + k_offset; + const global char* v_base = (const global char*)v_void + v_offset; + global char* o_base = (global char*)o_void + o_offset; + + const global char* mask_base = NULL; + if (mask_void != NULL) { + const int mask_head_idx = head_idx % mask_ne2; + const int mask_batch_idx = batch_idx % mask_ne3; + mask_base = (const global char*)mask_void + mask_offset + mask_batch_idx * mask_nb3 + mask_head_idx * mask_nb2; + } + + ACC_TYPE4 q_priv[DK_VEC]; + if (my_query_row < n_q) { + const ulong q_row_offset = batch_idx * q_nb3 + head_idx * q_nb2 + my_query_row * q_nb1; + const global DATA_TYPE4* q_ptr = (const global DATA_TYPE4*)(q_base + q_row_offset); + #pragma unroll + for (int i = 0; i < DK_VEC; ++i) { + q_priv[i] = CONVERT_ACC4(q_ptr[i]); + } + } + + ACC_TYPE4 o_acc[DV_VEC]; + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_acc[i] = (ACC_TYPE4)(0.0f); + } + ACC_TYPE m_i = -INFINITY; + ACC_TYPE l_i = 0.0f; + + float slope = get_alibi_slope(max_bias, head_idx, n_head_log2, m0, m1); + + __local DATA_TYPE4 l_k[BLOCK_N][DK_VEC]; + __local DATA_TYPE4 l_v[BLOCK_N][DV_VEC]; + + for (int k_start = 0; k_start < n_kv; k_start += BLOCK_N) { + for (int i = tid; i < 
BLOCK_N * DK_VEC; i += WG_SIZE) { + const int row = i / DK_VEC; + const int col = i % DK_VEC; + const int k_row_idx = k_start + row; + if (k_row_idx < n_kv) { + const ulong k_row_offset = batch_idx * k_nb3 + head_kv_idx * k_nb2 + k_row_idx * k_nb1; + l_k[row][col] = ((__global DATA_TYPE4*)(k_base + k_row_offset))[col]; + } + } + for (int i = tid; i < BLOCK_N * DV_VEC; i += WG_SIZE) { + const int row = i / DV_VEC; + const int col = i % DV_VEC; + const int v_row_idx = k_start + row; + if (v_row_idx < n_kv) { + const ulong v_row_offset = batch_idx * v_nb3 + head_kv_idx * v_nb2 + v_row_idx * v_nb1; + l_v[row][col] = ((__global DATA_TYPE4*)(v_base + v_row_offset))[col]; + } + } + barrier(CLK_LOCAL_MEM_FENCE); + + if (my_query_row >= n_q) { + continue; + } + + for (int j = 0; j < BLOCK_N; j += 2) { + const int k_row0 = k_start + j; + const int k_row1 = k_start + j + 1; + + ACC_TYPE4 dot_acc0 = (ACC_TYPE4)(0.0f); + ACC_TYPE4 dot_acc1 = (ACC_TYPE4)(0.0f); + #pragma unroll + for (int k = 0; k < DK_VEC; k++) { + dot_acc0 = mad(q_priv[k], CONVERT_ACC4(l_k[j][k]), dot_acc0); + dot_acc1 = mad(q_priv[k], CONVERT_ACC4(l_k[j+1][k]), dot_acc1); + } + ACC_TYPE score0 = (dot_acc0.s0 + dot_acc0.s1 + dot_acc0.s2 + dot_acc0.s3) * scale; + ACC_TYPE score1 = (dot_acc1.s0 + dot_acc1.s1 + dot_acc1.s2 + dot_acc1.s3) * scale; + + if (is_causal) { + if (k_row0 > (n_kv - n_q + my_query_row)) score0 = -INFINITY; + if (k_row1 > (n_kv - n_q + my_query_row)) score1 = -INFINITY; + } + + if (k_row0 >= n_kv) score0 = -INFINITY; + if (k_row1 >= n_kv) score1 = -INFINITY; + + if (mask_base != NULL) { + const global DATA_TYPE* mask_ptr = (const global DATA_TYPE*)(mask_base + my_query_row * mask_nb1); + if (k_row0 < n_kv) score0 += slope * (ACC_TYPE)mask_ptr[k_row0]; + if (k_row1 < n_kv) score1 += slope * (ACC_TYPE)mask_ptr[k_row1]; + } + + if (logit_softcap > 0.0f) { + score0 = logit_softcap * tanh(score0 / logit_softcap); + score1 = logit_softcap * tanh(score1 / logit_softcap); + } + + const ACC_TYPE m_new = max(m_i, max(score0, score1)); + const ACC_TYPE p0 = exp(score0 - m_new); + const ACC_TYPE p1 = exp(score1 - m_new); + const ACC_TYPE scale_prev = exp(m_i - m_new); + + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_acc[i] = o_acc[i] * scale_prev + p0 * CONVERT_ACC4(l_v[j][i]) + p1 * CONVERT_ACC4(l_v[j+1][i]); + } + l_i = l_i * scale_prev + p0 + p1; + m_i = m_new; + } + } + + if (my_query_row < n_q) { + const ulong o_row_offset = batch_idx * o_nb3 + my_query_row * o_nb2 + head_idx * o_nb1; + global DATA_TYPE4 *o_row = (global DATA_TYPE4 *)(o_base + o_row_offset); + if (l_i > 0.0f) { + const ACC_TYPE l_inv = 1.0f / l_i; + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_row[i] = CONVERT_DATA4(o_acc[i] * l_inv); + } + } else { + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_row[i] = (DATA_TYPE4)(0.0f); + } + } + } +} + +__kernel void flash_attn_f32_q1( + const global void * q_void, ulong q_offset, + const global void * k_void, ulong k_offset, + const global void * v_void, ulong v_offset, + global void * o_void, ulong o_offset, + const float scale, + const int n_q, + const int n_kv, + const int is_causal, + const int n_head, + const ulong q_nb1, const ulong q_nb2, const ulong q_nb3, + const ulong k_nb1, const ulong k_nb2, const ulong k_nb3, + const ulong v_nb1, const ulong v_nb2, const ulong v_nb3, + const ulong o_nb1, const ulong o_nb2, const ulong o_nb3, + const float max_bias, + const float m0, + const float m1, + const int n_head_log2, + const float logit_softcap, + const int n_head_kv, + const 
global void* mask_void, + const ulong mask_offset, + const ulong mask_nb1, + const ulong mask_nb2, + const ulong mask_nb3, + const int mask_ne2, + const int mask_ne3 +) { + const int tid = get_local_id(0); + const int head_batch_idx = get_global_id(1); + + const int batch_idx = head_batch_idx / n_head; + const int head_idx = head_batch_idx % n_head; + + const int gqa_ratio = n_head / n_head_kv; + const int head_kv_idx = head_idx / gqa_ratio; + + const global char* q_base = (const global char*)q_void + q_offset; + const global char* k_base = (const global char*)k_void + k_offset; + const global char* v_base = (const global char*)v_void + v_offset; + global char* o_base = (global char*)o_void + o_offset; + + const global char* mask_base = NULL; + if (mask_void != NULL) { + const int mask_head_idx = head_idx % mask_ne2; + const int mask_batch_idx = batch_idx % mask_ne3; + mask_base = (const global char*)mask_void + mask_offset + mask_batch_idx * mask_nb3 + mask_head_idx * mask_nb2; + } + + ACC_TYPE4 q_priv[DK_VEC]; + const ulong q_row_offset = batch_idx * q_nb3 + head_idx * q_nb2; + const global DATA_TYPE4* q_ptr = (const global DATA_TYPE4*)(q_base + q_row_offset); + #pragma unroll + for (int i = 0; i < DK_VEC; ++i) { + q_priv[i] = CONVERT_ACC4(q_ptr[i]); + } + + float slope = get_alibi_slope(max_bias, head_idx, n_head_log2, m0, m1); + + ACC_TYPE m_i = -INFINITY; + for (int k_idx = tid; k_idx < n_kv; k_idx += Q1_WG_SIZE) { + const ulong k_row_offset = batch_idx * k_nb3 + head_kv_idx * k_nb2 + k_idx * k_nb1; + const global DATA_TYPE4* k_ptr = (const global DATA_TYPE4*)(k_base + k_row_offset); + ACC_TYPE4 dot_acc = (ACC_TYPE4)(0.0f); + #pragma unroll + for (int k = 0; k < DK_VEC; k++) { + dot_acc = mad(q_priv[k], CONVERT_ACC4(k_ptr[k]), dot_acc); + } + ACC_TYPE score = (dot_acc.s0 + dot_acc.s1 + dot_acc.s2 + dot_acc.s3) * scale; + if (mask_base != NULL) { + const global DATA_TYPE* mask_ptr = (const global DATA_TYPE*)(mask_base); + score += slope * (ACC_TYPE)mask_ptr[k_idx]; + } + if (logit_softcap > 0.0f) { + score = logit_softcap * tanh(score / logit_softcap); + } + m_i = max(m_i, score); + } + + __local ACC_TYPE local_m[Q1_WG_SIZE]; + local_m[tid] = m_i; + barrier(CLK_LOCAL_MEM_FENCE); + #pragma unroll + for (int s = Q1_WG_SIZE / 2; s > 0; s >>= 1) { + if (tid < s) local_m[tid] = max(local_m[tid], local_m[tid + s]); + barrier(CLK_LOCAL_MEM_FENCE); + } + const ACC_TYPE m_final = local_m[0]; + + ACC_TYPE4 o_acc[DV_VEC]; + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) o_acc[i] = (ACC_TYPE4)(0.0f); + ACC_TYPE l_i = 0.0f; + + for (int k_idx = tid; k_idx < n_kv; k_idx += Q1_WG_SIZE) { + const ulong k_row_offset = batch_idx * k_nb3 + head_kv_idx * k_nb2 + k_idx * k_nb1; + const ulong v_row_offset = batch_idx * v_nb3 + head_kv_idx * v_nb2 + k_idx * v_nb1; + const global DATA_TYPE4* k_ptr = (const global DATA_TYPE4*)(k_base + k_row_offset); + const global DATA_TYPE4* v_ptr = (const global DATA_TYPE4*)(v_base + v_row_offset); + ACC_TYPE4 dot_acc = (ACC_TYPE4)(0.0f); + #pragma unroll + for (int k = 0; k < DK_VEC; k++) { + dot_acc = mad(q_priv[k], CONVERT_ACC4(k_ptr[k]), dot_acc); + } + ACC_TYPE score = (dot_acc.s0 + dot_acc.s1 + dot_acc.s2 + dot_acc.s3) * scale; + if (mask_base != NULL) { + const global DATA_TYPE* mask_ptr = (const global DATA_TYPE*)(mask_base); + score += slope * (ACC_TYPE)mask_ptr[k_idx]; + } + if (logit_softcap > 0.0f) { + score = logit_softcap * tanh(score / logit_softcap); + } + const ACC_TYPE p = exp(score - m_final); + l_i += p; + #pragma unroll + for (int i = 0; i < DV_VEC; 
i++) { + o_acc[i] = mad(p, CONVERT_ACC4(v_ptr[i]), o_acc[i]); + } + } + + __local ACC_TYPE local_l[Q1_WG_SIZE]; + __local ACC_TYPE4 local_o_comp[Q1_WG_SIZE]; + local_l[tid] = l_i; + barrier(CLK_LOCAL_MEM_FENCE); + #pragma unroll + for (int s = Q1_WG_SIZE / 2; s > 0; s >>= 1) { + if (tid < s) local_l[tid] += local_l[tid + s]; + barrier(CLK_LOCAL_MEM_FENCE); + } + + const ulong o_row_offset = batch_idx * o_nb3 + head_idx * o_nb1; + global DATA_TYPE4 *o_row = (global DATA_TYPE4 *)(o_base + o_row_offset); + const ACC_TYPE l_final = local_l[0]; + + if (l_final > 0.0f) { + const ACC_TYPE l_inv = 1.0f / l_final; + for (int i = 0; i < DV_VEC; i++) { + local_o_comp[tid] = o_acc[i]; + barrier(CLK_LOCAL_MEM_FENCE); + #pragma unroll + for (int s = Q1_WG_SIZE / 2; s > 0; s >>= 1) { + if (tid < s) local_o_comp[tid] += local_o_comp[tid + s]; + barrier(CLK_LOCAL_MEM_FENCE); + } + if (tid == 0) { + o_row[i] = CONVERT_DATA4(local_o_comp[0] * l_inv); + } + } + } else if (tid == 0) { + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) o_row[i] = (DATA_TYPE4)(0.0f); + } +} diff --git a/ggml/src/ggml-opencl/kernels/flash_attn_f32_f16.cl b/ggml/src/ggml-opencl/kernels/flash_attn_f32_f16.cl new file mode 100644 index 0000000000..7067bd2591 --- /dev/null +++ b/ggml/src/ggml-opencl/kernels/flash_attn_f32_f16.cl @@ -0,0 +1,346 @@ +#pragma OPENCL EXTENSION cl_khr_fp16 : enable + +#define ACC_TYPE float +#define ACC_TYPE4 float4 +#define Q_DATA_TYPE4 float4 +#define KV_DATA_TYPE4 half4 +#define O_DATA_TYPE4 float4 +#define MASK_DATA_TYPE half +#define CONVERT_Q_ACC4(x) (x) +#define CONVERT_KV_ACC4(x) convert_float4(x) +#define CONVERT_O_DATA4(x) (x) + +#define DK_VEC (DK/4) +#define DV_VEC (DV/4) +#define WG_SIZE (BLOCK_M) +#define Q1_WG_SIZE 64 + +inline float get_alibi_slope( + const float max_bias, const uint h, const uint n_head_log2, const float m0, const float m1 +) { + if (max_bias <= 0.0f) { + return 1.0f; + } + const float base = h < n_head_log2 ? m0 : m1; + const int exph = h < n_head_log2 ? 
h + 1 : 2*(h - n_head_log2) + 1; + + return pow(base, exph); +} +__kernel void flash_attn_f32_f16( + const global void * q_void, ulong q_offset, + const global void * k_void, ulong k_offset, + const global void * v_void, ulong v_offset, + global void * o_void, ulong o_offset, + const float scale, + const int n_q, + const int n_kv, + const int is_causal, + const int n_head, + const ulong q_nb1, const ulong q_nb2, const ulong q_nb3, + const ulong k_nb1, const ulong k_nb2, const ulong k_nb3, + const ulong v_nb1, const ulong v_nb2, const ulong v_nb3, + const ulong o_nb1, const ulong o_nb2, const ulong o_nb3, + const float max_bias, + const float m0, + const float m1, + const int n_head_log2, + const float logit_softcap, + const int n_head_kv, + const global void* mask_void, + const ulong mask_offset, + const ulong mask_nb1, + const ulong mask_nb2, + const ulong mask_nb3, + const int mask_ne2, + const int mask_ne3 +) { + const int tid = get_local_id(0); + const int block_q_idx = get_group_id(0); + const int head_batch_idx = get_global_id(1); + + const int my_query_row = block_q_idx * BLOCK_M + tid; + + const int batch_idx = head_batch_idx / n_head; + const int head_idx = head_batch_idx % n_head; + + const int gqa_ratio = n_head / n_head_kv; + const int head_kv_idx = head_idx / gqa_ratio; + + const global char* q_base = (const global char*)q_void + q_offset; + const global char* k_base = (const global char*)k_void + k_offset; + const global char* v_base = (const global char*)v_void + v_offset; + global char* o_base = (global char*)o_void + o_offset; + + const global char* mask_base = NULL; + if (mask_void != NULL) { + const int mask_head_idx = head_idx % mask_ne2; + const int mask_batch_idx = batch_idx % mask_ne3; + mask_base = (const global char*)mask_void + mask_offset + mask_batch_idx * mask_nb3 + mask_head_idx * mask_nb2; + } + + ACC_TYPE4 q_priv[DK_VEC]; + if (my_query_row < n_q) { + const ulong q_row_offset = batch_idx * q_nb3 + head_idx * q_nb2 + my_query_row * q_nb1; + const global Q_DATA_TYPE4* q_ptr = (const global Q_DATA_TYPE4*)(q_base + q_row_offset); + #pragma unroll + for (int i = 0; i < DK_VEC; ++i) { + q_priv[i] = CONVERT_Q_ACC4(q_ptr[i]); + } + } + + ACC_TYPE4 o_acc[DV_VEC]; + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_acc[i] = (ACC_TYPE4)(0.0f); + } + ACC_TYPE m_i = -INFINITY; + ACC_TYPE l_i = 0.0f; + + float slope = get_alibi_slope(max_bias, head_idx, n_head_log2, m0, m1); + + __local KV_DATA_TYPE4 l_k[BLOCK_N][DK_VEC]; + __local KV_DATA_TYPE4 l_v[BLOCK_N][DV_VEC]; + + for (int k_start = 0; k_start < n_kv; k_start += BLOCK_N) { + for (int i = tid; i < BLOCK_N * DK_VEC; i += WG_SIZE) { + const int row = i / DK_VEC; + const int col = i % DK_VEC; + const int k_row_idx = k_start + row; + if (k_row_idx < n_kv) { + const ulong k_row_offset = batch_idx * k_nb3 + head_kv_idx * k_nb2 + k_row_idx * k_nb1; + l_k[row][col] = ((__global KV_DATA_TYPE4*)(k_base + k_row_offset))[col]; + } + } + for (int i = tid; i < BLOCK_N * DV_VEC; i += WG_SIZE) { + const int row = i / DV_VEC; + const int col = i % DV_VEC; + const int v_row_idx = k_start + row; + if (v_row_idx < n_kv) { + const ulong v_row_offset = batch_idx * v_nb3 + head_kv_idx * v_nb2 + v_row_idx * v_nb1; + l_v[row][col] = ((__global KV_DATA_TYPE4*)(v_base + v_row_offset))[col]; + } + } + barrier(CLK_LOCAL_MEM_FENCE); + + if (my_query_row >= n_q) { + continue; + } + + for (int j = 0; j < BLOCK_N; j += 2) { + const int k_row0 = k_start + j; + const int k_row1 = k_start + j + 1; + + ACC_TYPE4 dot_acc0 = (ACC_TYPE4)(0.0f); + 
ACC_TYPE4 dot_acc1 = (ACC_TYPE4)(0.0f); + #pragma unroll + for (int k = 0; k < DK_VEC; k++) { + dot_acc0 = mad(q_priv[k], CONVERT_KV_ACC4(l_k[j][k]), dot_acc0); + dot_acc1 = mad(q_priv[k], CONVERT_KV_ACC4(l_k[j+1][k]), dot_acc1); + } + ACC_TYPE score0 = (dot_acc0.s0 + dot_acc0.s1 + dot_acc0.s2 + dot_acc0.s3) * scale; + ACC_TYPE score1 = (dot_acc1.s0 + dot_acc1.s1 + dot_acc1.s2 + dot_acc1.s3) * scale; + + if (is_causal) { + if (k_row0 > (n_kv - n_q + my_query_row)) score0 = -INFINITY; + if (k_row1 > (n_kv - n_q + my_query_row)) score1 = -INFINITY; + } + + if (k_row0 >= n_kv) score0 = -INFINITY; + if (k_row1 >= n_kv) score1 = -INFINITY; + + if (mask_base != NULL) { + const global MASK_DATA_TYPE* mask_ptr = (const global MASK_DATA_TYPE*)(mask_base + my_query_row * mask_nb1); + if (k_row0 < n_kv) score0 += slope * (ACC_TYPE)mask_ptr[k_row0]; + if (k_row1 < n_kv) score1 += slope * (ACC_TYPE)mask_ptr[k_row1]; + } + + if (logit_softcap > 0.0f) { + score0 = logit_softcap * tanh(score0 / logit_softcap); + score1 = logit_softcap * tanh(score1 / logit_softcap); + } + + const ACC_TYPE m_new = max(m_i, max(score0, score1)); + const ACC_TYPE p0 = exp(score0 - m_new); + const ACC_TYPE p1 = exp(score1 - m_new); + const ACC_TYPE scale_prev = exp(m_i - m_new); + + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_acc[i] = o_acc[i] * scale_prev + p0 * CONVERT_KV_ACC4(l_v[j][i]) + p1 * CONVERT_KV_ACC4(l_v[j+1][i]); + } + l_i = l_i * scale_prev + p0 + p1; + m_i = m_new; + } + } + + if (my_query_row < n_q) { + const ulong o_row_offset = batch_idx * o_nb3 + my_query_row * o_nb2 + head_idx * o_nb1; + global O_DATA_TYPE4 *o_row = (global O_DATA_TYPE4 *)(o_base + o_row_offset); + if (l_i > 0.0f) { + const ACC_TYPE l_inv = 1.0f / l_i; + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_row[i] = CONVERT_O_DATA4(o_acc[i] * l_inv); + } + } else { + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) { + o_row[i] = (O_DATA_TYPE4)(0.0f); + } + } + } +} + +__kernel void flash_attn_f32_f16_q1( + const global void * q_void, ulong q_offset, + const global void * k_void, ulong k_offset, + const global void * v_void, ulong v_offset, + global void * o_void, ulong o_offset, + const float scale, + const int n_q, + const int n_kv, + const int is_causal, + const int n_head, + const ulong q_nb1, const ulong q_nb2, const ulong q_nb3, + const ulong k_nb1, const ulong k_nb2, const ulong k_nb3, + const ulong v_nb1, const ulong v_nb2, const ulong v_nb3, + const ulong o_nb1, const ulong o_nb2, const ulong o_nb3, + const float max_bias, + const float m0, + const float m1, + const int n_head_log2, + const float logit_softcap, + const int n_head_kv, + const global void* mask_void, + const ulong mask_offset, + const ulong mask_nb1, + const ulong mask_nb2, + const ulong mask_nb3, + const int mask_ne2, + const int mask_ne3 +) { + const int tid = get_local_id(0); + const int head_batch_idx = get_global_id(1); + + const int batch_idx = head_batch_idx / n_head; + const int head_idx = head_batch_idx % n_head; + + const int gqa_ratio = n_head / n_head_kv; + const int head_kv_idx = head_idx / gqa_ratio; + + const global char* q_base = (const global char*)q_void + q_offset; + const global char* k_base = (const global char*)k_void + k_offset; + const global char* v_base = (const global char*)v_void + v_offset; + global char* o_base = (global char*)o_void + o_offset; + + const global char* mask_base = NULL; + if (mask_void != NULL) { + const int mask_head_idx = head_idx % mask_ne2; + const int mask_batch_idx = batch_idx % mask_ne3; + mask_base 
= (const global char*)mask_void + mask_offset + mask_batch_idx * mask_nb3 + mask_head_idx * mask_nb2; + } + + ACC_TYPE4 q_priv[DK_VEC]; + const ulong q_row_offset = batch_idx * q_nb3 + head_idx * q_nb2; + const global Q_DATA_TYPE4* q_ptr = (const global Q_DATA_TYPE4*)(q_base + q_row_offset); + #pragma unroll + for (int i = 0; i < DK_VEC; ++i) { + q_priv[i] = CONVERT_Q_ACC4(q_ptr[i]); + } + + float slope = get_alibi_slope(max_bias, head_idx, n_head_log2, m0, m1); + + ACC_TYPE m_i = -INFINITY; + for (int k_idx = tid; k_idx < n_kv; k_idx += Q1_WG_SIZE) { + const ulong k_row_offset = batch_idx * k_nb3 + head_kv_idx * k_nb2 + k_idx * k_nb1; + const global KV_DATA_TYPE4* k_ptr = (const global KV_DATA_TYPE4*)(k_base + k_row_offset); + ACC_TYPE4 dot_acc = (ACC_TYPE4)(0.0f); + #pragma unroll + for (int k = 0; k < DK_VEC; k++) { + dot_acc = mad(q_priv[k], CONVERT_KV_ACC4(k_ptr[k]), dot_acc); + } + ACC_TYPE score = (dot_acc.s0 + dot_acc.s1 + dot_acc.s2 + dot_acc.s3) * scale; + if (mask_base != NULL) { + const global MASK_DATA_TYPE* mask_ptr = (const global MASK_DATA_TYPE*)(mask_base); + score += slope * (ACC_TYPE)mask_ptr[k_idx]; + } + if (logit_softcap > 0.0f) { + score = logit_softcap * tanh(score / logit_softcap); + } + m_i = max(m_i, score); + } + + __local ACC_TYPE local_m[Q1_WG_SIZE]; + local_m[tid] = m_i; + barrier(CLK_LOCAL_MEM_FENCE); + #pragma unroll + for (int s = Q1_WG_SIZE / 2; s > 0; s >>= 1) { + if (tid < s) local_m[tid] = max(local_m[tid], local_m[tid + s]); + barrier(CLK_LOCAL_MEM_FENCE); + } + const ACC_TYPE m_final = local_m[0]; + + ACC_TYPE4 o_acc[DV_VEC]; + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) o_acc[i] = (ACC_TYPE4)(0.0f); + ACC_TYPE l_i = 0.0f; + + for (int k_idx = tid; k_idx < n_kv; k_idx += Q1_WG_SIZE) { + const ulong k_row_offset = batch_idx * k_nb3 + head_kv_idx * k_nb2 + k_idx * k_nb1; + const ulong v_row_offset = batch_idx * v_nb3 + head_kv_idx * v_nb2 + k_idx * v_nb1; + const global KV_DATA_TYPE4* k_ptr = (const global KV_DATA_TYPE4*)(k_base + k_row_offset); + const global KV_DATA_TYPE4* v_ptr = (const global KV_DATA_TYPE4*)(v_base + v_row_offset); + ACC_TYPE4 dot_acc = (ACC_TYPE4)(0.0f); + #pragma unroll + for (int k = 0; k < DK_VEC; k++) { + dot_acc = mad(q_priv[k], CONVERT_KV_ACC4(k_ptr[k]), dot_acc); + } + ACC_TYPE score = (dot_acc.s0 + dot_acc.s1 + dot_acc.s2 + dot_acc.s3) * scale; + if (mask_base != NULL) { + const global MASK_DATA_TYPE* mask_ptr = (const global MASK_DATA_TYPE*)(mask_base); + score += slope * (ACC_TYPE)mask_ptr[k_idx]; + } + if (logit_softcap > 0.0f) { + score = logit_softcap * tanh(score / logit_softcap); + } + const ACC_TYPE p = exp(score - m_final); + l_i += p; + #pragma unroll + for (int i = 0; i < DV_VEC; i++) { + o_acc[i] = mad(p, CONVERT_KV_ACC4(v_ptr[i]), o_acc[i]); + } + } + + __local ACC_TYPE local_l[Q1_WG_SIZE]; + __local ACC_TYPE4 local_o_comp[Q1_WG_SIZE]; + local_l[tid] = l_i; + barrier(CLK_LOCAL_MEM_FENCE); + #pragma unroll + for (int s = Q1_WG_SIZE / 2; s > 0; s >>= 1) { + if (tid < s) local_l[tid] += local_l[tid + s]; + barrier(CLK_LOCAL_MEM_FENCE); + } + + const ulong o_row_offset = batch_idx * o_nb3 + head_idx * o_nb1; + global O_DATA_TYPE4 *o_row = (global O_DATA_TYPE4 *)(o_base + o_row_offset); + const ACC_TYPE l_final = local_l[0]; + + if (l_final > 0.0f) { + const ACC_TYPE l_inv = 1.0f / l_final; + for (int i = 0; i < DV_VEC; i++) { + local_o_comp[tid] = o_acc[i]; + barrier(CLK_LOCAL_MEM_FENCE); + #pragma unroll + for (int s = Q1_WG_SIZE / 2; s > 0; s >>= 1) { + if (tid < s) local_o_comp[tid] += local_o_comp[tid + 
s]; + barrier(CLK_LOCAL_MEM_FENCE); + } + if (tid == 0) { + o_row[i] = CONVERT_O_DATA4(local_o_comp[0] * l_inv); + } + } + } else if (tid == 0) { + #pragma unroll + for (int i = 0; i < DV_VEC; ++i) o_row[i] = (O_DATA_TYPE4)(0.0f); + } +} diff --git a/ggml/src/ggml-opencl/kernels/mul_mv_id_mxfp4_f32.cl b/ggml/src/ggml-opencl/kernels/mul_mv_id_mxfp4_f32.cl new file mode 100644 index 0000000000..d50bd1fc42 --- /dev/null +++ b/ggml/src/ggml-opencl/kernels/mul_mv_id_mxfp4_f32.cl @@ -0,0 +1,189 @@ +#pragma OPENCL EXTENSION cl_khr_fp16 : enable + +#ifdef cl_intel_subgroups +#pragma OPENCL EXTENSION cl_intel_subgroups : enable +#else +#pragma OPENCL EXTENSION cl_khr_subgroups : enable +#endif + +#ifdef cl_intel_required_subgroup_size +#pragma OPENCL EXTENSION cl_intel_required_subgroup_size : enable +#define INTEL_GPU 1 +#define REQD_SUBGROUP_SIZE_16 __attribute__((intel_reqd_sub_group_size(16))) +#define REQD_SUBGROUP_SIZE_32 __attribute__((intel_reqd_sub_group_size(32))) +#elif defined(cl_qcom_reqd_sub_group_size) +#pragma OPENCL EXTENSION cl_qcom_reqd_sub_group_size : enable +#define ADRENO_GPU 1 +#define REQD_SUBGROUP_SIZE_64 __attribute__((qcom_reqd_sub_group_size("half"))) +#define REQD_SUBGROUP_SIZE_128 __attribute__((qcom_reqd_sub_group_size("full"))) +#endif + +#define QK_MXFP4 32 +typedef struct { + uchar e; // E8M0 + uchar qs[QK_MXFP4/2]; +} block_mxfp4; + +constant static float kvalues_mxfp4_f[16] = { + 0, .5f, 1.f, 1.5f, 2.f, 3.f, 4.f, 6.f, -0, -.5f, -1.f, -1.5f, -2.f, -3.f, -4.f, -6.f +}; + +static inline float e8m0_to_fp32(uchar x) { + int bits; + + if (x == 0) { + bits = 0x00400000; + } else { + bits = (uint) x << 23; + } + + return as_float(bits); +} + +#ifdef INTEL_GPU +#define N_R0_MXFP4 2 // number of rows each subgroup works on +#define N_SG_MXFP4 2 // number of subgroups in a work group +#define N_SIMDWIDTH 16 // subgroup size +#elif defined (ADRENO_GPU) +#define N_R0_MXFP4 2 +#define N_SG_MXFP4 2 +#define N_SIMDWIDTH 64 +#endif + +inline void mul_mv_mxfp4_f32( + global char * src0, + global char * src1, + global char * dst, + int ne00, + ulong nb01, + ulong nb02, + ulong nb03, + int ne12, + ulong nb11, + ulong nb12, + ulong nb13, + int ne0, + int ne1, + int r2, + int r3, + local char * shmem +) { + local float * shmem_f32 = (local float *) shmem; + int nb = ne00/QK_MXFP4; + + int r0 = get_group_id(0); + int r1 = get_group_id(1); + int im = 0; + + int first_row = (r0 * N_SG_MXFP4 + get_sub_group_id()) * N_R0_MXFP4; + + uint i12 = im%ne12; + uint i13 = im/ne12; + + ulong offset_src0 = first_row*nb01 + (i12/r2)*nb02 + (i13/r3)*nb03; + ulong offset_src1 = r1*nb11 + (i12 )*nb12 + (i13 )*nb13; + + global block_mxfp4 * x = (global block_mxfp4 *) (src0 + offset_src0); + global float * y = (global float *) (src1 + offset_src1); + + const short ix = get_sub_group_local_id()/2; // 0...15 + const short it = get_sub_group_local_id()%2; // 0 or 1 + + shmem_f32[get_sub_group_local_id()] = kvalues_mxfp4_f[get_sub_group_local_id()%16]; + barrier(CLK_LOCAL_MEM_FENCE); + + float4 yl[4]; + float sumf[N_R0_MXFP4] = {0.f}; + + global float * yb = y + ix * QK_MXFP4 + it * 8; + + for (int ib = ix; ib < nb; ib += N_SIMDWIDTH/2) { + global float4 * y4 = (global float4 *)yb; + yl[0] = y4[0]; + yl[1] = y4[4]; + yl[2] = y4[1]; + yl[3] = y4[5]; + + for (short row = 0; row < N_R0_MXFP4; row++) { + global block_mxfp4 * xb = x + row*nb + ib; + global uchar * q2 = (global uchar *)(xb->qs + 8*it); + + float4 acc1 = yl[0]*(float4)(shmem_f32[q2[0] & 0x0F], shmem_f32[q2[1] & 0x0F], shmem_f32[q2[2] & 0x0F], 
shmem_f32[q2[3] & 0x0F]); + float4 acc2 = yl[1]*(float4)(shmem_f32[q2[0] >> 4 ], shmem_f32[q2[1] >> 4 ], shmem_f32[q2[2] >> 4 ], shmem_f32[q2[3] >> 4 ]); + float4 acc3 = yl[2]*(float4)(shmem_f32[q2[4] & 0x0F], shmem_f32[q2[5] & 0x0F], shmem_f32[q2[6] & 0x0F], shmem_f32[q2[7] & 0x0F]); + float4 acc4 = yl[3]*(float4)(shmem_f32[q2[4] >> 4 ], shmem_f32[q2[5] >> 4 ], shmem_f32[q2[6] >> 4 ], shmem_f32[q2[7] >> 4 ]); + + acc1 = (acc1 + acc3) + (acc2 + acc4); + + sumf[row] += e8m0_to_fp32(xb->e) * ((acc1.s0 + acc1.s1) + (acc1.s2 + acc1.s3)); + } + + yb += (N_SIMDWIDTH/2) * QK_MXFP4; + } + + global float * dst_f32 = (global float *) dst + (ulong)im*ne0*ne1 + (ulong)r1*ne0; + + for (int row = 0; row < N_R0_MXFP4 && first_row + row < ne0; ++row) { + float sum_all = sub_group_reduce_add(sumf[row]); + if (get_sub_group_local_id() == 0) { + dst_f32[first_row + row] = sum_all; + } + } +} + +#ifdef INTEL_GPU +REQD_SUBGROUP_SIZE_16 +#elif defined (ADRENO_GPU) +REQD_SUBGROUP_SIZE_64 +#endif +kernel void kernel_mul_mv_id_mxfp4_f32( + global char * src0, + ulong offset0, + global char * src1, + ulong offset1, + global char * src2, + ulong offset2, + global char * dst, + ulong offsetd, + int ne00, + ulong nb01, + ulong nb02, + ulong nb03, + int ne11, + int ne12, + ulong nb11, + ulong nb12, + ulong nb13, + int ne20, + int ne21, + ulong nb21, + int ne0, + int ne1, + int r2, + int r3, + local char * shmem +) { + src0 = (global char *)((global char *)src0 + offset0); + src1 = (global char *)((global char *)src1 + offset1); + src2 = (global char *)((global char *)src2 + offset2); + dst = (global char *)((global char *)dst + offsetd); + + const int iid1 = get_group_id(2)/ne20; + const int idx = get_group_id(2)%ne20; + + int i02 = ((global int *) (src2 + iid1*nb21))[idx]; + + int i11 = idx % ne11; + int i12 = iid1; + + int i1 = idx; + int i2 = i12; + + global char * src0_cur = src0 + i02*nb02; + global char * src1_cur = src1 + i11*nb11 + i12*nb12; + + global char * dst_cur = dst + (i1*ne0 + i2*ne1*ne0)*sizeof(float); + + mul_mv_mxfp4_f32(src0_cur, src1_cur, dst_cur, + ne00, nb01, nb02, nb03, ne12, nb11, nb12, nb13, ne0, ne1, r2, r3, shmem); +} diff --git a/ggml/src/ggml-opencl/kernels/mul_mv_mxfp4_f32.cl b/ggml/src/ggml-opencl/kernels/mul_mv_mxfp4_f32.cl new file mode 100644 index 0000000000..9a4d4b9bad --- /dev/null +++ b/ggml/src/ggml-opencl/kernels/mul_mv_mxfp4_f32.cl @@ -0,0 +1,144 @@ +#pragma OPENCL EXTENSION cl_khr_fp16 : enable + +#ifdef cl_intel_subgroups +#pragma OPENCL EXTENSION cl_intel_subgroups : enable +#else +#pragma OPENCL EXTENSION cl_khr_subgroups : enable +#endif + +#ifdef cl_intel_required_subgroup_size +#pragma OPENCL EXTENSION cl_intel_required_subgroup_size : enable +#define INTEL_GPU 1 +#define REQD_SUBGROUP_SIZE_16 __attribute__((intel_reqd_sub_group_size(16))) +#define REQD_SUBGROUP_SIZE_32 __attribute__((intel_reqd_sub_group_size(32))) +#elif defined(cl_qcom_reqd_sub_group_size) +#pragma OPENCL EXTENSION cl_qcom_reqd_sub_group_size : enable +#define ADRENO_GPU 1 +#define REQD_SUBGROUP_SIZE_64 __attribute__((qcom_reqd_sub_group_size("half"))) +#define REQD_SUBGROUP_SIZE_128 __attribute__((qcom_reqd_sub_group_size("full"))) +#endif + +#define QK_MXFP4 32 +typedef struct { + uchar e; // E8M0 + uchar qs[QK_MXFP4/2]; +} block_mxfp4; + +constant static float kvalues_mxfp4_f[16] = { + 0, .5f, 1.f, 1.5f, 2.f, 3.f, 4.f, 6.f, -0, -.5f, -1.f, -1.5f, -2.f, -3.f, -4.f, -6.f +}; + +static inline float e8m0_to_fp32(uchar x) { + int bits; + + if (x == 0) { + bits = 0x00400000; + } else { + bits = (uint) x 
<< 23; + } + + return as_float(bits); +} + +#ifdef INTEL_GPU +#define N_R0_MXFP4 2 // number of rows each subgroup works on +#define N_SG_MXFP4 2 // number of subgroups in a work group +#define N_SIMDWIDTH 16 // subgroup size +#elif defined (ADRENO_GPU) +#define N_R0_MXFP4 2 +#define N_SG_MXFP4 2 +#define N_SIMDWIDTH 64 +#endif + +#ifdef INTEL_GPU +REQD_SUBGROUP_SIZE_16 +#elif defined (ADRENO_GPU) +REQD_SUBGROUP_SIZE_64 +#endif +kernel void kernel_mul_mv_mxfp4_f32( + global char * src0, + ulong offset0, + global char * src1, + ulong offset1, + global char * dst, + ulong offsetd, + int ne00, + ulong nb01, + ulong nb02, + ulong nb03, + int ne12, + ulong nb11, + ulong nb12, + ulong nb13, + int ne0, + int ne1, + int r2, + int r3, + local char * shmem +) { + src0 = (global char*)((global char*)src0 + offset0); + src1 = (global char*)((global char*)src1 + offset1); + dst = (global char*)((global char*)dst + offsetd); + + local float * shmem_f32 = (local float *) shmem; + int nb = ne00/QK_MXFP4; + + int r0 = get_group_id(0); + int r1 = get_group_id(1); + int im = get_group_id(2); + + int first_row = (r0 * N_SG_MXFP4 + get_sub_group_id()) * N_R0_MXFP4; + + uint i12 = im%ne12; + uint i13 = im/ne12; + + ulong offset_src0 = first_row*nb01 + (i12/r2)*nb02 + (i13/r3)*nb03; + ulong offset_src1 = r1*nb11 + (i12 )*nb12 + (i13 )*nb13; + + global block_mxfp4 * x = (global block_mxfp4 *) (src0 + offset_src0); + global float * y = (global float *) (src1 + offset_src1); + + const short ix = get_sub_group_local_id()/2; // 0...15 + const short it = get_sub_group_local_id()%2; // 0 or 1 + + shmem_f32[get_sub_group_local_id()] = kvalues_mxfp4_f[get_sub_group_local_id()%16]; + barrier(CLK_LOCAL_MEM_FENCE); + + float4 yl[4]; + float sumf[N_R0_MXFP4] = {0.f}; + + global float * yb = y + ix * QK_MXFP4 + it * 8; + + for (int ib = ix; ib < nb; ib += N_SIMDWIDTH/2) { + global float4 * y4 = (global float4 *)yb; + yl[0] = y4[0]; + yl[1] = y4[4]; + yl[2] = y4[1]; + yl[3] = y4[5]; + + for (short row = 0; row < N_R0_MXFP4; row++) { + global block_mxfp4 * xb = x + row*nb + ib; + global uchar * q2 = (global uchar *)(xb->qs + 8*it); + + float4 acc1 = yl[0]*(float4)(shmem_f32[q2[0] & 0x0F], shmem_f32[q2[1] & 0x0F], shmem_f32[q2[2] & 0x0F], shmem_f32[q2[3] & 0x0F]); + float4 acc2 = yl[1]*(float4)(shmem_f32[q2[0] >> 4 ], shmem_f32[q2[1] >> 4 ], shmem_f32[q2[2] >> 4 ], shmem_f32[q2[3] >> 4 ]); + float4 acc3 = yl[2]*(float4)(shmem_f32[q2[4] & 0x0F], shmem_f32[q2[5] & 0x0F], shmem_f32[q2[6] & 0x0F], shmem_f32[q2[7] & 0x0F]); + float4 acc4 = yl[3]*(float4)(shmem_f32[q2[4] >> 4 ], shmem_f32[q2[5] >> 4 ], shmem_f32[q2[6] >> 4 ], shmem_f32[q2[7] >> 4 ]); + + acc1 = (acc1 + acc3) + (acc2 + acc4); + + sumf[row] += e8m0_to_fp32(xb->e) * ((acc1.s0 + acc1.s1) + (acc1.s2 + acc1.s3)); + } + + yb += (N_SIMDWIDTH/2) * QK_MXFP4; + } + + global float * dst_f32 = (global float *) dst + (ulong)im*ne0*ne1 + (ulong)r1*ne0; + + for (int row = 0; row < N_R0_MXFP4 && first_row + row < ne0; ++row) { + float sum_all = sub_group_reduce_add(sumf[row]); + if (get_sub_group_local_id() == 0) { + dst_f32[first_row + row] = sum_all; + } + } +} diff --git a/ggml/src/ggml-opencl/kernels/transpose.cl b/ggml/src/ggml-opencl/kernels/transpose.cl index a11490b304..536dd560a9 100644 --- a/ggml/src/ggml-opencl/kernels/transpose.cl +++ b/ggml/src/ggml-opencl/kernels/transpose.cl @@ -24,6 +24,26 @@ kernel void kernel_transpose_16( write_imageh(output, (i_2+3)*rows+j, (half4)(temp0.s3, temp1.s3, temp2.s3, temp3.s3)); } +// Padded kernel for irregular shape +kernel void 
kernel_transpose_16_4x1( + __read_only image1d_buffer_t input, + __write_only image1d_buffer_t output, + const uint rows, + const uint cols +) { + + const int i = get_global_id(0); + const int j = get_global_id(1); + const int j_2 = j << 2; + + half temp0 = read_imageh(input, (j_2 + 0) * cols + i).x; + half temp1 = read_imageh(input, (j_2 + 1) * cols + i).x; + half temp2 = read_imageh(input, (j_2 + 2) * cols + i).x; + half temp3 = read_imageh(input, (j_2 + 3) * cols + i).x; + + write_imageh(output, i * rows + j, (half4)(temp0, temp1, temp2, temp3)); +} + // 32-bit transpose, loading/storing a 4x4 tile of elements kernel void kernel_transpose_32( __read_only image1d_buffer_t input, diff --git a/ggml/src/ggml-opt.cpp b/ggml/src/ggml-opt.cpp index a3c82d6757..e078ad14a3 100644 --- a/ggml/src/ggml-opt.cpp +++ b/ggml/src/ggml-opt.cpp @@ -64,9 +64,11 @@ struct ggml_opt_context { int32_t opt_i = 0; bool loss_per_datapoint = false; - ggml_opt_get_optimizer_params get_opt_pars = nullptr; - void * get_opt_pars_ud = nullptr; - struct ggml_tensor * adamw_params = nullptr; + ggml_opt_get_optimizer_params get_opt_pars = nullptr; + void * get_opt_pars_ud = nullptr; + struct ggml_tensor * opt_step_params = nullptr; // Stores output of get_opt_pars. + + enum ggml_opt_optimizer_type optimizer = GGML_OPT_OPTIMIZER_TYPE_ADAMW; }; struct ggml_opt_result { @@ -229,9 +231,13 @@ struct ggml_opt_optimizer_params ggml_opt_get_default_optimizer_params(void * us result.adamw.eps = 1e-8f; result.adamw.wd = 0.0f; + result.sgd.alpha = 1e-3f; + result.sgd.wd = 0.0f; + return result; } + struct ggml_opt_optimizer_params ggml_opt_get_constant_optimizer_params(void * userdata) { return *((struct ggml_opt_optimizer_params *) userdata); } @@ -249,6 +255,7 @@ struct ggml_opt_params ggml_opt_default_params( /*opt_period =*/ 1, /*get_opt_pars =*/ ggml_opt_get_default_optimizer_params, /*get_opt_pars_ud =*/ nullptr, + /*optimizer =*/ GGML_OPT_OPTIMIZER_TYPE_ADAMW, }; } @@ -316,9 +323,14 @@ static void ggml_opt_build(ggml_opt_context_t opt_ctx) { GGML_ASSERT(opt_ctx->ctx_compute && "no compute context set, either use static graphs or set one with ggml_opt_prepare_alloc"); GGML_ASSERT((!opt_ctx->static_graphs || opt_ctx->inputs->data) && "when using static graphs the inputs must be allocated statically"); + const enum ggml_opt_optimizer_type optimizer = opt_ctx->optimizer; + const bool accumulate = opt_ctx->build_type_alloc >= GGML_OPT_BUILD_TYPE_GRAD && !(opt_ctx->static_graphs && opt_ctx->build_type_alloc == GGML_OPT_BUILD_TYPE_OPT && opt_ctx->opt_period == 1); + const bool need_momenta = opt_ctx->build_type_alloc == GGML_OPT_BUILD_TYPE_OPT && + opt_ctx->optimizer == GGML_OPT_OPTIMIZER_TYPE_ADAMW; + ggml_set_input(opt_ctx->inputs); ggml_set_output(opt_ctx->outputs); @@ -340,8 +352,7 @@ static void ggml_opt_build(ggml_opt_context_t opt_ctx) { // - pred (if using static graphs) // - ncorrect (if using static graphs, 2 tensors). constexpr size_t n_loss = 1; - const size_t tensors_per_param = (accumulate ? 1 : 0) + - (opt_ctx->build_type_alloc == GGML_OPT_BUILD_TYPE_OPT ? 2 : 0); + const size_t tensors_per_param = (accumulate ? 1 : 0) + (need_momenta ? 2 : 0); const size_t tensors_const = opt_ctx->static_graphs ? 
9 : 0; const size_t size_meta = (n_loss + tensors_per_param*n_param + tensors_const) * ggml_tensor_overhead(); struct ggml_init_params params = { @@ -458,7 +469,7 @@ static void ggml_opt_build(ggml_opt_context_t opt_ctx) { } } - if (opt_ctx->build_type_alloc >= GGML_OPT_BUILD_TYPE_OPT) { + if (need_momenta && opt_ctx->build_type_alloc >= GGML_OPT_BUILD_TYPE_OPT) { opt_ctx->grad_m.resize(n_nodes); opt_ctx->grad_v.resize(n_nodes); for (int i = 0; i < n_nodes; ++i) { @@ -492,23 +503,36 @@ static void ggml_opt_build(ggml_opt_context_t opt_ctx) { // gb_opt == graph backward optimize, forward pass, then backward pass to calculate gradients, then optimizer step. opt_ctx->gb_opt = ggml_graph_dup(opt_ctx->ctx_compute, opt_ctx->gb_grad, /*force_grads =*/ true); - opt_ctx->adamw_params = ggml_new_tensor_1d(opt_ctx->ctx_cpu, GGML_TYPE_F32, 7); - ggml_set_input(opt_ctx->adamw_params); - ggml_set_name(opt_ctx->adamw_params, "adamw_params"); - + opt_ctx->opt_step_params = ggml_new_tensor_1d(opt_ctx->ctx_cpu, GGML_TYPE_F32, need_momenta ? 7 : 2); + ggml_tensor * adamw_params = opt_ctx->opt_step_params; + ggml_set_input(adamw_params); + const char * optimizer_name = ggml_opt_optimizer_name(opt_ctx->optimizer); + ggml_format_name(adamw_params, "%s_params", optimizer_name); for (int i = opt_ctx->gf->n_nodes-1; i >= 0; --i) { struct ggml_tensor * node = opt_ctx->gb_opt->nodes[i]; struct ggml_tensor * grad = ggml_graph_get_grad(opt_ctx->gb_opt, node); if (grad && (node->flags & GGML_TENSOR_FLAG_PARAM)) { - struct ggml_tensor * m = opt_ctx->grad_m[i]; - struct ggml_tensor * v = opt_ctx->grad_v[i]; - struct ggml_tensor * opt_step = ggml_opt_step_adamw(opt_ctx->ctx_compute, node, grad, m, v, opt_ctx->adamw_params); - - ggml_set_name(m, (std::string("AdamW m for ") + std::string(node->name)).c_str()); - ggml_set_name(v, (std::string("AdamW v for ") + std::string(node->name)).c_str()); - ggml_set_name(opt_step, (std::string("AdamW step for ") + std::string(node->name)).c_str()); - + struct ggml_tensor * m = nullptr; + struct ggml_tensor * v = nullptr; + if (need_momenta) { + m = opt_ctx->grad_m[i]; + v = opt_ctx->grad_v[i]; + ggml_format_name(m, "AdamW m for %s", node->name); + ggml_format_name(v, "AdamW v for %s", node->name); + } + struct ggml_tensor * opt_step; + switch (optimizer) { + case GGML_OPT_OPTIMIZER_TYPE_ADAMW: + opt_step = ggml_opt_step_adamw(opt_ctx->ctx_compute, node, grad, m, v, adamw_params); + break; + case GGML_OPT_OPTIMIZER_TYPE_SGD: + opt_step = ggml_opt_step_sgd(opt_ctx->ctx_compute, node, grad, adamw_params); + break; + default: + GGML_ABORT("fatal error"); + } + ggml_format_name(opt_step, "%s step for %s", optimizer_name, node->name); ggml_build_forward_expand(opt_ctx->gb_opt, opt_step); } } @@ -534,6 +558,7 @@ ggml_opt_context_t ggml_opt_init(struct ggml_opt_params params) { result->opt_period = params.opt_period; result->get_opt_pars = params.get_opt_pars; result->get_opt_pars_ud = params.get_opt_pars_ud; + result->optimizer = params.optimizer; GGML_ASSERT(result->opt_period >= 1); @@ -756,29 +781,43 @@ void ggml_opt_alloc(ggml_opt_context_t opt_ctx, bool backward) { void ggml_opt_eval(ggml_opt_context_t opt_ctx, ggml_opt_result_t result) { GGML_ASSERT(opt_ctx->eval_ready); if (opt_ctx->allocated_graph == opt_ctx->gb_opt) { - struct ggml_opt_optimizer_params opt_pars = opt_ctx->get_opt_pars(opt_ctx->get_opt_pars_ud); + const ggml_opt_optimizer_params & opt_pars = opt_ctx->get_opt_pars(opt_ctx->get_opt_pars_ud); - GGML_ASSERT(opt_pars.adamw.alpha > 0.0f); - GGML_ASSERT(opt_pars.adamw.beta1 
>= 0.0f); - GGML_ASSERT(opt_pars.adamw.beta1 <= 1.0f); - GGML_ASSERT(opt_pars.adamw.beta2 >= 0.0f); - GGML_ASSERT(opt_pars.adamw.beta2 <= 1.0f); - GGML_ASSERT(opt_pars.adamw.eps >= 0.0f); - GGML_ASSERT(opt_pars.adamw.wd >= 0.0f); - GGML_ASSERT(opt_pars.adamw.wd <= 1.0f); + switch (opt_ctx->optimizer) { + case GGML_OPT_OPTIMIZER_TYPE_ADAMW: { + GGML_ASSERT(opt_pars.adamw.alpha > 0.0f); + GGML_ASSERT(opt_pars.adamw.beta1 >= 0.0f); + GGML_ASSERT(opt_pars.adamw.beta1 <= 1.0f); + GGML_ASSERT(opt_pars.adamw.beta2 >= 0.0f); + GGML_ASSERT(opt_pars.adamw.beta2 <= 1.0f); + GGML_ASSERT(opt_pars.adamw.eps >= 0.0f); + GGML_ASSERT(opt_pars.adamw.wd >= 0.0f); + GGML_ASSERT(opt_pars.adamw.wd <= 1.0f); - // beta1, beta2 after applying warmup - const float beta1h = 1.0f/(1.0f - powf(opt_pars.adamw.beta1, opt_ctx->iter)); - const float beta2h = 1.0f/(1.0f - powf(opt_pars.adamw.beta2, opt_ctx->iter)); + // beta1, beta2 after applying warmup + const float beta1h = 1.0f / (1.0f - powf(opt_pars.adamw.beta1, opt_ctx->iter)); + const float beta2h = 1.0f / (1.0f - powf(opt_pars.adamw.beta2, opt_ctx->iter)); - float * adamw_par_data = ggml_get_data_f32(opt_ctx->adamw_params); - adamw_par_data[0] = opt_pars.adamw.alpha; - adamw_par_data[1] = opt_pars.adamw.beta1; - adamw_par_data[2] = opt_pars.adamw.beta2; - adamw_par_data[3] = opt_pars.adamw.eps; - adamw_par_data[4] = opt_pars.adamw.wd; - adamw_par_data[5] = beta1h; - adamw_par_data[6] = beta2h; + float * adamw_par_data = ggml_get_data_f32(opt_ctx->opt_step_params); + adamw_par_data[0] = opt_pars.adamw.alpha; + adamw_par_data[1] = opt_pars.adamw.beta1; + adamw_par_data[2] = opt_pars.adamw.beta2; + adamw_par_data[3] = opt_pars.adamw.eps; + adamw_par_data[4] = opt_pars.adamw.wd; + adamw_par_data[5] = beta1h; + adamw_par_data[6] = beta2h; + } break; + case GGML_OPT_OPTIMIZER_TYPE_SGD: { + GGML_ASSERT(opt_pars.sgd.alpha > 0.0f); + GGML_ASSERT(opt_pars.sgd.wd >= 0.0f); + GGML_ASSERT(opt_pars.sgd.wd <= 1.0f); + float * sgd = ggml_get_data_f32(opt_ctx->opt_step_params); + sgd[0] = opt_pars.sgd.alpha; + sgd[1] = opt_pars.sgd.wd; + } break; + default: + GGML_ABORT("fatal error"); + } } ggml_backend_sched_graph_compute(opt_ctx->backend_sched, opt_ctx->allocated_graph_copy); @@ -963,6 +1002,7 @@ void ggml_opt_fit( ggml_tensor * outputs, ggml_opt_dataset_t dataset, enum ggml_opt_loss_type loss_type, + enum ggml_opt_optimizer_type optimizer, ggml_opt_get_optimizer_params get_opt_pars, int64_t nepoch, int64_t nbatch_logical, @@ -993,6 +1033,7 @@ void ggml_opt_fit( params.opt_period = opt_period; params.get_opt_pars = get_opt_pars; params.get_opt_pars_ud = &epoch; + params.optimizer = optimizer; ggml_opt_context_t opt_ctx = ggml_opt_init(params); // Shuffling the data is generally useful but there is only a point if not all data is used in a single batch. 
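For reference, a minimal sketch (not part of the patch) of the opt_step_params layout that the AdamW and SGD branches above write before the optimizer-step graph runs; the helper name is hypothetical and only mirrors what ggml_opt_eval does in the hunk above.

/* Illustrative sketch, not part of the diff: how the 7-element (AdamW) or
 * 2-element (SGD) opt_step_params tensor is packed, as implied by the code above. */
#include <math.h>

static void pack_opt_step_params_sketch(float *p, int is_sgd, int iter,
                                        float alpha, float beta1, float beta2,
                                        float eps, float wd) {
    if (is_sgd) {
        p[0] = alpha;   // learning rate
        p[1] = wd;      // weight decay
        return;
    }
    p[0] = alpha;
    p[1] = beta1;
    p[2] = beta2;
    p[3] = eps;
    p[4] = wd;
    p[5] = 1.0f / (1.0f - powf(beta1, iter)); // beta1 bias correction after warmup
    p[6] = 1.0f / (1.0f - powf(beta2, iter)); // beta2 bias correction after warmup
}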
@@ -1035,3 +1076,18 @@ void ggml_opt_fit( ggml_opt_result_free(result_train); ggml_opt_result_free(result_val); } + +enum ggml_opt_optimizer_type ggml_opt_context_optimizer_type(ggml_opt_context_t c) { + return c->optimizer; +} + +GGML_API const char * ggml_opt_optimizer_name(enum ggml_opt_optimizer_type o) { + switch (o) { + case GGML_OPT_OPTIMIZER_TYPE_ADAMW: + return "adamw"; + case GGML_OPT_OPTIMIZER_TYPE_SGD: + return "sgd"; + default: + return "undefined"; + }; +} diff --git a/ggml/src/ggml-quants.c b/ggml/src/ggml-quants.c index 94f6405ca1..727932123e 100644 --- a/ggml/src/ggml-quants.c +++ b/ggml/src/ggml-quants.c @@ -566,7 +566,7 @@ static float make_q3_quants(int n, int nmax, const float * GGML_RESTRICT x, int8 for (int i = 0; i < n; ++i) { L[i] += nmax; } - return sumlx / suml2; + return suml2 > 0.0f ? sumlx / suml2 : 0.0f; } for (int i = 0; i < n; ++i) { int l = nearest_int(iscale * x[i]); @@ -901,7 +901,7 @@ static float make_qp_quants(int n, int nmax, const float * GGML_RESTRICT x, uint for (int i = 0; i < n; ++i) { max = MAX(max, x[i]); } - if (!max) { // all zero + if (max < GROUP_MAX_EPS) { // all zero for (int i = 0; i < n; ++i) { L[i] = 0; } return 0.f; } @@ -966,7 +966,7 @@ static float make_qp_quants(int n, int nmax, const float * GGML_RESTRICT x, uint break; } } - return sumlx/suml2; + return suml2 > 0.0f ? sumlx / suml2 : 0.0f; } static void quantize_row_q2_K_impl(const float * GGML_RESTRICT x, block_q2_K * GGML_RESTRICT y, int k, const float * GGML_RESTRICT quant_weights) { @@ -4266,7 +4266,7 @@ static void quantize_row_iq1_s_impl(const float * GGML_RESTRICT x, void * GGML_R sumw[j+1] = sumw[j] + weight[i]; } } - float best_score = -FLT_MIN, scale = max; + float best_score = -FLT_MAX, scale = max; int besti1 = -1, besti2 = -1, best_shift = 0; for (int i1 = 0; i1 <= block_size; ++i1) { for (int i2 = i1; i2 <= block_size; ++i2) { @@ -4442,7 +4442,7 @@ static void quantize_row_iq1_m_impl(const float * GGML_RESTRICT x, void * GGML_R idx[2*j] = j; } qsort(pairs, block_size, 2*sizeof(float), iq1_sort_helper); - float best_score = -FLT_MIN, scale = max; + float best_score = -FLT_MAX, scale = max; int besti1 = -1, besti2 = -1, best_k = -1; // 0: +, + // 1: +, - diff --git a/ggml/src/ggml-rpc/ggml-rpc.cpp b/ggml/src/ggml-rpc/ggml-rpc.cpp index df6ba54076..e84ff93efc 100644 --- a/ggml/src/ggml-rpc/ggml-rpc.cpp +++ b/ggml/src/ggml-rpc/ggml-rpc.cpp @@ -29,9 +29,12 @@ #include #include #include +#include namespace fs = std::filesystem; +static constexpr size_t MAX_CHUNK_SIZE = 1024ull * 1024ull * 1024ull; // 1 GiB + #ifdef _WIN32 typedef SOCKET sockfd_t; using ssize_t = __int64; @@ -323,11 +326,14 @@ static std::shared_ptr create_server_socket(const char * host, int por static bool send_data(sockfd_t sockfd, const void * data, size_t size) { size_t bytes_sent = 0; while (bytes_sent < size) { - ssize_t n = send(sockfd, (const char *)data + bytes_sent, size - bytes_sent, 0); + size_t size_to_send = std::min(size - bytes_sent, MAX_CHUNK_SIZE); + ssize_t n = send(sockfd, (const char *)data + bytes_sent, size_to_send, 0); if (n < 0) { + GGML_LOG_ERROR("send failed (bytes_sent=%zu, size_to_send=%zu)\n", + bytes_sent, size_to_send); return false; } - bytes_sent += n; + bytes_sent += (size_t)n; } return true; } @@ -335,11 +341,18 @@ static bool send_data(sockfd_t sockfd, const void * data, size_t size) { static bool recv_data(sockfd_t sockfd, void * data, size_t size) { size_t bytes_recv = 0; while (bytes_recv < size) { - ssize_t n = recv(sockfd, (char *)data + bytes_recv, size - 
bytes_recv, 0); - if (n <= 0) { + size_t size_to_recv = std::min(size - bytes_recv, MAX_CHUNK_SIZE); + ssize_t n = recv(sockfd, (char *)data + bytes_recv, size_to_recv, 0); + if (n < 0) { + GGML_LOG_ERROR("recv failed (bytes_recv=%zu, size_to_recv=%zu)\n", + bytes_recv, size_to_recv); return false; } - bytes_recv += n; + if (n == 0) { + GGML_LOG_ERROR("recv returned 0 (peer closed?)\n"); + return false; + } + bytes_recv += (size_t)n; } return true; } diff --git a/ggml/src/ggml-sycl/ggml-sycl.cpp b/ggml/src/ggml-sycl/ggml-sycl.cpp index 3992dad01d..a0a650e92e 100644 --- a/ggml/src/ggml-sycl/ggml-sycl.cpp +++ b/ggml/src/ggml-sycl/ggml-sycl.cpp @@ -2705,9 +2705,9 @@ static void ggml_sycl_mul_mat_batched_sycl(ggml_backend_sycl_context & ctx, cons " : converting src1 to fp16"); // iterate tensor dims and find the slowest moving dim and stride - int64_t last_dim=0; - int64_t last_str=0; - int64_t largest_str=0; + int last_dim=0; + int last_str=0; + size_t largest_str=0; for(int i = 0; i< 4; i++){ // last stride is always the largest if(src1->nb[i] == largest_str){ @@ -2783,7 +2783,7 @@ static void ggml_sycl_mul_mat_batched_sycl(ggml_backend_sycl_context & ctx, cons auto launch_gemm_for_batches = [&ctx, queue](const sycl::half *src0, const sycl::half *src1, float *dst, int64_t a0, int64_t a1, int64_t batcha, - int64_t b0, int64_t b1, int64_t batchb, + int64_t /*b0*/, int64_t b1, int64_t batchb, int64_t sa0, int64_t sa1, int64_t sa2, int64_t sb0, int64_t sb1, int64_t sb2, int64_t sd2) { @@ -2832,14 +2832,26 @@ static void ggml_sycl_mul_mat_batched_sycl(ggml_backend_sycl_context & ctx, cons } }; - bool cont_batches_a = nb02 * ne02 == nb03; - bool cont_batches_b = nb12 * ne12 == nb13; - if (cont_batches_a && cont_batches_b) { + const bool cont_batches_dim2_a = nb02 * ne02 == nb03; + const bool cont_batches_dim2_b = nb12 * ne12 == nb13; + const bool cont_batches_dim3_a = ne02 == 1 && nb02 * ne01 == nb03; + const bool cont_batches_dim3_b = ne12 == 1 && nb12 * ne11 == nb13; + if (cont_batches_dim2_a && cont_batches_dim2_b) { + // A batch is considered contiguous if the dimension 2 is not strided int64_t batches0 = ne02 * ne03; int64_t batches1 = ne12 * ne13; launch_gemm_for_batches(src0_f16, src1_f16, dst_ddf, ne00, ne01, batches0, ne10, ne11, batches1, str_a0, str_a1, str_a2, str_b0, str_b1, str_b2, nb2 / sizeof(float)); + } else if (cont_batches_dim3_a && cont_batches_dim3_b) { + // This case is similar to the one above with the difference that only the batch in dimension 3 is used and the dimension 2 is of size 1. 
+ int64_t batches0 = ne02 * ne03; + int64_t batches1 = ne12 * ne13; + int64_t str_a3 = nb03 / type_size_src0; + int64_t str_b3 = nb13 / type_size_src1; + launch_gemm_for_batches(src0_f16, src1_f16, dst_ddf, ne00, ne01, batches0, + ne10, ne11, batches1, str_a0, str_a1, str_a3, str_b0, str_b1, + str_b3, nb2 / sizeof(float)); } else { for (int64_t b_a = 0; b_a < ne03; b_a++) { const sycl::half *src0_f16_shifted @@ -4215,6 +4227,15 @@ static bool ggml_backend_sycl_device_supports_op(ggml_backend_dev_t dev, const g // FIXME: keep a list of supported types to avoid breaking the backend when a new type is added return false; } + // TODO: The configuration below needs more work to be supported with oneDNN + if (ggml_is_permuted(a) && !ggml_is_contiguous(a) && a->ne[2] > 1 && a->ne[3] > 1) { + return false; + } + // TODO: This specific configuration can fail with oneDNN and needs more debugging + if (!ggml_is_permuted(a) && ggml_is_permuted(b) && b->ne[2] > 1 && b->ne[3] > 1 && + a->ne[0] > 128 && a->ne[2] == 1 && src0_type == GGML_TYPE_F16) { + return false; + } return true; } case GGML_OP_OUT_PROD: diff --git a/ggml/src/ggml-vulkan/ggml-vulkan.cpp b/ggml/src/ggml-vulkan/ggml-vulkan.cpp index 4070e248ba..7ef938066b 100644 --- a/ggml/src/ggml-vulkan/ggml-vulkan.cpp +++ b/ggml/src/ggml-vulkan/ggml-vulkan.cpp @@ -103,6 +103,8 @@ static bool is_pow2(uint32_t x) { return x > 1 && (x & (x-1)) == 0; } struct ggml_backend_vk_context; #define MAX_PARAMETER_COUNT 8 +// Max number of adds that can be fused without exceeding MAX_PARAMETER_COUNT. +#define MAX_FUSED_ADDS (MAX_PARAMETER_COUNT - 2) struct vk_pipeline_struct { std::string name; @@ -343,6 +345,15 @@ enum vk_conv_shapes { CONV_SHAPE_COUNT, }; +enum dmmv_wg_sizes { + DMMV_WG_SIZE_SUBGROUP, + DMMV_WG_SIZE_LARGE, + DMMV_WG_SIZE_COUNT, +}; + +static constexpr uint32_t num_argsort_pipelines = 11; +static constexpr uint32_t max_argsort_cols = 1 << (num_argsort_pipelines-1); + struct vk_device_struct { std::recursive_mutex mutex; @@ -368,6 +379,7 @@ struct vk_device_struct { bool float_controls_rte_fp16; bool subgroup_add; bool subgroup_shuffle; + bool multi_add; bool integer_dot_product; @@ -429,8 +441,8 @@ struct vk_device_struct { vk_pipeline pipeline_quantize_q8_1; vk_pipeline pipeline_dequant[GGML_TYPE_COUNT]; - vk_pipeline pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_COUNT][mul_mat_vec_max_cols]; - vk_pipeline pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_COUNT][mul_mat_vec_max_cols]; + vk_pipeline pipeline_dequant_mul_mat_vec_f32_f32[DMMV_WG_SIZE_COUNT][GGML_TYPE_COUNT][mul_mat_vec_max_cols]; + vk_pipeline pipeline_dequant_mul_mat_vec_f16_f32[DMMV_WG_SIZE_COUNT][GGML_TYPE_COUNT][mul_mat_vec_max_cols]; vk_pipeline pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_COUNT]; vk_pipeline pipeline_mul_mat_vec_p021_f16_f32[p021_max_gqa_ratio]; @@ -449,12 +461,16 @@ struct vk_device_struct { vk_pipeline pipeline_div[2][2][2]; vk_pipeline pipeline_div_norepeat[2][2][2]; + // indexed by num_additional_fused_ops == num_adds - 1 + vk_pipeline pipeline_multi_add[MAX_FUSED_ADDS]; + vk_pipeline pipeline_add_id_f32; vk_pipeline pipeline_concat_f32, pipeline_concat_f16, pipeline_concat_i32; vk_pipeline pipeline_upscale_nearest_f32, pipeline_upscale_bilinear_f32, pipeline_upscale_bilinear_ac_f32; vk_pipeline pipeline_scale_f32; vk_pipeline pipeline_sqr_f32; + vk_pipeline pipeline_sqrt_f32; vk_pipeline pipeline_sin_f32; vk_pipeline pipeline_cos_f32; vk_pipeline pipeline_clamp_f32; @@ -499,7 +515,7 @@ struct vk_device_struct { vk_pipeline pipeline_rope_neox_f32, 
pipeline_rope_neox_f16; vk_pipeline pipeline_rope_multi_f32, pipeline_rope_multi_f16; vk_pipeline pipeline_rope_vision_f32, pipeline_rope_vision_f16; - vk_pipeline pipeline_argsort_f32; + vk_pipeline pipeline_argsort_f32[num_argsort_pipelines]; vk_pipeline pipeline_sum_rows_f32; vk_pipeline pipeline_argmax_f32; vk_pipeline pipeline_count_equal_i32; @@ -510,6 +526,7 @@ struct vk_device_struct { vk_pipeline pipeline_rwkv_wkv6_f32; vk_pipeline pipeline_rwkv_wkv7_f32; vk_pipeline pipeline_opt_step_adamw_f32; + vk_pipeline pipeline_opt_step_sgd_f32; vk_pipeline pipeline_conv2d_f32[CONV_SHAPE_COUNT]; vk_pipeline pipeline_conv2d_f16_f32[CONV_SHAPE_COUNT]; vk_pipeline pipeline_conv2d_dw_whcn_f32; @@ -800,6 +817,14 @@ struct vk_op_binary_push_constants { float param1; float param2; int32_t param3; }; +struct vk_op_multi_add_push_constants { + // shape for dst + uint32_t ne20; uint32_t ne21; uint32_t ne22; uint32_t ne23; + + // strides for srcs+dst + uint32_t nb[8][4]; +}; + struct vk_op_add_id_push_constants { uint32_t ne0; uint32_t ne1; @@ -855,7 +880,6 @@ struct vk_op_soft_max_push_constants { struct vk_op_argsort_push_constants { uint32_t ncols; - uint32_t ncols_pad; int32_t order; }; @@ -1110,17 +1134,23 @@ class vk_perf_logger { return; } if (node->op == GGML_OP_MUL_MAT || node->op == GGML_OP_MUL_MAT_ID) { - const uint64_t m = node->src[0]->ne[1]; - const uint64_t n = node->src[1]->ne[1]; - const uint64_t k = node->src[1]->ne[0]; - std::string name = ggml_op_name(node->op); - if (n == 1) { - name += "_VEC m=" + std::to_string(m) + " k=" + std::to_string(k); - } else { - name += " m=" + std::to_string(m) + " n=" + std::to_string(n) + " k=" + std::to_string(k); + const uint64_t m = node->src[0]->ne[1]; + const uint64_t n = node->ne[1]; + const uint64_t k = node->src[1]->ne[0]; + const uint64_t batch = node->src[1]->ne[2] * node->src[1]->ne[3]; + std::string name = ggml_op_name(node->op); + if ((node->op == GGML_OP_MUL_MAT && n <= mul_mat_vec_max_cols) || + (node->op == GGML_OP_MUL_MAT_ID && node->src[2]->ne[1] == 1)) { + name += "_VEC"; + } + name += " "; + name += ggml_type_name(node->src[0]->type); + name += " m=" + std::to_string(m) + " n=" + std::to_string(n) + " k=" + std::to_string(k); + if (batch > 1) { + name += " batch=" + std::to_string(batch); } timings[name].push_back(time); - flops[name].push_back(m * n * (k + (k - 1))); + flops[name].push_back(m * n * (k + (k - 1)) * batch); return; } if (node->op == GGML_OP_CONV_2D) { @@ -2380,26 +2410,26 @@ static void ggml_vk_load_shaders(vk_device& device) { CREATE_MM(pipeline_matmul_id_bf16, matmul_id_bf16, , wg_denoms, warptile, vk_mat_mat_id_push_constants, 4) } #endif - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0].f16acc, matmul_id_q4_0_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1].f16acc, matmul_id_q4_1_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0].f16acc, matmul_id_q5_0_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1].f16acc, matmul_id_q5_1_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0].f16acc, matmul_id_q8_0_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K].f16acc, matmul_id_q2_k_f16, , mmqid_wg_denoms, 
warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K].f16acc, matmul_id_q3_k_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K].f16acc, matmul_id_q4_k_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K].f16acc, matmul_id_q5_k_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K].f16acc, matmul_id_q6_k_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_S].f16acc, matmul_id_iq1_s_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_M].f16acc, matmul_id_iq1_m_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XXS].f16acc, matmul_id_iq2_xxs_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XS].f16acc, matmul_id_iq2_xs_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_S].f16acc, matmul_id_iq2_s_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_XXS].f16acc, matmul_id_iq3_xxs_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_S].f16acc, matmul_id_iq3_s_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_XS].f16acc, matmul_id_iq4_xs_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL].f16acc, matmul_id_iq4_nl_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_MXFP4].f16acc, matmul_id_mxfp4_f16, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0], matmul_id_q4_0_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1], matmul_id_q4_1_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0], matmul_id_q5_0_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1], matmul_id_q5_1_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0], matmul_id_q8_0_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K], matmul_id_q2_k_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K], matmul_id_q3_k_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K], matmul_id_q4_k_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K], matmul_id_q5_k_f16, mmqid_wg_denoms, 
warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K], matmul_id_q6_k_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_S], matmul_id_iq1_s_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_M], matmul_id_iq1_m_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XXS], matmul_id_iq2_xxs_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XS], matmul_id_iq2_xs_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_S], matmul_id_iq2_s_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_XXS], matmul_id_iq3_xxs_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_S], matmul_id_iq3_s_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_XS], matmul_id_iq4_xs_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL], matmul_id_iq4_nl_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_MXFP4], matmul_id_mxfp4_f16, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) #undef CREATE_MM #undef CREATE_MM2 } else @@ -2495,51 +2525,27 @@ static void ggml_vk_load_shaders(vk_device& device) { } #endif - if (device->coopmat_acc_f16_support) { - CREATE_MM(GGML_TYPE_Q4_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0].f16acc, matmul_id_q4_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q4_1, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1].f16acc, matmul_id_q4_1_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q5_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0].f16acc, matmul_id_q5_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q5_1, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1].f16acc, matmul_id_q5_1_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q8_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0].f16acc, matmul_id_q8_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q4_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0], matmul_id_q4_0_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q4_1, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1], matmul_id_q4_1_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q5_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0], matmul_id_q5_0_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q5_1, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1], matmul_id_q5_1_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q8_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0], matmul_id_q8_0_f32, 
mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q2_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K].f16acc, matmul_id_q2_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q3_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K].f16acc, matmul_id_q3_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q4_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K].f16acc, matmul_id_q4_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q5_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K].f16acc, matmul_id_q5_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q6_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K].f16acc, matmul_id_q6_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ1_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_S].f16acc, matmul_id_iq1_s_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ1_M, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_M].f16acc, matmul_id_iq1_m_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ2_XXS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XXS].f16acc, matmul_id_iq2_xxs_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ2_XS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XS].f16acc, matmul_id_iq2_xs_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ2_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_S].f16acc, matmul_id_iq2_s_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ3_XXS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_XXS].f16acc, matmul_id_iq3_xxs_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ3_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_S].f16acc, matmul_id_iq3_s_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ4_XS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_XS].f16acc, matmul_id_iq4_xs_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ4_NL, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL].f16acc, matmul_id_iq4_nl_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_MXFP4, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_MXFP4].f16acc, matmul_id_mxfp4_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - } else { - CREATE_MM(GGML_TYPE_Q4_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0].f16acc, matmul_id_q4_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q4_1, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1].f16acc, matmul_id_q4_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q5_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0].f16acc, matmul_id_q5_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q5_1, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1].f16acc, matmul_id_q5_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - 
CREATE_MM(GGML_TYPE_Q8_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0].f16acc, matmul_id_q8_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - - CREATE_MM(GGML_TYPE_Q2_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K].f16acc, matmul_id_q2_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q3_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K].f16acc, matmul_id_q3_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q4_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K].f16acc, matmul_id_q4_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q5_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K].f16acc, matmul_id_q5_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q6_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K].f16acc, matmul_id_q6_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ1_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_S].f16acc, matmul_id_iq1_s_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ1_M, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_M].f16acc, matmul_id_iq1_m_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ2_XXS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XXS].f16acc, matmul_id_iq2_xxs_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ2_XS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XS].f16acc, matmul_id_iq2_xs_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ2_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_S].f16acc, matmul_id_iq2_s_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ3_XXS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_XXS].f16acc, matmul_id_iq3_xxs_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ3_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_S].f16acc, matmul_id_iq3_s_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ4_XS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_XS].f16acc, matmul_id_iq4_xs_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ4_NL, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL].f16acc, matmul_id_iq4_nl_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_MXFP4, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_MXFP4].f16acc, matmul_id_mxfp4_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - } + CREATE_MM2(GGML_TYPE_Q2_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K], matmul_id_q2_k_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q3_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K], matmul_id_q3_k_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q4_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K], matmul_id_q4_k_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q5_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K], matmul_id_q5_k_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q6_K, 
pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K], matmul_id_q6_k_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ1_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_S], matmul_id_iq1_s_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ1_M, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_M], matmul_id_iq1_m_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ2_XXS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XXS], matmul_id_iq2_xxs_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ2_XS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XS], matmul_id_iq2_xs_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ2_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_S], matmul_id_iq2_s_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ3_XXS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_XXS], matmul_id_iq3_xxs_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ3_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_S], matmul_id_iq3_s_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ4_XS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_XS], matmul_id_iq4_xs_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ4_NL, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL], matmul_id_iq4_nl_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_MXFP4, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_MXFP4], matmul_id_mxfp4_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); #undef CREATE_MM2 #undef CREATE_MM } else @@ -2624,27 +2630,27 @@ static void ggml_vk_load_shaders(vk_device& device) { CREATE_MM(GGML_TYPE_BF16, pipeline_matmul_id_bf16, matmul_id_bf16, , wg_denoms, warptile, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q4_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0].f16acc, matmul_id_q4_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q4_1, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1].f16acc, matmul_id_q4_1_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q5_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0].f16acc, matmul_id_q5_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q5_1, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1].f16acc, matmul_id_q5_1_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q8_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0].f16acc, matmul_id_q8_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q4_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0], matmul_id_q4_0_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q4_1, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1], matmul_id_q4_1_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q5_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0], matmul_id_q5_0_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q5_1, 
pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1], matmul_id_q5_1_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q8_0, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0], matmul_id_q8_0_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q2_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K].f16acc, matmul_id_q2_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q3_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K].f16acc, matmul_id_q3_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q4_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K].f16acc, matmul_id_q4_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q5_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K].f16acc, matmul_id_q5_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_Q6_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K].f16acc, matmul_id_q6_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ1_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_S].f16acc, matmul_id_iq1_s_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ1_M, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_M].f16acc, matmul_id_iq1_m_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ2_XXS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XXS].f16acc, matmul_id_iq2_xxs_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ2_XS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XS].f16acc, matmul_id_iq2_xs_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ2_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_S].f16acc, matmul_id_iq2_s_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ3_XXS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_XXS].f16acc, matmul_id_iq3_xxs_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ3_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_S].f16acc, matmul_id_iq3_s_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ4_XS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_XS].f16acc, matmul_id_iq4_xs_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_IQ4_NL, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL].f16acc, matmul_id_iq4_nl_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); - CREATE_MM(GGML_TYPE_MXFP4, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_MXFP4].f16acc, matmul_id_mxfp4_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q2_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K], matmul_id_q2_k_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q3_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K], matmul_id_q3_k_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q4_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K], matmul_id_q4_k_f32, mmq_wg_denoms, warptile_mmq, 
vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q5_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K], matmul_id_q5_k_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_Q6_K, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K], matmul_id_q6_k_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ1_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_S], matmul_id_iq1_s_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ1_M, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ1_M], matmul_id_iq1_m_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ2_XXS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XXS], matmul_id_iq2_xxs_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ2_XS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_XS], matmul_id_iq2_xs_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ2_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ2_S], matmul_id_iq2_s_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ3_XXS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_XXS], matmul_id_iq3_xxs_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ3_S, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ3_S], matmul_id_iq3_s_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ4_XS, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_XS], matmul_id_iq4_xs_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_IQ4_NL, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL], matmul_id_iq4_nl_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM2(GGML_TYPE_MXFP4, pipeline_dequant_mul_mat_mat_id[GGML_TYPE_MXFP4], matmul_id_mxfp4_f32, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); #undef CREATE_MM2 #undef CREATE_MMQ #undef CREATE_MM @@ -2773,54 +2779,61 @@ static void ggml_vk_load_shaders(vk_device& device) { rm_stdq = 2; uint32_t rm_iq = 2 * rm_kq; - for (uint32_t i = 0; i < mul_mat_vec_max_cols; ++i) { - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_F32 ][i], "mul_mat_vec_f32_f32_f32_"+std::to_string(i+1), mul_mat_vec_f32_f32_f32_len, mul_mat_vec_f32_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2, i+1}, 1); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_F16 ][i], "mul_mat_vec_f16_f32_f32_"+std::to_string(i+1), mul_mat_vec_f16_f32_f32_len, mul_mat_vec_f16_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2, i+1}, 1); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_BF16][i], "mul_mat_vec_bf16_f32_f32_"+std::to_string(i+1), mul_mat_vec_bf16_f32_f32_len, mul_mat_vec_bf16_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2, i+1}, 1); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q4_0][i], "mul_mat_vec_q4_0_f32_f32_"+std::to_string(i+1), mul_mat_vec_q4_0_f32_f32_len, mul_mat_vec_q4_0_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {device->subgroup_size, 2*rm_stdq, i+1}, 1, true); - 
ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q4_1][i], "mul_mat_vec_q4_1_f32_f32_"+std::to_string(i+1), mul_mat_vec_q4_1_f32_f32_len, mul_mat_vec_q4_1_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {device->subgroup_size, 2*rm_stdq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q5_0][i], "mul_mat_vec_q5_0_f32_f32_"+std::to_string(i+1), mul_mat_vec_q5_0_f32_f32_len, mul_mat_vec_q5_0_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {device->subgroup_size, 2*rm_stdq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q5_1][i], "mul_mat_vec_q5_1_f32_f32_"+std::to_string(i+1), mul_mat_vec_q5_1_f32_f32_len, mul_mat_vec_q5_1_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {device->subgroup_size, 2*rm_stdq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q8_0][i], "mul_mat_vec_q8_0_f32_f32_"+std::to_string(i+1), mul_mat_vec_q8_0_f32_f32_len, mul_mat_vec_q8_0_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1*rm_stdq, 1, 1}, {device->subgroup_size, 1*rm_stdq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q2_K][i], "mul_mat_vec_q2_k_f32_f32_"+std::to_string(i+1), mul_mat_vec_q2_k_f32_f32_len, mul_mat_vec_q2_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {subgroup_size_16, rm_kq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q3_K][i], "mul_mat_vec_q3_k_f32_f32_"+std::to_string(i+1), mul_mat_vec_q3_k_f32_f32_len, mul_mat_vec_q3_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {subgroup_size_16, rm_kq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q4_K][i], "mul_mat_vec_q4_k_f32_f32_"+std::to_string(i+1), mul_mat_vec_q4_k_f32_f32_len, mul_mat_vec_q4_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {subgroup_size_16, rm_kq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q5_K][i], "mul_mat_vec_q5_k_f32_f32_"+std::to_string(i+1), mul_mat_vec_q5_k_f32_f32_len, mul_mat_vec_q5_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {subgroup_size_16, rm_kq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q6_K][i], "mul_mat_vec_q6_k_f32_f32_"+std::to_string(i+1), mul_mat_vec_q6_k_f32_f32_len, mul_mat_vec_q6_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {subgroup_size_16, rm_kq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_IQ1_S][i], "mul_mat_vec_iq1_s_f32_f32_"+std::to_string(i+1), mul_mat_vec_iq1_s_f32_f32_len, mul_mat_vec_iq1_s_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_IQ1_M][i], "mul_mat_vec_iq1_m_f32_f32_"+std::to_string(i+1), mul_mat_vec_iq1_m_f32_f32_len, mul_mat_vec_iq1_m_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, 
device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_IQ2_XXS][i], "mul_mat_vec_iq2_xxs_f32_f32_"+std::to_string(i+1), mul_mat_vec_iq2_xxs_f32_f32_len, mul_mat_vec_iq2_xxs_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_IQ2_XS][i], "mul_mat_vec_iq2_xs_f32_f32_"+std::to_string(i+1), mul_mat_vec_iq2_xs_f32_f32_len, mul_mat_vec_iq2_xs_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_IQ2_S][i], "mul_mat_vec_iq2_s_f32_f32_"+std::to_string(i+1), mul_mat_vec_iq2_s_f32_f32_len, mul_mat_vec_iq2_s_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_IQ3_XXS][i], "mul_mat_vec_iq3_xxs_f32_f32_"+std::to_string(i+1), mul_mat_vec_iq3_xxs_f32_f32_len, mul_mat_vec_iq3_xxs_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_IQ3_S][i], "mul_mat_vec_iq3_s_f32_f32_"+std::to_string(i+1), mul_mat_vec_iq3_s_f32_f32_len, mul_mat_vec_iq3_s_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_IQ4_XS][i], "mul_mat_vec_iq4_xs_f32_f32_"+std::to_string(i+1), mul_mat_vec_iq4_xs_f32_f32_len, mul_mat_vec_iq4_xs_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_IQ4_NL][i], "mul_mat_vec_iq4_nl_f32_f32_"+std::to_string(i+1), mul_mat_vec_iq4_nl_f32_f32_len, mul_mat_vec_iq4_nl_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_MXFP4][i], "mul_mat_vec_mxfp4_f32_f32_"+std::to_string(i+1), mul_mat_vec_mxfp4_f32_f32_len, mul_mat_vec_mxfp4_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); + for (uint32_t w = 0; w < DMMV_WG_SIZE_COUNT; ++w) { + uint32_t wg_size_subgroup16 = (w == DMMV_WG_SIZE_SUBGROUP) ? subgroup_size_16 : (subgroup_size_16 * 4); + uint32_t wg_size_subgroup = (w == DMMV_WG_SIZE_SUBGROUP) ? 
device->subgroup_size : (device->subgroup_size * 4); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_F32 ][i], "mul_mat_vec_f32_f16_f32_"+std::to_string(i+1), mul_mat_vec_f32_f16_f32_len, mul_mat_vec_f32_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2, i+1}, 1); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_F16 ][i], "mul_mat_vec_f16_f16_f32_"+std::to_string(i+1), mul_mat_vec_f16_f16_f32_len, mul_mat_vec_f16_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2, i+1}, 1); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_BF16][i], "mul_mat_vec_bf16_f16_f32_"+std::to_string(i+1), mul_mat_vec_bf16_f16_f32_len, mul_mat_vec_bf16_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2, i+1}, 1); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q4_0][i], "mul_mat_vec_q4_0_f16_f32_"+std::to_string(i+1), mul_mat_vec_q4_0_f16_f32_len, mul_mat_vec_q4_0_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {device->subgroup_size, 2*rm_stdq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q4_1][i], "mul_mat_vec_q4_1_f16_f32_"+std::to_string(i+1), mul_mat_vec_q4_1_f16_f32_len, mul_mat_vec_q4_1_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {device->subgroup_size, 2*rm_stdq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q5_0][i], "mul_mat_vec_q5_0_f16_f32_"+std::to_string(i+1), mul_mat_vec_q5_0_f16_f32_len, mul_mat_vec_q5_0_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {device->subgroup_size, 2*rm_stdq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q5_1][i], "mul_mat_vec_q5_1_f16_f32_"+std::to_string(i+1), mul_mat_vec_q5_1_f16_f32_len, mul_mat_vec_q5_1_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {device->subgroup_size, 2*rm_stdq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q8_0][i], "mul_mat_vec_q8_0_f16_f32_"+std::to_string(i+1), mul_mat_vec_q8_0_f16_f32_len, mul_mat_vec_q8_0_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1*rm_stdq, 1, 1}, {device->subgroup_size, 1*rm_stdq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q2_K][i], "mul_mat_vec_q2_k_f16_f32_"+std::to_string(i+1), mul_mat_vec_q2_k_f16_f32_len, mul_mat_vec_q2_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {subgroup_size_16, rm_kq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q3_K][i], "mul_mat_vec_q3_k_f16_f32_"+std::to_string(i+1), mul_mat_vec_q3_k_f16_f32_len, mul_mat_vec_q3_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {subgroup_size_16, rm_kq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q4_K][i], "mul_mat_vec_q4_k_f16_f32_"+std::to_string(i+1), mul_mat_vec_q4_k_f16_f32_len, mul_mat_vec_q4_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {subgroup_size_16, rm_kq, i+1}, 1, true); - ggml_vk_create_pipeline(device, 
device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q5_K][i], "mul_mat_vec_q5_k_f16_f32_"+std::to_string(i+1), mul_mat_vec_q5_k_f16_f32_len, mul_mat_vec_q5_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {subgroup_size_16, rm_kq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q6_K][i], "mul_mat_vec_q6_k_f16_f32_"+std::to_string(i+1), mul_mat_vec_q6_k_f16_f32_len, mul_mat_vec_q6_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {subgroup_size_16, rm_kq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_IQ1_S][i], "mul_mat_vec_iq1_s_f16_f32_"+std::to_string(i+1), mul_mat_vec_iq1_s_f16_f32_len, mul_mat_vec_iq1_s_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_IQ1_M][i], "mul_mat_vec_iq1_m_f16_f32_"+std::to_string(i+1), mul_mat_vec_iq1_m_f16_f32_len, mul_mat_vec_iq1_m_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_IQ2_XXS][i], "mul_mat_vec_iq2_xxs_f16_f32_"+std::to_string(i+1), mul_mat_vec_iq2_xxs_f16_f32_len, mul_mat_vec_iq2_xxs_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_IQ2_XS][i], "mul_mat_vec_iq2_xs_f16_f32_"+std::to_string(i+1), mul_mat_vec_iq2_xs_f16_f32_len, mul_mat_vec_iq2_xs_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_IQ2_S][i], "mul_mat_vec_iq2_s_f16_f32_"+std::to_string(i+1), mul_mat_vec_iq2_s_f16_f32_len, mul_mat_vec_iq2_s_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_IQ3_XXS][i], "mul_mat_vec_iq3_xxs_f16_f32_"+std::to_string(i+1), mul_mat_vec_iq3_xxs_f16_f32_len, mul_mat_vec_iq3_xxs_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_IQ3_S][i], "mul_mat_vec_iq3_s_f16_f32_"+std::to_string(i+1), mul_mat_vec_iq3_s_f16_f32_len, mul_mat_vec_iq3_s_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_IQ4_XS][i], "mul_mat_vec_iq4_xs_f16_f32_"+std::to_string(i+1), mul_mat_vec_iq4_xs_f16_f32_len, mul_mat_vec_iq4_xs_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_IQ4_NL][i], "mul_mat_vec_iq4_nl_f16_f32_"+std::to_string(i+1), mul_mat_vec_iq4_nl_f16_f32_len, mul_mat_vec_iq4_nl_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); - ggml_vk_create_pipeline(device, 
device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_MXFP4][i], "mul_mat_vec_mxfp4_f16_f32_"+std::to_string(i+1), mul_mat_vec_mxfp4_f16_f32_len, mul_mat_vec_mxfp4_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {subgroup_size_16, rm_iq, i+1}, 1, true); + const bool s = device->subgroup_add && device->architecture != vk_device_architecture::AMD_GCN; + + for (uint32_t i = 0; i < mul_mat_vec_max_cols; ++i) { + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_F32 ][i], "mul_mat_vec_f32_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_f32_f32_f32_len[s], arr_dmmv_f32_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {wg_size_subgroup, 2, i+1}, 1); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_F16 ][i], "mul_mat_vec_f16_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_f16_f32_f32_len[s], arr_dmmv_f16_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {wg_size_subgroup, 2, i+1}, 1); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_BF16][i], "mul_mat_vec_bf16_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_bf16_f32_f32_len[s], arr_dmmv_bf16_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {wg_size_subgroup, 2, i+1}, 1); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_Q4_0][i], "mul_mat_vec_q4_0_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q4_0_f32_f32_len[s], arr_dmmv_q4_0_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {wg_size_subgroup, 2*rm_stdq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_Q4_1][i], "mul_mat_vec_q4_1_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q4_1_f32_f32_len[s], arr_dmmv_q4_1_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {wg_size_subgroup, 2*rm_stdq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_Q5_0][i], "mul_mat_vec_q5_0_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q5_0_f32_f32_len[s], arr_dmmv_q5_0_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {wg_size_subgroup, 2*rm_stdq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_Q5_1][i], "mul_mat_vec_q5_1_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q5_1_f32_f32_len[s], arr_dmmv_q5_1_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {wg_size_subgroup, 2*rm_stdq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_Q8_0][i], "mul_mat_vec_q8_0_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q8_0_f32_f32_len[s], arr_dmmv_q8_0_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {1*rm_stdq, 1, 1}, {wg_size_subgroup, 1*rm_stdq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_Q2_K][i], "mul_mat_vec_q2_k_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q2_k_f32_f32_len[s], arr_dmmv_q2_k_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {wg_size_subgroup16, rm_kq, i+1}, 1, true); + ggml_vk_create_pipeline(device, 
device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_Q3_K][i], "mul_mat_vec_q3_k_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q3_k_f32_f32_len[s], arr_dmmv_q3_k_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {wg_size_subgroup16, rm_kq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_Q4_K][i], "mul_mat_vec_q4_k_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q4_k_f32_f32_len[s], arr_dmmv_q4_k_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {wg_size_subgroup16, rm_kq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_Q5_K][i], "mul_mat_vec_q5_k_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q5_k_f32_f32_len[s], arr_dmmv_q5_k_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {wg_size_subgroup16, rm_kq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_Q6_K][i], "mul_mat_vec_q6_k_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q6_k_f32_f32_len[s], arr_dmmv_q6_k_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {wg_size_subgroup16, rm_kq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_IQ1_S][i], "mul_mat_vec_iq1_s_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq1_s_f32_f32_len[s], arr_dmmv_iq1_s_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_IQ1_M][i], "mul_mat_vec_iq1_m_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq1_m_f32_f32_len[s], arr_dmmv_iq1_m_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_IQ2_XXS][i], "mul_mat_vec_iq2_xxs_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq2_xxs_f32_f32_len[s], arr_dmmv_iq2_xxs_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_IQ2_XS][i], "mul_mat_vec_iq2_xs_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq2_xs_f32_f32_len[s], arr_dmmv_iq2_xs_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_IQ2_S][i], "mul_mat_vec_iq2_s_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq2_s_f32_f32_len[s], arr_dmmv_iq2_s_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_IQ3_XXS][i], "mul_mat_vec_iq3_xxs_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq3_xxs_f32_f32_len[s], arr_dmmv_iq3_xxs_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_IQ3_S][i], 
"mul_mat_vec_iq3_s_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq3_s_f32_f32_len[s], arr_dmmv_iq3_s_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_IQ4_XS][i], "mul_mat_vec_iq4_xs_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq4_xs_f32_f32_len[s], arr_dmmv_iq4_xs_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_IQ4_NL][i], "mul_mat_vec_iq4_nl_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq4_nl_f32_f32_len[s], arr_dmmv_iq4_nl_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[w][GGML_TYPE_MXFP4][i], "mul_mat_vec_mxfp4_f32_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_mxfp4_f32_f32_len[s], arr_dmmv_mxfp4_f32_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_F32 ][i], "mul_mat_vec_f32_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_f32_f16_f32_len[s], arr_dmmv_f32_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {wg_size_subgroup, 2, i+1}, 1); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_F16 ][i], "mul_mat_vec_f16_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_f16_f16_f32_len[s], arr_dmmv_f16_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {wg_size_subgroup, 2, i+1}, 1); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_BF16][i], "mul_mat_vec_bf16_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_bf16_f16_f32_len[s], arr_dmmv_bf16_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {wg_size_subgroup, 2, i+1}, 1); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_Q4_0][i], "mul_mat_vec_q4_0_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q4_0_f16_f32_len[s], arr_dmmv_q4_0_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {wg_size_subgroup, 2*rm_stdq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_Q4_1][i], "mul_mat_vec_q4_1_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q4_1_f16_f32_len[s], arr_dmmv_q4_1_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {wg_size_subgroup, 2*rm_stdq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_Q5_0][i], "mul_mat_vec_q5_0_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q5_0_f16_f32_len[s], arr_dmmv_q5_0_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {wg_size_subgroup, 2*rm_stdq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_Q5_1][i], "mul_mat_vec_q5_1_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q5_1_f16_f32_len[s], arr_dmmv_q5_1_f16_f32_data[s], "main", 3, 
sizeof(vk_mat_vec_push_constants), {2*rm_stdq, 1, 1}, {wg_size_subgroup, 2*rm_stdq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_Q8_0][i], "mul_mat_vec_q8_0_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q8_0_f16_f32_len[s], arr_dmmv_q8_0_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {1*rm_stdq, 1, 1}, {wg_size_subgroup, 1*rm_stdq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_Q2_K][i], "mul_mat_vec_q2_k_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q2_k_f16_f32_len[s], arr_dmmv_q2_k_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {wg_size_subgroup16, rm_kq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_Q3_K][i], "mul_mat_vec_q3_k_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q3_k_f16_f32_len[s], arr_dmmv_q3_k_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {wg_size_subgroup16, rm_kq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_Q4_K][i], "mul_mat_vec_q4_k_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q4_k_f16_f32_len[s], arr_dmmv_q4_k_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {wg_size_subgroup16, rm_kq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_Q5_K][i], "mul_mat_vec_q5_k_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q5_k_f16_f32_len[s], arr_dmmv_q5_k_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {wg_size_subgroup16, rm_kq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_Q6_K][i], "mul_mat_vec_q6_k_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_q6_k_f16_f32_len[s], arr_dmmv_q6_k_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_kq, 1, 1}, {wg_size_subgroup16, rm_kq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_IQ1_S][i], "mul_mat_vec_iq1_s_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq1_s_f16_f32_len[s], arr_dmmv_iq1_s_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_IQ1_M][i], "mul_mat_vec_iq1_m_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq1_m_f16_f32_len[s], arr_dmmv_iq1_m_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_IQ2_XXS][i], "mul_mat_vec_iq2_xxs_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq2_xxs_f16_f32_len[s], arr_dmmv_iq2_xxs_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_IQ2_XS][i], "mul_mat_vec_iq2_xs_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq2_xs_f16_f32_len[s], arr_dmmv_iq2_xs_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + 
ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_IQ2_S][i], "mul_mat_vec_iq2_s_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq2_s_f16_f32_len[s], arr_dmmv_iq2_s_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_IQ3_XXS][i], "mul_mat_vec_iq3_xxs_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq3_xxs_f16_f32_len[s], arr_dmmv_iq3_xxs_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_IQ3_S][i], "mul_mat_vec_iq3_s_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq3_s_f16_f32_len[s], arr_dmmv_iq3_s_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_IQ4_XS][i], "mul_mat_vec_iq4_xs_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq4_xs_f16_f32_len[s], arr_dmmv_iq4_xs_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_IQ4_NL][i], "mul_mat_vec_iq4_nl_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_iq4_nl_f16_f32_len[s], arr_dmmv_iq4_nl_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[w][GGML_TYPE_MXFP4][i], "mul_mat_vec_mxfp4_f16_f32_"+std::to_string(w)+"_"+std::to_string(i+1), arr_dmmv_mxfp4_f16_f32_len[s], arr_dmmv_mxfp4_f16_f32_data[s], "main", 3, sizeof(vk_mat_vec_push_constants), {rm_iq, 1, 1}, {wg_size_subgroup16, rm_iq, i+1}, 1, true); + } } ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_F32 ], "mul_mat_vec_id_f32_f32", mul_mat_vec_id_f32_f32_len, mul_mat_vec_id_f32_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {2, 1, 1}, {device->subgroup_size, 2}, 1); @@ -3011,6 +3024,12 @@ static void ggml_vk_load_shaders(vk_device& device) { CREATE_BINARY(div, _norepeat, {1}) #undef CREATE_BINARY + if (device->multi_add) { + for (uint32_t i = 0; i < MAX_FUSED_ADDS; ++i) { + ggml_vk_create_pipeline(device, device->pipeline_multi_add[i], "multi_add_f32_" + std::to_string(i+1), multi_add_f32_len, multi_add_f32_data, "main", MAX_PARAMETER_COUNT, sizeof(vk_op_multi_add_push_constants), {512, 1, 1}, {i+2}, 1); + } + } + ggml_vk_create_pipeline(device, device->pipeline_add_id_f32, "add_id_f32", add_id_f32_len, add_id_f32_data, "main", 4, sizeof(vk_op_add_id_push_constants), {1, 1, 1}, {}, 1); ggml_vk_create_pipeline(device, device->pipeline_acc_f32, "acc_f32", acc_f32_len, acc_f32_data, "main", 3, sizeof(vk_op_binary_push_constants), {512, 1, 1}, {}, 1); @@ -3026,6 +3045,7 @@ static void ggml_vk_load_shaders(vk_device& device) { ggml_vk_create_pipeline(device, device->pipeline_scale_f32, "scale_f32", scale_f32_len, scale_f32_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); ggml_vk_create_pipeline(device, device->pipeline_sqr_f32, "sqr_f32", sqr_f32_len, sqr_f32_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); 
+ ggml_vk_create_pipeline(device, device->pipeline_sqrt_f32, "sqrt_f32", sqrt_f32_len, sqrt_f32_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); ggml_vk_create_pipeline(device, device->pipeline_sin_f32, "sin_f32", sin_f32_len, sin_f32_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); ggml_vk_create_pipeline(device, device->pipeline_cos_f32, "cos_f32", cos_f32_len, cos_f32_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); @@ -3096,7 +3116,9 @@ static void ggml_vk_load_shaders(vk_device& device) { ggml_vk_create_pipeline(device, device->pipeline_rope_vision_f16, "rope_vision_f16", rope_vision_f16_len, rope_vision_f16_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); } - ggml_vk_create_pipeline(device, device->pipeline_argsort_f32, "argsort_f32", argsort_f32_len, argsort_f32_data, "main", 2, sizeof(vk_op_argsort_push_constants), {1024, 1, 1}, {}, 1); + for (uint32_t i = 0; i < num_argsort_pipelines; ++i) { + ggml_vk_create_pipeline(device, device->pipeline_argsort_f32[i], "argsort_f32_"+std::to_string(i), argsort_f32_len, argsort_f32_data, "main", 2, sizeof(vk_op_argsort_push_constants), {1u<<i, 1, 1}, {1u<<i, i}, 1); + } ggml_vk_create_pipeline(device, device->pipeline_argmax_f32, "argmax_f32", argmax_f32_len, argmax_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, { device->subgroup_size }, 1); @@ -3123,6 +3145,8 @@ static void ggml_vk_load_shaders(vk_device& device) { ggml_vk_create_pipeline(device, device->pipeline_opt_step_adamw_f32, "opt_step_adamw_f32", opt_step_adamw_f32_len, opt_step_adamw_f32_data, "main", 5, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(device, device->pipeline_opt_step_sgd_f32, "opt_step_sgd_f32", opt_step_sgd_f32_len, opt_step_sgd_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + // conv2d for (uint32_t s = 0; s < CONV_SHAPE_COUNT; ++s) { uint32_t conv2d_WG_SIZE = 256; @@ -3548,6 +3572,12 @@ static vk_device ggml_vk_get_device(size_t idx) { device->pipeline_robustness = pl_robustness_features.pipelineRobustness; + device->multi_add = vk12_props.shaderRoundingModeRTEFloat16 && + device->properties.limits.maxPushConstantsSize >= sizeof(vk_op_multi_add_push_constants) && + vk12_features.runtimeDescriptorArray && + device->vendor_id != VK_VENDOR_ID_INTEL && + getenv("GGML_VK_DISABLE_MULTI_ADD") == nullptr; + if (device->subgroup_size_control) { device->subgroup_min_size = subgroup_size_control_props.minSubgroupSize; device->subgroup_max_size = subgroup_size_control_props.maxSubgroupSize; @@ -4372,7 +4402,7 @@ static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_pipeline(ggml_backend_vk_conte return (ctx->device->fp16 && prec == GGML_PREC_DEFAULT) ? ctx->device->pipeline_dequant_mul_mat_mat[src0_type].f16acc : ctx->device->pipeline_dequant_mul_mat_mat[src0_type].f32acc; } -static vk_pipeline ggml_vk_get_dequantize_mul_mat_vec(ggml_backend_vk_context * ctx, ggml_type a_type, ggml_type b_type, uint32_t num_cols) { +static vk_pipeline ggml_vk_get_dequantize_mul_mat_vec(ggml_backend_vk_context * ctx, ggml_type a_type, ggml_type b_type, uint32_t num_cols, uint32_t m, uint32_t k) { VK_LOG_DEBUG("ggml_vk_get_dequantize_mul_mat_vec()"); GGML_ASSERT(b_type == GGML_TYPE_F32 || b_type == GGML_TYPE_F16); GGML_ASSERT(num_cols >= 1 && num_cols <= mul_mat_vec_max_cols); @@ -4406,7 +4436,24 @@ static vk_pipeline ggml_vk_get_dequantize_mul_mat_vec(ggml_backend_vk_context * return nullptr; } - return b_type == GGML_TYPE_F32 ?
ctx->device->pipeline_dequant_mul_mat_vec_f32_f32[a_type][num_cols-1] : ctx->device->pipeline_dequant_mul_mat_vec_f16_f32[a_type][num_cols-1]; + // heuristic to choose workgroup size + uint32_t dmmv_wg = DMMV_WG_SIZE_SUBGROUP; + if (ctx->device->vendor_id == VK_VENDOR_ID_NVIDIA || ctx->device->vendor_id == VK_VENDOR_ID_INTEL) { + // Prefer larger workgroups when M is small, to spread the work out more + // and keep more SMs busy. + // q6_k seems to prefer small workgroup size even for "medium" values of M. + if (a_type == GGML_TYPE_Q6_K) { + if (m < 4096 && k >= 1024) { + dmmv_wg = DMMV_WG_SIZE_LARGE; + } + } else { + if (m <= 8192 && k >= 1024) { + dmmv_wg = DMMV_WG_SIZE_LARGE; + } + } + } + + return b_type == GGML_TYPE_F32 ? ctx->device->pipeline_dequant_mul_mat_vec_f32_f32[dmmv_wg][a_type][num_cols-1] : ctx->device->pipeline_dequant_mul_mat_vec_f16_f32[dmmv_wg][a_type][num_cols-1]; } static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_id_pipeline(ggml_backend_vk_context * ctx, ggml_type src0_type, ggml_type src1_type, ggml_prec prec) { @@ -4461,7 +4508,17 @@ static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_id_pipeline(ggml_backend_vk_co return nullptr; } - return ctx->device->fp16 ? ctx->device->pipeline_dequant_mul_mat_mat_id[src0_type].f16acc : ctx->device->pipeline_dequant_mul_mat_mat_id[src0_type].f32acc; + // XXX TODO 'prec' is not actually allowed in mul_mat_id. + bool prefer_fp16acc = ctx->device->fp16 /*&& prec == GGML_PREC_DEFAULT*/; + bool support_fp16acc = ctx->device->pipeline_dequant_mul_mat_mat_id[src0_type].f16acc != nullptr; + bool support_fp32acc = ctx->device->pipeline_dequant_mul_mat_mat_id[src0_type].f32acc != nullptr; + + if (support_fp16acc && (prefer_fp16acc || !support_fp32acc)) { + return ctx->device->pipeline_dequant_mul_mat_mat_id[src0_type].f16acc; + } else { + GGML_ASSERT(support_fp32acc); + return ctx->device->pipeline_dequant_mul_mat_mat_id[src0_type].f32acc; + } } static vk_pipeline ggml_vk_get_dequantize_mul_mat_vec_id(ggml_backend_vk_context * ctx, ggml_type a_type, ggml_type b_type) { @@ -5703,7 +5760,7 @@ static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context& } else { to_fp16_vk_1 = ggml_vk_get_to_fp16(ctx, src1->type); } - vk_pipeline dmmv = ggml_vk_get_dequantize_mul_mat_vec(ctx, src0->type, src1->type, ne11); + vk_pipeline dmmv = ggml_vk_get_dequantize_mul_mat_vec(ctx, src0->type, src1->type, ne11, ne20, ne00); GGML_ASSERT(!qx_needs_dequant || to_fp16_vk_0 != nullptr); // NOLINT GGML_ASSERT(!qy_needs_dequant || to_fp16_vk_1 != nullptr); // NOLINT GGML_ASSERT(dmmv != nullptr); @@ -6892,6 +6949,9 @@ static vk_pipeline ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const switch (op) { case GGML_OP_ADD: { + if (ctx->num_additional_fused_ops > 0) { + return ctx->device->pipeline_multi_add[ctx->num_additional_fused_ops]; + } auto pipelines = ggml_are_same_shape(src0, src1) ? 
ctx->device->pipeline_add_norepeat : ctx->device->pipeline_add; return pipelines[src0->type == GGML_TYPE_F16][src1->type == GGML_TYPE_F16][dst->type == GGML_TYPE_F16]; } @@ -6953,6 +7013,11 @@ static vk_pipeline ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const return ctx->device->pipeline_sqr_f32; } return nullptr; + case GGML_OP_SQRT: + if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { + return ctx->device->pipeline_sqrt_f32; + } + return nullptr; case GGML_OP_SIN: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { return ctx->device->pipeline_sin_f32; @@ -7136,7 +7201,8 @@ static vk_pipeline ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const } case GGML_OP_ARGSORT: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_I32) { - return ctx->device->pipeline_argsort_f32; + uint32_t idx = (uint32_t)ceilf(log2f(float(dst->ne[0]))); + return ctx->device->pipeline_argsort_f32[idx]; } return nullptr; case GGML_OP_SUM: @@ -7193,6 +7259,11 @@ static vk_pipeline ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const return ctx->device->pipeline_opt_step_adamw_f32; } return nullptr; + case GGML_OP_OPT_STEP_SGD: + if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { + return ctx->device->pipeline_opt_step_sgd_f32; + } + return nullptr; case GGML_OP_LEAKY_RELU: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { return ctx->device->pipeline_leaky_relu_f32; @@ -7256,6 +7327,7 @@ static bool ggml_vk_op_supports_incontiguous(ggml_op op) { case GGML_OP_CONCAT: case GGML_OP_UPSCALE: case GGML_OP_SQR: + case GGML_OP_SQRT: case GGML_OP_SIN: case GGML_OP_COS: case GGML_OP_CLAMP: @@ -7561,6 +7633,7 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context& subctx, co case GGML_OP_MUL: case GGML_OP_SCALE: case GGML_OP_SQR: + case GGML_OP_SQRT: case GGML_OP_SIN: case GGML_OP_COS: case GGML_OP_CLAMP: @@ -7692,6 +7765,10 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context& subctx, co ggml_vk_buffer_memset_async(subctx, d_D, d_buf_offset, 0, d_sz); ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { vk_subbuffer{ d_X, x_buf_offset, x_sz }, vk_subbuffer{ d_Y, y_buf_offset, y_sz }, vk_subbuffer{ d_D, d_buf_offset, d_sz } }, pc, elements); + } else if (op == GGML_OP_OPT_STEP_SGD) { + // OPT_STEP_SGD works on src0, it does not need dst + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { vk_subbuffer{ d_X, x_buf_offset, x_sz }, vk_subbuffer{ d_Y, y_buf_offset, y_sz }, vk_subbuffer{ d_Z, z_buf_offset, z_sz } }, pc, elements); } else if (use_src2) { ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { vk_subbuffer{ d_X, x_buf_offset, x_sz }, vk_subbuffer{ d_Y, y_buf_offset, y_sz }, vk_subbuffer{ d_Z, z_buf_offset, z_sz }, vk_subbuffer{ d_D, d_buf_offset, d_sz } }, pc, elements); @@ -7739,6 +7816,107 @@ static void ggml_vk_acc(ggml_backend_vk_context * ctx, vk_context& subctx, const }, dryrun); } +static void ggml_vk_multi_add(ggml_backend_vk_context * ctx, vk_context& subctx, ggml_cgraph * cgraph, int node_idx, bool dryrun = false) { + const ggml_tensor *first_node = cgraph->nodes[node_idx]; + const ggml_tensor *dst = cgraph->nodes[node_idx + ctx->num_additional_fused_ops]; + + // Make a list of all the tensors used by the op. + // Last element of the list is the dest tensor. 
+ const ggml_tensor *tensors[MAX_PARAMETER_COUNT]; + uint32_t num_srcs = ctx->num_additional_fused_ops + 2; + uint32_t num_tensors = num_srcs + 1; + GGML_ASSERT(num_tensors <= MAX_PARAMETER_COUNT); + + tensors[0] = first_node->src[0]; + tensors[1] = first_node->src[1]; + for (int32_t i = 0; i < ctx->num_additional_fused_ops; ++i) { + // check whether the previous result is src[0] or src[1] + if (cgraph->nodes[node_idx + i] == cgraph->nodes[node_idx + i + 1]->src[0]) { + tensors[i+2] = cgraph->nodes[node_idx + i + 1]->src[1]; + } else { + tensors[i+2] = cgraph->nodes[node_idx + i + 1]->src[0]; + } + } + tensors[num_srcs] = dst; + + vk_op_multi_add_push_constants pc; + pc.ne20 = (uint32_t)dst->ne[0]; + pc.ne21 = (uint32_t)dst->ne[1]; + pc.ne22 = (uint32_t)dst->ne[2]; + pc.ne23 = (uint32_t)dst->ne[3]; + + for (uint32_t i = 0; i < num_tensors; ++i) { + const ggml_tensor *t = tensors[i]; + pc.nb[i][0] = (uint32_t)t->nb[0] / sizeof(float); + pc.nb[i][1] = (uint32_t)t->nb[1] / sizeof(float); + pc.nb[i][2] = (uint32_t)t->nb[2] / sizeof(float); + pc.nb[i][3] = (uint32_t)t->nb[3] / sizeof(float); + } + + vk_pipeline pipeline = ctx->device->pipeline_multi_add[ctx->num_additional_fused_ops]; + + if (pipeline == nullptr) { + std::cerr << "ggml_vulkan: Error: Missing multi_add"; + GGML_ABORT("fatal error"); + } + + if (dryrun) { + ggml_pipeline_request_descriptor_sets(ctx, pipeline, 1); + return; + } + + ggml_backend_vk_buffer_context * buf_ctx[MAX_PARAMETER_COUNT]; + vk_buffer buf[MAX_PARAMETER_COUNT]; + size_t offset[MAX_PARAMETER_COUNT]; + bool uma[MAX_PARAMETER_COUNT]; + + for (uint32_t i = 0; i < num_tensors; ++i) { + buf_ctx[i] = (ggml_backend_vk_buffer_context *)tensors[i]->buffer->context; + buf[i] = nullptr; + offset[i] = 0; + uma[i] = false; + + if (ctx->device->uma) { + ggml_vk_host_get(ctx->device, tensors[i]->data, buf[i], offset[i]); + uma[i] = buf[i] != nullptr; + } + if (!uma[i]) { + buf[i] = buf_ctx[i]->dev_buffer; + offset[i] = vk_tensor_offset(tensors[i]) + tensors[i]->view_offs; + } + GGML_ASSERT(buf[i] != nullptr); + } + // If any remaining descriptors are unused, just point them at src[0] + for (uint32_t i = num_tensors; i < MAX_PARAMETER_COUNT; ++i) { + buf[i] = buf[0]; + offset[i] = 0; + } + + std::array<uint32_t, 3> elements; + + uint32_t ne = ggml_nelements(dst); + if (ne > 262144) { + elements = { 512, 512, CEIL_DIV(ne, 262144) }; + } else if (ne > 512) { + elements = { 512, CEIL_DIV(ne, 512), 1 }; + } else { + elements = { ne, 1, 1 }; + } + + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, + { + vk_subbuffer{ buf[0], offset[0], VK_WHOLE_SIZE }, + vk_subbuffer{ buf[1], offset[1], VK_WHOLE_SIZE }, + vk_subbuffer{ buf[2], offset[2], VK_WHOLE_SIZE }, + vk_subbuffer{ buf[3], offset[3], VK_WHOLE_SIZE }, + vk_subbuffer{ buf[4], offset[4], VK_WHOLE_SIZE }, + vk_subbuffer{ buf[5], offset[5], VK_WHOLE_SIZE }, + vk_subbuffer{ buf[6], offset[6], VK_WHOLE_SIZE }, + vk_subbuffer{ buf[7], offset[7], VK_WHOLE_SIZE }, + }, pc, elements); +} + static void ggml_vk_add(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, bool dryrun = false) { const uint32_t src0_type_size = ggml_type_size(src0->type); const uint32_t src1_type_size = ggml_type_size(src1->type); @@ -8045,6 +8223,12 @@ static void ggml_vk_opt_step_adamw(ggml_backend_vk_context * ctx, vk_context& su ); } +static void ggml_vk_opt_step_sgd(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_tensor * src0, const ggml_tensor * src1, const
ggml_tensor * src2, ggml_tensor * dst, bool dryrun = false) { + const size_t n = ggml_nelements(dst->src[0]); + + ggml_vk_op_f32(ctx, subctx, src0, src1, src2, dst, GGML_OP_OPT_STEP_SGD, { (uint32_t)n, 0, 0.0f, 0.0f }, dryrun); +} + static void ggml_vk_concat(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, bool dryrun = false) { int * op_params = (int *)dst->op_params; @@ -8097,6 +8281,10 @@ static void ggml_vk_sqr(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_SQR, vk_op_unary_push_constants_init(src0, dst), dryrun); } +static void ggml_vk_sqrt(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_tensor * src0, ggml_tensor * dst, bool dryrun = false) { + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_SQRT, vk_op_unary_push_constants_init(src0, dst), dryrun); +} + static void ggml_vk_sin(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_tensor * src0, ggml_tensor * dst, bool dryrun = false) { ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_SIN, vk_op_unary_push_constants_init(src0, dst), dryrun); } @@ -8336,7 +8524,7 @@ static void ggml_vk_rope(ggml_backend_vk_context * ctx, vk_context& subctx, cons (uint32_t)src0->ne[0], (uint32_t)n_dims, freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, {corr_dims[0], corr_dims[1]}, theta_scale, src2 != nullptr, (uint32_t)src0->ne[2], s1, s2, - sections[0], sections[1], sections[2], sections[3], backprop + { sections[0], sections[1], sections[2], sections[3] }, backprop }, dryrun); } @@ -8345,16 +8533,8 @@ static void ggml_vk_argsort(ggml_backend_vk_context * ctx, vk_context& subctx, c uint32_t ncols = src0->ne[0]; - uint32_t ncols_pad = 1; - while (ncols_pad < ncols) { - ncols_pad *= 2; - } - - GGML_ASSERT(ncols_pad <= 1024); - ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_ARGSORT, { ncols, - ncols_pad, op_params[0], }, dryrun); } @@ -8368,7 +8548,7 @@ static void ggml_vk_sum_rows(ggml_backend_vk_context * ctx, vk_context& subctx, } static void ggml_vk_argmax(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_tensor * src0, ggml_tensor * dst, bool dryrun = false) { - ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_ARGMAX, { (uint32_t)src0->ne[0], 0, 0.0f, 0.0f }, dryrun); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_ARGMAX, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], 0.0f, 0.0f }, dryrun); } static void ggml_vk_count_equal(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, bool dryrun = false) { @@ -9560,6 +9740,7 @@ static bool ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_cgraph * cgr case GGML_OP_UPSCALE: case GGML_OP_SCALE: case GGML_OP_SQR: + case GGML_OP_SQRT: case GGML_OP_SIN: case GGML_OP_COS: case GGML_OP_CLAMP: @@ -9598,11 +9779,11 @@ static bool ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_cgraph * cgr case GGML_OP_LEAKY_RELU: case GGML_OP_FLASH_ATTN_EXT: case GGML_OP_OPT_STEP_ADAMW: + case GGML_OP_OPT_STEP_SGD: break; default: std::cerr << "ggml_vulkan: Error: Missing op: " << ggml_op_name(node->op) << std::endl; GGML_ABORT("fatal error"); - return false; } vk_context compute_ctx; @@ -9629,6 +9810,7 @@ static bool ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_cgraph * cgr case GGML_OP_UPSCALE: case GGML_OP_SCALE: case GGML_OP_SQR: + case 
GGML_OP_SQRT: case GGML_OP_SIN: case GGML_OP_COS: case GGML_OP_CLAMP: @@ -9662,6 +9844,7 @@ static bool ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_cgraph * cgr case GGML_OP_CONV_2D: case GGML_OP_CONV_2D_DW: case GGML_OP_LEAKY_RELU: + case GGML_OP_OPT_STEP_SGD: { // These operations all go through ggml_vk_op_f32, so short-circuit and // do the only thing needed for the dryrun. @@ -9692,8 +9875,11 @@ static bool ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_cgraph * cgr break; case GGML_OP_ADD: - ggml_vk_add(ctx, compute_ctx, src0, src1, node, dryrun); - + if (ctx->num_additional_fused_ops) { + ggml_vk_multi_add(ctx, compute_ctx, cgraph, node_idx, dryrun); + } else { + ggml_vk_add(ctx, compute_ctx, src0, src1, node, dryrun); + } break; case GGML_OP_SUB: ggml_vk_sub(ctx, compute_ctx, src0, src1, node, dryrun); @@ -9726,6 +9912,10 @@ static bool ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_cgraph * cgr case GGML_OP_SQR: ggml_vk_sqr(ctx, compute_ctx, src0, node, dryrun); + break; + case GGML_OP_SQRT: + ggml_vk_sqrt(ctx, compute_ctx, src0, node, dryrun); + break; case GGML_OP_SIN: ggml_vk_sin(ctx, compute_ctx, src0, node, dryrun); @@ -9911,6 +10101,11 @@ static bool ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_cgraph * cgr case GGML_OP_OPT_STEP_ADAMW: ggml_vk_opt_step_adamw(ctx, compute_ctx, node, dryrun); + break; + + case GGML_OP_OPT_STEP_SGD: + ggml_vk_opt_step_sgd(ctx, compute_ctx, src0, src1, src2, node, dryrun); + break; default: return false; @@ -9972,6 +10167,7 @@ static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_cgraph * case GGML_OP_UPSCALE: case GGML_OP_SCALE: case GGML_OP_SQR: + case GGML_OP_SQRT: case GGML_OP_SIN: case GGML_OP_COS: case GGML_OP_CLAMP: @@ -10014,8 +10210,8 @@ static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_cgraph * case GGML_OP_REPEAT: case GGML_OP_REPEAT_BACK: case GGML_OP_OPT_STEP_ADAMW: + case GGML_OP_OPT_STEP_SGD: buf = tensor->buffer; - break; case GGML_OP_UNARY: switch (ggml_get_unary_op(tensor)) { @@ -10570,6 +10766,58 @@ static bool ggml_vk_can_fuse(const struct ggml_cgraph * cgraph, int node_idx, st return true; } +static uint32_t ggml_vk_fuse_multi_add(ggml_backend_vk_context * ctx, const struct ggml_cgraph * cgraph, int node_idx) { + + const ggml_tensor *first_node = cgraph->nodes[node_idx]; + if (first_node->op != GGML_OP_ADD) { + return 0; + } + + if (!ctx->device->multi_add) { + return 0; + } + + int32_t num_adds = 1; + while (node_idx + num_adds < cgraph->n_nodes && + cgraph->nodes[node_idx + num_adds]->op == GGML_OP_ADD && + num_adds < MAX_FUSED_ADDS) { + num_adds++; + } + + // The shader currently requires same shapes (but different strides are allowed), + // everything f32, and no misalignment + for (int32_t i = 0; i < num_adds; ++i) { + const ggml_tensor *next_node = cgraph->nodes[node_idx + i]; + if (!ggml_are_same_shape(first_node, next_node->src[0]) || + !ggml_are_same_shape(first_node, next_node->src[1]) || + next_node->type != GGML_TYPE_F32 || + next_node->src[0]->type != GGML_TYPE_F32 || + next_node->src[1]->type != GGML_TYPE_F32 || + get_misalign_bytes(ctx, next_node) || + get_misalign_bytes(ctx, next_node->src[0]) || + get_misalign_bytes(ctx, next_node->src[1])) { + num_adds = i; + } + } + + // Verify we can fuse these + ggml_op adds[MAX_FUSED_ADDS]; + for (int32_t i = 0; i < num_adds; ++i) { + adds[i] = GGML_OP_ADD; + } + + // decrease num_adds if they can't all be fused + while (num_adds > 1 && !ggml_can_fuse(cgraph, node_idx, adds, num_adds)) { + 
num_adds--; + } + + // a single add is not "fused", so just return zero + if (num_adds == 1) { + return 0; + } + return num_adds; +} + static ggml_status ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { VK_LOG_DEBUG("ggml_backend_vk_graph_compute(" << cgraph->n_nodes << " nodes)"); ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; @@ -10583,8 +10831,13 @@ static ggml_status ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml_cg uint64_t total_mat_mul_bytes = 0; for (int i = 0; i < cgraph->n_nodes; i++) { - if (!ctx->device->disable_fusion && ggml_vk_can_fuse(cgraph, i, { GGML_OP_RMS_NORM, GGML_OP_MUL })) { - ctx->num_additional_fused_ops = 1; + if (!ctx->device->disable_fusion) { + uint32_t num_adds = ggml_vk_fuse_multi_add(ctx, cgraph, i); + if (num_adds) { + ctx->num_additional_fused_ops = num_adds - 1; + } else if (ggml_vk_can_fuse(cgraph, i, { GGML_OP_RMS_NORM, GGML_OP_MUL })) { + ctx->num_additional_fused_ops = 1; + } } ggml_vk_build_graph(ctx, cgraph, i, nullptr, 0, true, false, false, false); if (cgraph->nodes[i]->op == GGML_OP_MUL_MAT || cgraph->nodes[i]->op == GGML_OP_MUL_MAT_ID) { @@ -10659,8 +10912,13 @@ static ggml_status ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml_cg mul_mat_bytes += ggml_nbytes(cgraph->nodes[i]->src[0]); } - if (!ctx->device->disable_fusion && ggml_vk_can_fuse(cgraph, i, { GGML_OP_RMS_NORM, GGML_OP_MUL })) { - ctx->num_additional_fused_ops = 1; + if (!ctx->device->disable_fusion) { + uint32_t num_adds = ggml_vk_fuse_multi_add(ctx, cgraph, i); + if (num_adds) { + ctx->num_additional_fused_ops = num_adds - 1; + } else if (ggml_vk_can_fuse(cgraph, i, { GGML_OP_RMS_NORM, GGML_OP_MUL })) { + ctx->num_additional_fused_ops = 1; + } } // Signal the almost_ready fence when the graph is mostly complete (< 20% remaining) @@ -10881,7 +11139,6 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm default: return false; } - break; case GGML_OP_GLU: switch (ggml_get_glu_op(op)) { case GGML_GLU_OP_GEGLU: @@ -10897,7 +11154,6 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm default: return false; } - break; case GGML_OP_MUL_MAT: case GGML_OP_MUL_MAT_ID: { @@ -10961,7 +11217,7 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm } return true; - } break; + } case GGML_OP_FLASH_ATTN_EXT: { ggml_backend_vk_device_context * ctx = (ggml_backend_vk_device_context *)dev->context; @@ -11051,7 +11307,7 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm default: return false; } - } break; + } case GGML_OP_SET_ROWS: { switch (op->type) { @@ -11068,7 +11324,7 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm default: return false; } - } break; + } case GGML_OP_CONT: case GGML_OP_CPY: case GGML_OP_DUP: @@ -11120,7 +11376,7 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm return true; } return false; - } break; + } case GGML_OP_REPEAT: return ggml_type_size(op->type) == sizeof(float) && ggml_type_size(op->src[0]->type) == sizeof(float); case GGML_OP_REPEAT_BACK: @@ -11151,10 +11407,16 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm case GGML_OP_SILU_BACK: case GGML_OP_RMS_NORM_BACK: case GGML_OP_SQR: + case GGML_OP_SQRT: case GGML_OP_SIN: case GGML_OP_COS: case GGML_OP_CLAMP: + case GGML_OP_LEAKY_RELU: + case GGML_OP_OPT_STEP_ADAMW: + case GGML_OP_OPT_STEP_SGD: return 
op->src[0]->type == GGML_TYPE_F32; + case GGML_OP_ARGSORT: + return op->ne[0] <= max_argsort_cols; case GGML_OP_UPSCALE: case GGML_OP_ACC: case GGML_OP_CONCAT: @@ -11164,7 +11426,6 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm case GGML_OP_DIAG_MASK_INF: case GGML_OP_SOFT_MAX: case GGML_OP_SOFT_MAX_BACK: - case GGML_OP_ARGSORT: case GGML_OP_SUM: case GGML_OP_SUM_ROWS: case GGML_OP_ARGMAX: @@ -11175,8 +11436,6 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm case GGML_OP_POOL_2D: case GGML_OP_RWKV_WKV6: case GGML_OP_RWKV_WKV7: - case GGML_OP_LEAKY_RELU: - case GGML_OP_OPT_STEP_ADAMW: return true; case GGML_OP_CONV_TRANSPOSE_1D: return op->src[0]->type == GGML_TYPE_F32 && op->src[1]->type == GGML_TYPE_F32; @@ -11593,6 +11852,8 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_cgraph * tensor_clone = ggml_scale_bias(ggml_ctx, src_clone[0], params[0], params[1]); } else if (tensor->op == GGML_OP_SQR) { tensor_clone = ggml_sqr(ggml_ctx, src_clone[0]); + } else if (tensor->op == GGML_OP_SQRT) { + tensor_clone = ggml_sqrt(ggml_ctx, src_clone[0]); } else if (tensor->op == GGML_OP_SIN) { tensor_clone = ggml_sin(ggml_ctx, src_clone[0]); } else if (tensor->op == GGML_OP_COS) { @@ -11694,6 +11955,8 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_cgraph * } else { tensor_clone = ggml_glu_split(ggml_ctx, src_clone[0], src_clone[1], (ggml_glu_op) tensor->op_params[0]); } + ggml_set_op_params_i32(tensor_clone, 2, ggml_get_op_params_i32(tensor, 2)); + ggml_set_op_params_i32(tensor_clone, 3, ggml_get_op_params_i32(tensor, 3)); } else if (tensor->op == GGML_OP_CPY || tensor->op == GGML_OP_DUP) { if (src1 == nullptr) { tensor_clone = ggml_dup(ggml_ctx, src_clone[0]); @@ -11774,6 +12037,12 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_cgraph * src_clone[0]->flags = src0->flags; tensor_clone = ggml_opt_step_adamw(ggml_ctx, src_clone[0], src_clone[1], src_clone[2], src_clone[3], src_clone[4]); + } else if (tensor->op == GGML_OP_OPT_STEP_SGD) { + src_clone[0]->flags = src0->flags; + tensor_clone = ggml_opt_step_sgd(ggml_ctx, src_clone[0], src_clone[1], + src_clone[2]); + } else if (tensor->op == GGML_OP_ADD_ID) { + tensor_clone = ggml_add_id(ggml_ctx, src_clone[0], src_clone[1], src_clone[2]); } else { std::cerr << "Missing vk_check_results OP: " << ggml_op_name(tensor->op) << std::endl; diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/argmax.comp b/ggml/src/ggml-vulkan/vulkan-shaders/argmax.comp index eaf4da341e..a1d4c240dd 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/argmax.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/argmax.comp @@ -5,6 +5,8 @@ #extension GL_EXT_control_flow_attributes : enable +#define FLT_MAX 3.402823466e+38F + layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; @@ -19,19 +21,26 @@ void main() { const uint row = gl_WorkGroupID.z * 262144 + gl_WorkGroupID.y * 512 + gl_WorkGroupID.x; const uint col = gl_LocalInvocationID.x; - if (col >= p.KX) { + if (row >= p.KY) { return; } - A_TYPE amax = data_a[row*p.KX + col]; - tmp[col] = col; + + A_TYPE amax = -FLT_MAX; + uint acol = col; + + if (col < p.KX) { + amax = data_a[row*p.KX + col]; + } for (uint i = col + BLOCK_SIZE; i < p.KX; i += BLOCK_SIZE) { A_TYPE val = data_a[row*p.KX + i]; if (val > amax) { amax = val; - tmp[col] = i; + acol = i; } } + + tmp[col] = acol; tmpmax[col] = amax; barrier(); diff --git 
a/ggml/src/ggml-vulkan/vulkan-shaders/argsort.comp b/ggml/src/ggml-vulkan/vulkan-shaders/argsort.comp index d4fa45b1e1..dc53a401e0 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/argsort.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/argsort.comp @@ -1,22 +1,24 @@ #version 450 +#extension GL_EXT_control_flow_attributes : enable #include "types.comp" -#define BLOCK_SIZE 1024 +layout(constant_id = 0) const int BLOCK_SIZE = 1024; +layout(constant_id = 1) const int BLOCK_SIZE_LOG2 = 10; #define ASC 0 -layout(local_size_x = BLOCK_SIZE, local_size_y = 1, local_size_z = 1) in; +layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) buffer D {int data_d[];}; layout (push_constant) uniform parameter { uint ncols; - uint ncols_pad; uint order; } p; shared int dst_row[BLOCK_SIZE]; +shared A_TYPE a_sh[BLOCK_SIZE]; void swap(uint idx0, uint idx1) { int tmp = dst_row[idx0]; @@ -24,7 +26,7 @@ void swap(uint idx0, uint idx1) { dst_row[idx1] = tmp; } -void main() { +void argsort(bool needs_bounds_check) { // bitonic sort const int col = int(gl_LocalInvocationID.x); const uint row = gl_WorkGroupID.y; @@ -32,38 +34,46 @@ void main() { const uint row_offset = row * p.ncols; // initialize indices - if (col < p.ncols_pad) { - dst_row[col] = col; - } + dst_row[col] = col; + a_sh[col] = data_a[row_offset + col]; barrier(); - for (uint k = 2; k <= p.ncols_pad; k *= 2) { - for (uint j = k / 2; j > 0; j /= 2) { - const uint ixj = col ^ j; - if (col < p.ncols_pad && ixj > col) { - if ((col & k) == 0) { - if (dst_row[col] >= p.ncols || - (dst_row[ixj] < p.ncols && (p.order == ASC ? - data_a[row_offset + dst_row[col]] > data_a[row_offset + dst_row[ixj]] : - data_a[row_offset + dst_row[col]] < data_a[row_offset + dst_row[ixj]])) - ) { - swap(col, ixj); - } - } else { - if (dst_row[ixj] >= p.ncols || - (dst_row[col] < p.ncols && (p.order == ASC ? - data_a[row_offset + dst_row[col]] < data_a[row_offset + dst_row[ixj]] : - data_a[row_offset + dst_row[col]] > data_a[row_offset + dst_row[ixj]])) - ) { - swap(col, ixj); - } - } + uint num_outer_loop_iters = BLOCK_SIZE_LOG2; + [[unroll]] for (uint k = 2, outer_idx = 0; outer_idx < num_outer_loop_iters; k *= 2, outer_idx++) { + uint num_inner_loop_iters = outer_idx + 1; + [[unroll]] for (uint j = k / 2, inner_idx = 0; inner_idx < num_inner_loop_iters; j /= 2, inner_idx++) { + const int ixj = int(col ^ j); + + int idx_0 = (col & k) == 0 ? col : ixj; + int idx_1 = (col & k) == 0 ? ixj : col; + + int sh_idx_0 = dst_row[idx_0]; + int sh_idx_1 = dst_row[idx_1]; + bool idx_0_oob = needs_bounds_check ? sh_idx_0 >= p.ncols : false; + bool idx_1_oob = needs_bounds_check ? 
sh_idx_1 >= p.ncols : false; + + if ((idx_0_oob || + (!idx_1_oob && a_sh[sh_idx_0] > a_sh[sh_idx_1])) && (ixj > col)) { + swap(idx_0, idx_1); } + barrier(); } } if (col < p.ncols) { - data_d[row_offset + col] = dst_row[col]; + if (p.order == ASC) { + data_d[row_offset + col] = dst_row[col]; + } else { + data_d[row_offset + p.ncols - col - 1] = dst_row[col]; + } + } +} + +void main() { + if (p.ncols == BLOCK_SIZE) { + argsort(false); + } else { + argsort(true); } } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm1.comp b/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm1.comp index 230e815f22..81cc3f81fc 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm1.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm1.comp @@ -210,7 +210,7 @@ void main() { [[unroll]] for (uint32_t d = 0; d < HSV_per_thread / 4; ++d) { [[unroll]] for (uint32_t r = 0; r < rows_per_thread; ++r) { - Of[r][d] = float16_t(eMf[r]) * Of[r][d]; + Of[r][d] = ACC_TYPE(eMf[r]) * Of[r][d]; } } [[unroll]] for (uint32_t r = 0; r < rows_per_thread; ++r) { @@ -233,7 +233,7 @@ void main() { vec4 Vf = vec4(data_vv4[v_offset / 4 + (j * Bc + c * cols_per_iter + col_tid) * v_stride / 4 + d * D_split + d_tid]); #endif [[unroll]] for (uint32_t r = 0; r < rows_per_thread; ++r) { - Of[r][d] += float16_t(Pf[r]) * ACC_TYPEV4(Vf); + Of[r][d] += ACC_TYPE(Pf[r]) * ACC_TYPEV4(Vf); } } } @@ -288,7 +288,7 @@ void main() { [[unroll]] for (uint32_t r = 0; r < rows_per_thread; ++r) { [[unroll]] for (uint32_t d = 0; d < HSV_per_thread / 4; ++d) { - Of[r][d] = float16_t(eMf[r]) * Of[r][d]; + Of[r][d] = ACC_TYPE(eMf[r]) * Of[r][d]; tmpshv4[tid] = Of[r][d]; barrier(); @@ -357,7 +357,7 @@ void main() { [[unroll]] for (uint32_t d = 0; d < HSV_per_thread / 4; ++d) { [[unroll]] for (uint32_t r = 0; r < rows_per_thread; ++r) { - Of[r][d] *= float16_t(Lfrcp[r]); + Of[r][d] *= ACC_TYPE(Lfrcp[r]); } } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/generic_binary_head.comp b/ggml/src/ggml-vulkan/vulkan-shaders/generic_binary_head.comp index 4b4316cf3d..750e785753 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/generic_binary_head.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/generic_binary_head.comp @@ -2,6 +2,7 @@ #extension GL_EXT_control_flow_attributes : require #include "rte.comp" +#include "utils.comp" layout (push_constant) uniform parameter { @@ -28,25 +29,9 @@ uint get_aoffset() { return p.misalign_offsets >> 16; } uint get_boffset() { return (p.misalign_offsets >> 8) & 0xFF; } uint get_doffset() { return p.misalign_offsets & 0xFF; } -// mod and div are expensive and coordinates/dimensions are often power of 2 or equal to 1 -uint fastmod(uint a, uint b) { - if ((b & (b-1)) == 0) { - return a & (b-1); - } - return a % b; -} - -uint fastdiv(uint a, uint b) { - return (a < b) ? 
0 : (a / b); -} void get_indices(uint idx, out uint i00, out uint i01, out uint i02, out uint i03) { - i03 = fastdiv(idx, (p.ne02*p.ne01*p.ne00)); - const uint i03_offset = i03 * p.ne02*p.ne01*p.ne00; - i02 = fastdiv((idx - i03_offset), (p.ne01*p.ne00)); - const uint i02_offset = i02*p.ne01*p.ne00; - i01 = (idx - i03_offset - i02_offset) / p.ne00; - i00 = idx - i03_offset - i02_offset - i01*p.ne00; + get_indices(idx, i00, i01, i02, i03, p.ne00, p.ne01, p.ne02, p.ne03); } uint src0_idx(uint i00, uint i01, uint i02, uint i03) { diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.comp index 903753c7e2..b93e9948f7 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.comp @@ -1,6 +1,10 @@ #extension GL_EXT_control_flow_attributes : enable #extension GL_EXT_shader_16bit_storage : require #extension GL_EXT_shader_8bit_storage : require +#if USE_SUBGROUP_ADD +#extension GL_KHR_shader_subgroup_basic : require +#extension GL_KHR_shader_subgroup_arithmetic : require +#endif #ifdef MUL_MAT_ID #define EXPERT_COUNT 8 @@ -90,7 +94,38 @@ layout (constant_id = 2) const uint NUM_COLS = 1; shared FLOAT_TYPE tmpsh[NUM_COLS][NUM_ROWS][BLOCK_SIZE]; -void reduce_result(const in FLOAT_TYPE temp[NUM_COLS][NUM_ROWS], const in uint32_t d_offset, const in uint32_t first_row, const in uint32_t num_rows, const in uint32_t tid) { +void reduce_result(FLOAT_TYPE temp[NUM_COLS][NUM_ROWS], const in uint32_t d_offset, const in uint32_t first_row, const in uint32_t num_rows, const in uint32_t tid) { + // subgroupAdd is probably faster on devices that support it, + // particularly when the workgroup has more than one subgroup +#if USE_SUBGROUP_ADD + // sum up partial sums within a subgroup + [[unroll]] for (uint j = 0; j < NUM_COLS; ++j) { + [[unroll]] for (uint n = 0; n < num_rows; ++n) { + temp[j][n] = subgroupAdd(temp[j][n]); + } + } + + // Go through shared memory to sum partials across subgroups + if (gl_SubgroupInvocationID == 0) { + [[unroll]] for (uint j = 0; j < NUM_COLS; ++j) { + [[unroll]] for (uint n = 0; n < num_rows; ++n) { + tmpsh[j][n][gl_SubgroupID] = temp[j][n]; + } + } + } + barrier(); + if (tid == 0) { + [[unroll]] for (uint j = 0; j < NUM_COLS; ++j) { + [[unroll]] for (uint n = 0; n < num_rows; ++n) { + temp[j][n] = FLOAT_TYPE(0); + [[unroll]] for (uint s = 0; s < gl_NumSubgroups; ++s) { + temp[j][n] += tmpsh[j][n][s]; + } + data_d[j*p.batch_stride_d + d_offset + first_row + n] = D_TYPE(temp[j][n]); + } + } + } +#else // sum up partial sums and write back result [[unroll]] for (uint j = 0; j < NUM_COLS; ++j) { [[unroll]] for (uint n = 0; n < num_rows; ++n) { @@ -115,4 +150,5 @@ void reduce_result(const in FLOAT_TYPE temp[NUM_COLS][NUM_ROWS], const in uint32 } } } +#endif } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp index 8c5114a79d..a61a464c7b 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp @@ -801,7 +801,7 @@ void main() { } #else const uint row_i = ic * BN + loadc_b + l; - if (row_i < _ne1) { + if (row_i < _ne1 && block + loadr_b < end_k) { const u16vec2 row_idx = row_ids[row_i]; buf_b[(loadc_b + l) * SHMEM_STRIDE + loadr_b] = TO_FLOAT_TYPE(data_b[pos_b + row_idx.y * p.batch_stride_b + (row_idx.x % p.ne11) * p.stride_b + loadr_b]); } else { @@ -875,7 +875,9 @@ void main() { const u16vec2 row_idx = row_ids[row_i]; - 
data_d[row_idx.y * p.batch_stride_d + row_idx.x * p.stride_d + dr + cm_row * TM + store_r] = D_TYPE(coopmat_stage[warp_i * TM * TN + (col + store_c) * TM + store_r]); + if (dr + cm_row * TM + store_r < p.M) { + data_d[row_idx.y * p.batch_stride_d + row_idx.x * p.stride_d + dr + cm_row * TM + store_r] = D_TYPE(coopmat_stage[warp_i * TM * TN + (col + store_c) * TM + store_r]); + } } } } @@ -925,7 +927,9 @@ void main() { #endif // MUL_MAT_ID [[unroll]] for (uint cr = 0; cr < TM; cr++) { #ifdef MUL_MAT_ID - data_d[row_idx.y * p.batch_stride_d + row_idx.x * p.stride_d + dr_warp + cr] = D_TYPE(sums[(wsic * TN + cc) * (WMITER * TM) + wsir * TM + cr]); + if (dr_warp + cr < p.M) { + data_d[row_idx.y * p.batch_stride_d + row_idx.x * p.stride_d + dr_warp + cr] = D_TYPE(sums[(wsic * TN + cc) * (WMITER * TM) + wsir * TM + cr]); + } #else if (dr_warp + cr < p.M && dc_warp + cc < p.N) { data_d[offsets + (dc_warp + cc) * p.stride_d + dr_warp + cr] = D_TYPE(sums[(wsic * TN + cc) * (WMITER * TM) + wsir * TM + cr]); diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/multi_add.comp b/ggml/src/ggml-vulkan/vulkan-shaders/multi_add.comp new file mode 100644 index 0000000000..0c7acb7060 --- /dev/null +++ b/ggml/src/ggml-vulkan/vulkan-shaders/multi_add.comp @@ -0,0 +1,68 @@ +#version 450 + +#extension GL_EXT_shader_16bit_storage : require +#extension GL_EXT_nonuniform_qualifier : enable +#extension GL_EXT_control_flow_attributes : require + +#include "rte.comp" +#include "types.comp" +#include "utils.comp" + +layout (push_constant) uniform parameter2 +{ + // shape for dst + uint ne20; uint ne21; uint ne22; uint ne23; + + // strides for srcs+dst + uint nb[8][4]; +} p; + +layout (binding = 0) readonly buffer A {A_TYPE data_a[];} a[]; +layout (binding = 0) writeonly buffer D {D_TYPE data_d[];} d[]; + +layout(constant_id = 0) const uint num_srcs = 2; + +uint src_idx(uint s, uint i00, uint i01, uint i02, uint i03) { + return i03*p.nb[s][3] + i02*p.nb[s][2] + i01*p.nb[s][1] + i00*p.nb[s][0]; +} + +uint dst_idx(uint i00, uint i01, uint i02, uint i03) { + uint nb20 = p.nb[num_srcs][0]; + uint nb21 = p.nb[num_srcs][1]; + uint nb22 = p.nb[num_srcs][2]; + uint nb23 = p.nb[num_srcs][3]; + return i03*nb23 + i02*nb22 + i01*nb21 + i00*nb20; +} + +uint get_idx() { + return gl_GlobalInvocationID.z * 262144 + gl_GlobalInvocationID.y * 512 + gl_GlobalInvocationID.x; +} + +const uint num_threads = 256; + +layout(local_size_x = num_threads, local_size_y = 1, local_size_z = 1) in; + +void main() { + uint idx = get_idx(); + + uint ne = p.ne20 * p.ne21 * p.ne22 * p.ne23; + + // num_threads * num_iter must equal 512, to match the wg_denoms and get_idx calculation + const uint num_iter = 2; + + [[unroll]] for (uint i = 0; i < num_iter; ++i) { + if (idx >= ne) { + continue; + } + uint i00, i01, i02, i03; + get_indices(idx, i00, i01, i02, i03, p.ne20, p.ne21, p.ne22, p.ne23); + + FLOAT_TYPE sum = FLOAT_TYPE(0); + [[unroll]] for (uint s = 0; s < num_srcs; ++s) { + sum += FLOAT_TYPE(a[s].data_a[src_idx(s, i00, i01, i02, i03)]); + } + d[num_srcs].data_d[dst_idx(i00, i01, i02, i03)] = D_TYPE(sum); + + idx += num_threads; + } +} diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/opt_step_sgd.comp b/ggml/src/ggml-vulkan/vulkan-shaders/opt_step_sgd.comp new file mode 100644 index 0000000000..6426dedee5 --- /dev/null +++ b/ggml/src/ggml-vulkan/vulkan-shaders/opt_step_sgd.comp @@ -0,0 +1,22 @@ +#version 450 + +#include "generic_head.comp" + +layout(local_size_x = 512, local_size_y = 1, local_size_z = 1) in; + +layout (binding = 0) buffer X {A_TYPE 
data_x[];}; +layout (binding = 1) readonly buffer G {A_TYPE data_grad[];}; +layout (binding = 2) readonly buffer P {float data_params[2];}; + +void main() { + const uint i = gl_GlobalInvocationID.z * 262144 + gl_GlobalInvocationID.y * 512 + gl_GlobalInvocationID.x; + + if (i >= p.KX) { + return; + } + + const float alpha = data_params[0]; + const float keep = 1.f - alpha * data_params[1]; + + data_x[i] = data_x[i] * keep - alpha * data_grad[i]; +} diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/sqrt.comp b/ggml/src/ggml-vulkan/vulkan-shaders/sqrt.comp new file mode 100644 index 0000000000..4bc697b9b9 --- /dev/null +++ b/ggml/src/ggml-vulkan/vulkan-shaders/sqrt.comp @@ -0,0 +1,17 @@ +#version 450 + +#include "types.comp" +#include "generic_unary_head.comp" + +layout(local_size_x = 512, local_size_y = 1, local_size_z = 1) in; + +void main() { + const uint idx = get_idx(); + + if (idx >= p.ne) { + return; + } + + const FLOAT_TYPE val = FLOAT_TYPE(data_a[get_aoffset() + src0_idx(idx)]); + data_d[get_doffset() + dst_idx(idx)] = D_TYPE(sqrt(val)); +} diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/utils.comp b/ggml/src/ggml-vulkan/vulkan-shaders/utils.comp new file mode 100644 index 0000000000..dc4a1e6d96 --- /dev/null +++ b/ggml/src/ggml-vulkan/vulkan-shaders/utils.comp @@ -0,0 +1,25 @@ +#ifndef UTILS_COMP +#define UTILS_COMP + +// mod and div are expensive and coordinates/dimensions are often power of 2 or equal to 1 +uint fastmod(uint a, uint b) { + if ((b & (b-1)) == 0) { + return a & (b-1); + } + return a % b; +} + +uint fastdiv(uint a, uint b) { + return (a < b) ? 0 : (a / b); +} + +void get_indices(uint idx, out uint i00, out uint i01, out uint i02, out uint i03, uint ne00, uint ne01, uint ne02, uint ne03) { + i03 = fastdiv(idx, (ne02*ne01*ne00)); + const uint i03_offset = i03 * ne02*ne01*ne00; + i02 = fastdiv((idx - i03_offset), (ne01*ne00)); + const uint i02_offset = i02*ne01*ne00; + i01 = (idx - i03_offset - i02_offset) / ne00; + i00 = idx - i03_offset - i02_offset - i01*ne00; +} + +#endif // UTILS_COMP diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp b/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp index 4cd94c51e3..75c572d6fc 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp @@ -223,7 +223,8 @@ void string_to_spv_func(const std::string& _name, const std::string& in_fname, c std::string target_env = (name.find("_cm2") != std::string::npos) ? "--target-env=vulkan1.3" : "--target-env=vulkan1.2"; // disable spirv-opt for coopmat shaders for https://github.com/ggerganov/llama.cpp/issues/10734 - std::string opt_level = coopmat ? "" : "-O"; + // disable spirv-opt for bf16 shaders for https://github.com/ggml-org/llama.cpp/issues/15344 + std::string opt_level = (coopmat || name.find("bf16") != std::string::npos) ? 
"" : "-O"; #ifdef _WIN32 std::vector cmd = {GLSLC, "-fshader-stage=compute", target_env, opt_level, "\"" + in_path + "\"", "-o", "\"" + out_fname + "\""}; @@ -472,6 +473,9 @@ void process_shaders() { string_to_spv("mul_mat_vec_" + tname + "_f32_f32", shader, merge_maps(base_dict, {{data_a_key, "1"}, {"B_TYPE", "float"}, {"B_TYPE_VEC2", "vec2"}, {"B_TYPE_VEC4", "vec4"}, {"D_TYPE", "float"}})); string_to_spv("mul_mat_vec_" + tname + "_f16_f32", shader, merge_maps(base_dict, {{data_a_key, "1"}, {"B_TYPE", "float16_t"}, {"B_TYPE_VEC2", "f16vec2"}, {"B_TYPE_VEC4", "f16vec4"}, {"D_TYPE", "float"}})); + string_to_spv("mul_mat_vec_" + tname + "_f32_f32_subgroup", shader, merge_maps(base_dict, {{data_a_key, "1"}, {"B_TYPE", "float"}, {"B_TYPE_VEC2", "vec2"}, {"B_TYPE_VEC4", "vec4"}, {"D_TYPE", "float"}, {"USE_SUBGROUP_ADD", "1"}})); + string_to_spv("mul_mat_vec_" + tname + "_f16_f32_subgroup", shader, merge_maps(base_dict, {{data_a_key, "1"}, {"B_TYPE", "float16_t"}, {"B_TYPE_VEC2", "f16vec2"}, {"B_TYPE_VEC4", "f16vec4"}, {"D_TYPE", "float"}, {"USE_SUBGROUP_ADD", "1"}})); + string_to_spv("mul_mat_vec_id_" + tname + "_f32", shader, merge_maps(base_dict, {{"MUL_MAT_ID", "1"}, {data_a_key, "1"}, {"B_TYPE", "float"}, {"B_TYPE_VEC2", "vec2"}, {"B_TYPE_VEC4", "vec4"}, {"D_TYPE", "float"}})); // Dequant shaders @@ -566,6 +570,8 @@ void process_shaders() { string_to_spv("sqr_f32", "square.comp", {{"A_TYPE", "float"}, {"D_TYPE", "float"}, {"FLOAT_TYPE", "float"}}); + string_to_spv("sqrt_f32", "sqrt.comp", {{"A_TYPE", "float"}, {"D_TYPE", "float"}, {"FLOAT_TYPE", "float"}}); + string_to_spv("sin_f32", "sin.comp", {{"A_TYPE", "float"}, {"D_TYPE", "float"}, {"FLOAT_TYPE", "float"}}); string_to_spv("cos_f32", "cos.comp", {{"A_TYPE", "float"}, {"D_TYPE", "float"}, {"FLOAT_TYPE", "float"}}); @@ -657,6 +663,7 @@ void process_shaders() { string_to_spv("rwkv_wkv7_f32", "wkv7.comp", merge_maps(base_dict, {{"A_TYPE", "float"}})); string_to_spv("opt_step_adamw_f32", "opt_step_adamw.comp", merge_maps(base_dict, {{"A_TYPE", "float"}})); + string_to_spv("opt_step_sgd_f32", "opt_step_sgd.comp", merge_maps(base_dict, {{"A_TYPE", "float"}})); string_to_spv("conv2d_f32_unroll", "conv2d_mm.comp", {{"A_TYPE", "float"}, {"B_TYPE", "float"}, {"D_TYPE", "float"}, {"USE_COLLECTIVES", "1"}, {"UNROLL", "[[unroll]]"}}); string_to_spv("conv2d_f16_f32_unroll", "conv2d_mm.comp", {{"A_TYPE", "float16_t"}, {"B_TYPE", "float"}, {"D_TYPE", "float"}, {"USE_COLLECTIVES", "1"}, {"UNROLL", "[[unroll]]"}}); @@ -676,6 +683,8 @@ void process_shaders() { string_to_spv("add_id_f32", "add_id.comp", merge_maps(base_dict, {{"A_TYPE", "float"}, {"B_TYPE", "float"}, {"D_TYPE", "float"}})); + string_to_spv("multi_add_f32", "multi_add.comp", {{"A_TYPE", "float"}, {"B_TYPE", "float"}, {"D_TYPE", "float"}, {"FLOAT_TYPE", "float"}, {"RTE16", "1"}}); + for (auto &c : compiles) { c.wait(); } @@ -784,6 +793,18 @@ void write_output_files() { fputs(data.c_str(), src); fputs(len.c_str(), src); } + + for (const std::string& btype : {"f16", "f32"}) { + for (const auto& tname : type_names) { + fprintf(hdr, "extern unsigned char *arr_dmmv_%s_%s_f32_data[2];\n", tname.c_str(), btype.c_str()); + fprintf(hdr, "extern uint64_t arr_dmmv_%s_%s_f32_len[2];\n", tname.c_str(), btype.c_str()); + std::string data = "unsigned char *arr_dmmv_" + tname + "_" + btype + "_f32_data[2] = {mul_mat_vec_" + tname + "_" + btype + "_f32_data, mul_mat_vec_" + tname + "_" + btype + "_f32_subgroup_data};\n"; + std::string len = "uint64_t arr_dmmv_" + tname + "_" + btype + "_f32_len[2] = 
{mul_mat_vec_" + tname + "_" + btype + "_f32_len, mul_mat_vec_" + tname + "_" + btype + "_f32_subgroup_len};\n"; + fputs(data.c_str(), src); + fputs(len.c_str(), src); + } + } + fclose(hdr); fclose(src); } diff --git a/ggml/src/ggml-zdnn/CMakeLists.txt b/ggml/src/ggml-zdnn/CMakeLists.txt new file mode 100644 index 0000000000..0a723ce4de --- /dev/null +++ b/ggml/src/ggml-zdnn/CMakeLists.txt @@ -0,0 +1,36 @@ +if (DEFINED ZDNN_ROOT) + message(STATUS "zdnn: using ZDNN_ROOT override: ${ZDNN_ROOT}") + set(ZDNN_HINT "${ZDNN_ROOT}") +else() + set(ZDNN_HINT "") +endif() + +find_path(ZDNN_INCLUDE + NAMES zdnn.h + HINTS ${ZDNN_HINT} /usr /usr/local + PATH_SUFFIXES include) +if (ZDNN_INCLUDE) + message(STATUS "zdnn: found include: ${ZDNN_INCLUDE}") +else() + message(FATAL_ERROR "zdnn: include directory not found, please set ZDNN_ROOT to the proper path if necessary") +endif() + +find_library(ZDNN_LIB + NAMES zdnn + HINTS ${ZDNN_HINT} /usr /usr/local + PATH_SUFFIXES lib lib64) +if (ZDNN_LIB) + message(STATUS "zdnn: found library: ${ZDNN_LIB}") +else() + message(FATAL_ERROR "zdnn: library not found, please set ZDNN_ROOT to the proper path if necessary") +endif() + +file(GLOB GGML_SOURCES_ZDNN "*.c" "*.cpp") +file(GLOB GGML_HEADERS_ZDNN "*.h" "*.hpp") + +ggml_add_backend_library(ggml-zdnn ${GGML_HEADERS_ZDNN} ${GGML_SOURCES_ZDNN}) +target_link_libraries(ggml-zdnn PRIVATE ${ZDNN_LIB}) +target_include_directories(ggml-zdnn PRIVATE ${ZDNN_INCLUDE}) +target_link_directories(ggml-zdnn PRIVATE ${ZDNN_LIB}) + +target_compile_definitions(ggml-zdnn PRIVATE GGML_USE_ZDNN) diff --git a/ggml/src/ggml-zdnn/ggml-zdnn-impl.h b/ggml/src/ggml-zdnn/ggml-zdnn-impl.h new file mode 100644 index 0000000000..9dcb040fa8 --- /dev/null +++ b/ggml/src/ggml-zdnn/ggml-zdnn-impl.h @@ -0,0 +1,97 @@ +#ifndef GGML_ZDNN_IMPL +#define GGML_ZDNN_IMPL + +#include "zdnn.h" +#include "ggml.h" +#include "ggml-zdnn.h" + +#include +#include +#include + +#define GGML_ZDNN_NAME "zDNN" +#define GGML_ZDNN_VERSION ZDNN_VERNUM + +#define vec_neg(a) (-(a)) // Vector Negate +#define vec_add(a, b) ((a) + (b)) // Vector Add +#define vec_sub(a, b) ((a) - (b)) // Vector Subtract +#define vec_mul(a, b) ((a) * (b)) // Vector Multiply +#define vec_div(a, b) ((a) / (b)) // Vector Divide +#define vec_sl(a, b) ((a) << (b)) // Vector Shift Left +#define vec_sra(a, b) ((a) >> (b)) // Vector Shift Right +#define vec_sr(a, b) ((a) >> (b)) // Vector Shift Right Algebraic +#define vec_slo(a, b) vec_slb(a, (b) << 64) // Vector Shift Left by Octet +#define vec_sro(a, b) vec_srb(a, (b) << 64) // Vector Shift Right by Octet + +#ifndef vec_and +#define vec_and(a, b) ((a) & (b)) // Vector AND +#endif + +#ifndef vec_or +#define vec_or(a, b) ((a) | (b)) // Vector OR +#endif + +#ifndef vec_xor +#define vec_xor(a, b) ((a) ^ (b)) // Vector XOR +#endif + +typedef signed char char8x16_t __attribute__((vector_size(16))); +typedef unsigned char uchar8x16_t __attribute__((vector_size(16))); + +typedef int8_t int8x16_t __attribute__((vector_size(16))); +typedef int16_t int16x8_t __attribute__((vector_size(16))); +typedef int32_t int32x4_t __attribute__((vector_size(16))); +typedef uint8_t uint8x16_t __attribute__((vector_size(16))); +typedef uint16_t uint16x8_t __attribute__((vector_size(16))); +typedef uint32_t uint32x4_t __attribute__((vector_size(16))); + +typedef float float32x4_t __attribute__((vector_size(16))); +typedef double double64x2_t __attribute__((vector_size(16))); + +typedef signed long long long64x2_t __attribute__((vector_size(16))); +typedef unsigned long long 
ulong64x2_t __attribute__((vector_size(16))); + +#define ZDNN_CHECK(stmt) \ + do { \ + zdnn_status status = (stmt); \ + GGML_ASSERT(status == ZDNN_OK); \ + } while (0); + +struct ggml_backend_zdnn_device_context { + int zdnn_device; + int zdnn_device_ref_count; + + bool has_parmblkformat_0; + bool has_parmblkformat_1; + + size_t max_size; + + char name[128]; +}; + +struct ggml_backend_zdnn_context { + int device; + ggml_cgraph * gf; +}; + +struct ggml_backend_zdnn_buffer { + void * data; + size_t size; + + zdnn_tensor_desc pre_tfm_desc; + zdnn_tensor_desc tfm_desc; + zdnn_ztensor ztensor; + + char name[GGML_MAX_NAME]; +}; + +struct ggml_backend_zdnn_buffer_context { + void * all_data; + size_t all_size; + bool owned; + + int n_buffers; + std::vector> buffers; +}; + +#endif // GGML_ZDNN_IMPL diff --git a/ggml/src/ggml-zdnn/ggml-zdnn.cpp b/ggml/src/ggml-zdnn/ggml-zdnn.cpp new file mode 100644 index 0000000000..7507a52aea --- /dev/null +++ b/ggml/src/ggml-zdnn/ggml-zdnn.cpp @@ -0,0 +1,846 @@ +#include "zdnn.h" +#include "ggml-zdnn.h" +#include "ggml-zdnn-impl.h" + +#include "ggml-impl.h" +#include "ggml-backend-impl.h" + +#include +#include +#include +#include + +inline zdnn_data_types ggml_zdnn_type_mapping(ggml_type type) { + switch (type) { + case GGML_TYPE_F32: + return FP32; + case GGML_TYPE_F16: + return FP16; + case GGML_TYPE_BF16: + return BFLOAT; + case GGML_TYPE_I8: + return INT8; + case GGML_TYPE_I32: + return INT32; + case GGML_TYPE_Q8_0: + return INT8; + default: + GGML_ABORT("%s: fatal: unable to determine zTensor data type", + __func__); + break; + } +} + +inline void ggml_zdnn_create_tensor(zdnn_tensor_desc & pre_tfm_desc, + zdnn_tensor_desc & tfm_desc, + zdnn_ztensor & ztensor, + const ggml_tensor * src, + const int64_t * ne, + const zdnn_data_layouts layout) { + zdnn_init_pre_transformed_desc( + layout, + ggml_zdnn_type_mapping(src->type), + &pre_tfm_desc, + ne[3], ne[2], ne[1], ne[0] + ); + + ZDNN_CHECK(zdnn_generate_transformed_desc(&pre_tfm_desc, &tfm_desc)); + ZDNN_CHECK(zdnn_init_ztensor_with_malloc(&pre_tfm_desc, &tfm_desc, &ztensor)); +} + +inline void ggml_zdnn_load_tensor(zdnn_ztensor & ztensor, + void * buffer) { + ZDNN_CHECK(zdnn_transform_ztensor(&ztensor, buffer)); +} + +inline void ggml_zdnn_init_tensor(ggml_backend_zdnn_buffer * buffer, const ggml_tensor * tensor) { + switch (tensor->op) { + case GGML_OP_MUL_MAT: + { + zdnn_init_pre_transformed_desc( + ZDNN_2D, + ggml_zdnn_type_mapping(tensor->type), + &buffer->pre_tfm_desc, + tensor->ne[1], tensor->ne[0] + ); + } break; + + default: + { + // For 4D tensors, GGML uses NCHW layout. However, because zDNN + // automatically transforms everything to NHWC, we will use it + // directly to avoid the performance penalty changing the + // layout and reshaping the tensor. + zdnn_init_pre_transformed_desc( + ZDNN_NHWC, + ggml_zdnn_type_mapping(tensor->type), + &buffer->pre_tfm_desc, + tensor->ne[3], tensor->ne[2], tensor->ne[1], tensor->ne[0] + ); + + // TODO: Consider adding a ggml check. + // TODO: If tensor = 4D, use ZDNN_NCHW by default. + // TODO: If tensor = 2D, use ZDNN_NHWC by default. 
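+                // Example (editorial comment; hypothetical shape): a GGML F32 tensor with
+                // ne = {64, 32, 4, 2} (ne[0] innermost) is declared to zDNN as
+                // dim4 = 2, dim3 = 4, dim2 = 32, dim1 = 64, i.e. ne[3]..ne[0].
+                // zdnn_generate_transformed_desc() below then derives the transformed
+                // (stickified) layout the NNPA expects, and zdnn_init_ztensor_with_malloc()
+                // allocates its backing buffer.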
+ } break; + } + + ZDNN_CHECK(zdnn_generate_transformed_desc(&buffer->pre_tfm_desc, &buffer->tfm_desc)); + ZDNN_CHECK(zdnn_init_ztensor_with_malloc(&buffer->pre_tfm_desc, &buffer->tfm_desc, &buffer->ztensor)); +} + +static void ggml_zdnn_mul_mat_op(ggml_backend_zdnn_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + GGML_TENSOR_BINARY_OP_LOCALS; + + const enum ggml_type type = src0->type; + + GGML_ASSERT(ne0 == ne01); + GGML_ASSERT(ne1 == ne11); + GGML_ASSERT(ne2 == ne12); + GGML_ASSERT(ne3 == ne13); + + // we don't support permuted src0 or src1 + GGML_ASSERT(nb00 == ggml_type_size(type)); + GGML_ASSERT(nb10 == ggml_type_size(src1->type)); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + const ggml_tensor * weights = src0; + const ggml_tensor * inputs = src1; + ggml_tensor * output = dst; + + ggml_backend_zdnn_buffer * weights_extra = (ggml_backend_zdnn_buffer *)weights->extra; + ggml_backend_zdnn_buffer * inputs_extra = (ggml_backend_zdnn_buffer *)inputs->extra; + ggml_backend_zdnn_buffer * output_extra = (ggml_backend_zdnn_buffer *)output->extra; + + zdnn_tensor_desc ptd_bias, td_bias; + zdnn_ztensor zt_bias; + + const int64_t weights_rows = ne01; + const int64_t weights_cols = ne00; + const int64_t inputs_rows = ne11; + const int64_t inputs_cols = ne10; + + assert(inputs_cols == weights_cols); + + const int64_t output_rows = ne1; + const int64_t output_cols = ne0; + + const int64_t bias_dim [GGML_MAX_DIMS] = { 1, 1, 1, output_cols }; + ggml_zdnn_create_tensor(ptd_bias, td_bias, zt_bias, output, bias_dim, ZDNN_1D); + + void * bias_data = (void *)calloc(ne0, ggml_element_size(output)); + if (weights_extra->ztensor.is_transformed == false) ggml_zdnn_load_tensor(weights_extra->ztensor, weights->data); + if (inputs_extra->ztensor.is_transformed == false) ggml_zdnn_load_tensor(inputs_extra->ztensor, inputs->data); + ggml_zdnn_load_tensor(zt_bias, bias_data); + + // GGML_LOG_INFO("%s: tensor '%s' tensor dimensions: [%ld, %ld, %ld, %ld] pre_tfm_desc dimensions: [%ld, %ld, %ld, %ld]\n", + // __func__, weights_extra->name, + // weights->ne[3], weights->ne[2], weights->ne[1], weights->ne[0], + // weights_extra->pre_tfm_desc.dim1, + // weights_extra->pre_tfm_desc.dim2, + // weights_extra->pre_tfm_desc.dim3, + // weights_extra->pre_tfm_desc.dim4); + + // GGML_LOG_INFO("%s: tensor '%s' tensor dimensions: [%ld, %ld, %ld, %ld] pre_tfm_desc dimensions: [%ld, %ld, %ld, %ld]\n", + // __func__, inputs_extra->name, + // inputs->ne[3], inputs->ne[2], inputs->ne[1], inputs->ne[0], + // inputs_extra->pre_tfm_desc.dim1, + // inputs_extra->pre_tfm_desc.dim2, + // inputs_extra->pre_tfm_desc.dim3, + // inputs_extra->pre_tfm_desc.dim4); + + GGML_ASSERT(weights_extra->pre_tfm_desc.dim1 == weights->ne[0] && "weights_extra->pre_tfm_desc.dim1 must match weights->ne[0]"); + GGML_ASSERT(weights_extra->pre_tfm_desc.dim2 == weights->ne[1] && "weights_extra->pre_tfm_desc.dim2 must match weights->ne[1]"); + GGML_ASSERT(inputs_extra->pre_tfm_desc.dim1 == inputs->ne[0] && "inputs_extra->pre_tfm_desc.dim1 must match inputs->ne[0]"); + GGML_ASSERT(inputs_extra->pre_tfm_desc.dim2 == inputs->ne[1] && "inputs_extra->pre_tfm_desc.dim2 must match inputs->ne[1]"); + + ZDNN_CHECK(zdnn_matmul_transpose_op(&inputs_extra->ztensor, &weights_extra->ztensor, &zt_bias, + false, true, MATMUL_OP_ADDITION, &output_extra->ztensor)); + // TODO: Remove in the future as we are currently DLF16 -> 
FP32 then in the next op, FP32 -> DLF16 again. Inefficient. + ZDNN_CHECK(zdnn_transform_origtensor(&output_extra->ztensor, output->data)); + + ZDNN_CHECK(zdnn_free_ztensor_buffer(&zt_bias)); + free(bias_data); +} + +static void ggml_zdnn_mul_mat_dispatch(ggml_backend_zdnn_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + bool use_mul_mat_vec = + (src0->type == GGML_TYPE_F16 || src0->type == GGML_TYPE_F16) + && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32 + && src0->ne[0] % 2 == 0 && src1->ne[1] == 1; + + bool use_mul_mat_vec_q = + ggml_is_quantized(src0->type) + && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32; + + bool use_mul_mat_q = + ggml_is_quantized(src0->type) + && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32; + + // debug helpers + // GGML_LOG_INFO("%s: use_mul_mat_vec = %d\n", __func__, use_mul_mat_vec); + // GGML_LOG_INFO("%s: use_mul_mat_vec_q = %d\n", __func__, use_mul_mat_vec_q); + // GGML_LOG_INFO("%s: use_mul_mat_q = %d\n", __func__, use_mul_mat_q); + // GGML_LOG_INFO("%s: src0: %8d %8d %8d %8d\n", __func__, src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]); + // GGML_LOG_INFO("%s: %8d %8d %8d %8d\n", __func__, src0->nb[0], src0->nb[1], src0->nb[2], src0->nb[3]); + // GGML_LOG_INFO("%s: src1: %8d %8d %8d %8d\n", __func__, src1->ne[0], src1->ne[1], src1->ne[2], src1->ne[3]); + // GGML_LOG_INFO("%s: %8d %8d %8d %8d\n", __func__, src1->nb[0], src1->nb[1], src1->nb[2], src1->nb[3]); + // GGML_LOG_INFO("%s: src0 is contiguous %d, transposed %d, type = %s, name = %s\n", __func__, ggml_is_contiguous(src0), ggml_is_transposed(src0), ggml_type_name(src0->type), src0->name); + // GGML_LOG_INFO("%s: src1 is contiguous %d, transposed %d, type = %s, name = %s\n", __func__, ggml_is_contiguous(src1), ggml_is_transposed(src1), ggml_type_name(src1->type), src1->name); + + if (src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F16 + && !ggml_is_transposed(src0) && !ggml_is_transposed(src1) + && src1->ne[2] * src1->ne[3] > 1) { + // general KQ + KQV multi-batch + GGML_LOG_INFO("%s: using zdnn_mul_mat_batched for KQ + KQV multi-batch\n", __func__); + // ggml_zdnn_mul_mat_batched(ctx, src0, src1, dst); + } else if (use_mul_mat_vec) { + GGML_LOG_INFO("%s: using zdnn_op_mul_mat_vec for vector multiplication\n", __func__); + // ggml_zdnn_op_mul_mat(ctx, src0, src1, dst, ggml_zdnn_op_mul_mat_vec, nullptr); + } else if (use_mul_mat_vec_q) { + GGML_LOG_INFO("%s: using zdnn_op_mul_mat_vec_q for quantized vector multiplication\n", __func__); + // ggml_zdnn_op_mul_mat(ctx, src0, src1, dst, ggml_zdnn_op_mul_mat_vec_q, ggml_zdnn_quantize_row_q8_1); + } else if (use_mul_mat_q) { + GGML_LOG_INFO("%s: using zdnn_op_mul_mat_q for quantized matrix multiplication\n", __func__); + // ggml_zdnn_op_mul_mat(ctx, src0, src1, dst, ggml_zdnn_op_mul_mat_q, ggml_zdnn_quantize_mmq_q8_1); + } else { + // GGML_LOG_INFO("%s: using zdnn_op_mul_mat for general matrix multiplication\n", __func__); + ggml_zdnn_mul_mat_op(ctx, src0, src1, dst); + } +} + +static bool ggml_zdnn_compute_forward(ggml_backend_zdnn_context * ctx, ggml_tensor * dst) { + switch (dst->op) { + case GGML_OP_MUL_MAT: + ggml_zdnn_mul_mat_dispatch(ctx, dst->src[0], dst->src[1], dst); + break; + + default: + return false; + } + + return true; +} + +static enum ggml_status ggml_zdnn_graph_compute(ggml_backend_t backend, ggml_cgraph * gf) { + ggml_backend_zdnn_context * ctx = ( ggml_backend_zdnn_context *)backend->context; + ggml_backend_zdnn_device_context * ctx_dev = 
(ggml_backend_zdnn_device_context *)backend->device->context; + + ctx->gf = gf; + for (int i = 0; i < gf->n_nodes; i++) { + ggml_tensor * node = gf->nodes[i]; + + if (ggml_is_empty(node) + || node->op == GGML_OP_NONE + || node->op == GGML_OP_RESHAPE + || node->op == GGML_OP_VIEW + || node->op == GGML_OP_PERMUTE + || node->op == GGML_OP_TRANSPOSE) { + continue; + } + + bool ok = ggml_zdnn_compute_forward(ctx, node); + if (!ok) { + GGML_LOG_ERROR("%s: unsupported op %s (%s)\n", + __func__, node->name, ggml_op_name(node->op)); + } + + GGML_ASSERT(ok); + } + + return GGML_STATUS_SUCCESS; +} + +static bool ggml_zdnn_supports_op(const ggml_backend_zdnn_device_context * ctx_dev, const ggml_tensor * op) { + switch (op->op) { + case GGML_OP_NONE: + case GGML_OP_RESHAPE: + case GGML_OP_VIEW: + case GGML_OP_TRANSPOSE: + case GGML_OP_PERMUTE: + return true; + + case GGML_OP_MUL_MAT: + { + const ggml_tensor * src0 = op->src[0]; + const ggml_tensor * src1 = op->src[1]; + + const int64_t ne10 = src1->ne[0]; + const int64_t ne0 = op->ne[0]; + const int64_t ne1 = op->ne[1]; + + const int64_t max_batch = ctx_dev->max_size; + + return ggml_is_matrix(src0) && + ggml_is_matrix(src1) && + ggml_is_contiguous(src0) && + ggml_is_contiguous(src1) && + src0->view_src == nullptr && src1->view_src == nullptr && + src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32 && + (ne0 <= max_batch && ne1 <= max_batch && ne10 <= max_batch); + } break; + + default: + return false; + } +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// globals +// + +// initialised in ggml_backend_zdnn_reg +static ggml_backend_reg g_ggml_backend_zdnn_reg; +static ggml_backend_device g_ggml_backend_zdnn_device; + +static ggml_backend_zdnn_device_context g_ggml_ctx_dev_main = { + /* .zdnn_device = */ 0, + /* .zdnn_device_ref_count = */ 0, + /* .has_parmblkformat_0 = */ false, + /* .has_parmblkformat_1 = */ false, + /* .max_size = */ 0, + /* .name = */ "", +}; + +static int ggml_backend_zdnn_device_acq(ggml_backend_zdnn_device_context * ctx) { + assert(ctx != NULL); + + if (ctx->zdnn_device == 0) { + ctx->zdnn_device = 1; + } + + if (ctx->zdnn_device >= 1) { + ctx->has_parmblkformat_0 = zdnn_is_nnpa_parmblk_fmt_installed(1, NNPA_PARMBLKFORMAT_0); + ctx->has_parmblkformat_1 = zdnn_is_nnpa_parmblk_fmt_installed(1, NNPA_PARMBLKFORMAT_1); + ctx->max_size = zdnn_get_nnpa_max_dim_idx_size(); + strncpy(ctx->name, GGML_ZDNN_NAME, sizeof(ctx->name) - 1); + } + + ctx->zdnn_device_ref_count++; + return ctx->zdnn_device; +} + +static void ggml_backend_zdnn_device_rel(ggml_backend_zdnn_device_context * ctx) { + assert(ctx != NULL); + assert(ctx->zdnn_device_ref_count > 0); + + ctx->zdnn_device_ref_count--; + if (ctx->zdnn_device_ref_count == 0) { + if (ctx->zdnn_device >= 0) { + ctx->zdnn_device = 0; + } + } +} + +static ggml_backend_zdnn_context * ggml_zdnn_init(ggml_backend_dev_t dev) { + GGML_LOG_INFO("%s: allocating\n", __func__); + GGML_LOG_INFO("%s: found 1 device\n", __func__); + + #ifdef STATIC_LIB + zdnn_init(); + #endif + + ggml_backend_zdnn_context * ctx = new ggml_backend_zdnn_context(); + ggml_backend_zdnn_device_context * ctx_dev = (ggml_backend_zdnn_device_context *)dev->context; + + int device = 1; + GGML_LOG_INFO("%s: picking default device: %s\n", __func__, ctx_dev->name); + + ctx->device = device; + GGML_LOG_INFO("%s: NNPA name: %s\n", __func__, ctx_dev->name); + GGML_LOG_INFO("%s: NNPA_PARMBLKFORMAT_0 = %s\n", __func__, ctx_dev->has_parmblkformat_0 ? 
"true" : "false"); + GGML_LOG_INFO("%s: NNPA_PARMBLKFORMAT_1 = %s\n", __func__, ctx_dev->has_parmblkformat_1 ? "true" : "false"); + + ctx->gf = nullptr; + + return ctx; +} + +static void ggml_zdnn_free(ggml_backend_zdnn_context * ctx) { + GGML_LOG_INFO("%s: deallocating\n", __func__); + delete ctx; +} + +// +// backend interface +// + +static void ggml_backend_zdnn_buffer_free_buffer(ggml_backend_buffer_t buffer) { + ggml_backend_zdnn_buffer_context * ctx = (ggml_backend_zdnn_buffer_context *)buffer->context; + + for (int i = 0; i < ctx->n_buffers; i++) { + if (ctx->buffers[i]->ztensor.buffer != NULL && ctx->buffers[i]->ztensor.is_transformed) { + ZDNN_CHECK(zdnn_free_ztensor_buffer(&ctx->buffers[i]->ztensor)); + } + } + + delete ctx; +} + +static void * ggml_backend_zdnn_buffer_get_base(ggml_backend_buffer_t buffer) { + ggml_backend_zdnn_buffer_context * ctx = (ggml_backend_zdnn_buffer_context *)buffer->context; + return ctx->all_data; +} + +static enum ggml_status ggml_backend_zdnn_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { + if (tensor->view_src != NULL) { + assert(tensor->view_src->buffer->buft == buffer->buft); + return GGML_STATUS_SUCCESS; + } + + ggml_backend_zdnn_buffer_context * ctx = (ggml_backend_zdnn_buffer_context *)buffer->context; + + const int64_t tsize = ggml_nbytes(tensor); + int buffer_idx = ctx->n_buffers; + + std::unique_ptr zdnn_buffer = std::make_unique(); + zdnn_buffer->data = tensor->data; + zdnn_buffer->size = tsize; + strncpy(zdnn_buffer->name, tensor->name, GGML_MAX_NAME - 1); + + ggml_zdnn_init_tensor(zdnn_buffer.get(), tensor); + tensor->extra = zdnn_buffer.get(); + + ctx->buffers.push_back(std::move(zdnn_buffer)); + ctx->n_buffers++; + + // GGML_LOG_INFO("%s: initialised tensor '%s' in buffer %d, size = %8.2f MiB\n", + // __func__, tensor->name, buffer_idx, tsize); + + return GGML_STATUS_SUCCESS; +} + +static void ggml_backend_zdnn_buffer_memset_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, uint8_t value, size_t offset, size_t size) { + memset((char *)tensor->data + offset, value, size); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_zdnn_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + memcpy((char *)tensor->data + offset, data, size); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_zdnn_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { + memcpy(data, (const char *)tensor->data + offset, size); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_zdnn_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + ggml_backend_zdnn_buffer_context * ctx = (ggml_backend_zdnn_buffer_context *)buffer->context; + + memset(ctx->all_data, value, ctx->all_size); +} + +static ggml_backend_buffer_i ggml_backend_zdnn_buffer_i = { + /* .free_buffer = */ ggml_backend_zdnn_buffer_free_buffer, + /* .get_base = */ ggml_backend_zdnn_buffer_get_base, + /* .init_tensor = */ ggml_backend_zdnn_buffer_init_tensor, + /* .memset_tensor = */ ggml_backend_zdnn_buffer_memset_tensor, + /* .set_tensor = */ ggml_backend_zdnn_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_zdnn_buffer_get_tensor, + /* .cpy_tensor = */ NULL, + /* .clear = */ ggml_backend_zdnn_buffer_clear, + /* .reset = */ NULL, +}; + +// +// default buffer type +// + +static const char * ggml_backend_zdnn_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return GGML_ZDNN_NAME; + + GGML_UNUSED(buft); +} + 
+static ggml_backend_buffer_t ggml_backend_zdnn_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + ggml_backend_zdnn_buffer_context * ctx = new ggml_backend_zdnn_buffer_context(); + + const size_t size_page = sysconf(_SC_PAGESIZE); + + size_t size_aligned = size; + if ((size_aligned % size_page) != 0) { + size_aligned += size_page - (size_aligned % size_page); + } + + ggml_backend_zdnn_device_context * ctx_dev = (ggml_backend_zdnn_device_context *)buft->device->context; + + GGML_ASSERT(ctx_dev->zdnn_device >= 0); + int device = ctx_dev->zdnn_device; GGML_UNUSED(device); + + ctx->all_data = ggml_aligned_malloc(size_aligned); + ctx->all_size = size_aligned; + ctx->owned = true; + ctx->n_buffers = 1; + + if (ctx->all_data != NULL) { + std::unique_ptr zdnn_buffer = std::make_unique(); + zdnn_buffer->data = ctx->all_data; + zdnn_buffer->size = size_aligned; + ctx->buffers.push_back(std::move(zdnn_buffer)); + } + + if (size_aligned > 0 && (ctx->all_data == NULL)) { + GGML_LOG_ERROR("%s: error: failed to allocate buffer, size = %8.2f\n", + __func__, size_aligned / 1024.0 / 1024.0); + delete ctx; + return NULL; + } + + return ggml_backend_buffer_init(buft, ggml_backend_zdnn_buffer_i, ctx, size); +} + +static size_t ggml_backend_zdnn_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { + return 256; + + GGML_UNUSED(buft); +} + +static bool ggml_backend_zdnn_buffer_type_is_host(ggml_backend_buffer_type_t buft) { + return true; + + GGML_UNUSED(buft); +} + +ggml_backend_buffer_type_t ggml_backend_zdnn_buffer_type(void) { + static ggml_backend_buffer_type ggml_backend_buffer_type_zdnn = { + /* .iface = */ { + /* .get_name = */ ggml_backend_zdnn_buffer_type_get_name, + /* .alloc_buffer = */ ggml_backend_zdnn_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_zdnn_buffer_type_get_alignment, + /* .get_max_size = */ NULL, + /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes + /* .is_host = */ ggml_backend_zdnn_buffer_type_is_host, + }, + /* .device = */ &g_ggml_backend_zdnn_device, + /* .context = */ NULL, + }; + + return &ggml_backend_buffer_type_zdnn; +} + +static const char * ggml_backend_zdnn_buffer_from_ptr_type_get_name(ggml_backend_buffer_type_t buft) { + return GGML_ZDNN_NAME "_Mapped"; + + GGML_UNUSED(buft); +} + +static ggml_backend_buffer_type_t ggml_backend_zdnn_buffer_from_ptr_type(void) { + static ggml_backend_buffer_type ggml_backend_buffer_from_ptr_type_zdnn = { + /* .iface = */ { + /* .get_name = */ ggml_backend_zdnn_buffer_from_ptr_type_get_name, + /* .alloc_buffer = */ ggml_backend_zdnn_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_zdnn_buffer_type_get_alignment, + /* .get_max_size = */ NULL, + /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes + /* .is_host = */ ggml_backend_zdnn_buffer_type_is_host, + }, + /* .device = */ &g_ggml_backend_zdnn_device, + /* .context = */ NULL, + }; + + return &ggml_backend_buffer_from_ptr_type_zdnn; +} + +// +// backend +// + +static const char * ggml_backend_zdnn_name(ggml_backend_t backend) { + return GGML_ZDNN_NAME; + + GGML_UNUSED(backend); +} + +static void ggml_backend_zdnn_free(ggml_backend_t backend) { + ggml_backend_zdnn_context * ctx = (ggml_backend_zdnn_context *)backend->context; + + ggml_zdnn_free(ctx); + free(backend); +} + +static enum ggml_status ggml_backend_zdnn_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { + return ggml_zdnn_graph_compute(backend, cgraph); +} + +static ggml_backend_i ggml_backend_zdnn_i = { + /* .get_name = */ 
ggml_backend_zdnn_name, + /* .free = */ ggml_backend_zdnn_free, + /* .set_tensor_async = */ NULL, + /* .get_tensor_async = */ NULL, + /* .cpy_tensor_async = */ NULL, + /* .synchronize = */ NULL, + /* .graph_plan_create = */ NULL, + /* .graph_plan_free = */ NULL, + /* .graph_plan_update = */ NULL, + /* .graph_plan_compute = */ NULL, + /* .graph_compute = */ ggml_backend_zdnn_graph_compute, + /* .event_record = */ NULL, + /* .event_wait = */ NULL, +}; + +static ggml_guid_t ggml_backend_zdnn_guid(void) { + static const char * guid_str = "IBM-ZDNN-ACCELER"; + return reinterpret_cast((void *)guid_str); +} + +// TODO: remove in the future +ggml_backend_t ggml_backend_zdnn_init(void) { + ggml_backend_dev_t dev = ggml_backend_reg_dev_get(ggml_backend_zdnn_reg(), 0); + + ggml_backend_zdnn_context * ctx = ggml_zdnn_init(dev); + if (ctx == NULL) { + GGML_LOG_ERROR("%s: error: failed to allocate context\n", __func__); + return NULL; + } + + ggml_backend_t backend = (ggml_backend_t)malloc(sizeof(ggml_backend)); + *backend = (ggml_backend) { + /* .guid = */ ggml_backend_zdnn_guid(), + /* .iface = */ ggml_backend_zdnn_i, + /* .device = */ dev, + /* .context = */ ctx, + }; + + return backend; +} + +bool ggml_backend_is_zdnn(ggml_backend_t backend) { + return backend != NULL && + ggml_guid_matches(backend->guid, ggml_backend_zdnn_guid()); + + GGML_UNUSED(backend); +} + +// +// backend device +// + +static const char * ggml_backend_zdnn_device_get_name(ggml_backend_dev_t dev) { + return GGML_ZDNN_NAME; + + GGML_UNUSED(dev); +} + +static const char * ggml_backend_zdnn_device_get_description(ggml_backend_dev_t dev) { + return "IBM Z Neural Network Processing Assist (NNPA)"; +} + +static void ggml_backend_zdnn_device_get_memory(ggml_backend_dev_t dev, size_t * free, size_t * total) { + *free = 0; + *total = 0; +} + +static enum ggml_backend_dev_type ggml_backend_zdnn_device_get_type(ggml_backend_dev_t dev) { + return GGML_BACKEND_DEVICE_TYPE_ACCEL; + + GGML_UNUSED(dev); +} + +static void ggml_backend_zdnn_device_get_props(ggml_backend_dev_t dev, ggml_backend_dev_props * props) { + props->name = ggml_backend_zdnn_device_get_name(dev); + props->description = ggml_backend_zdnn_device_get_description(dev); + props->type = ggml_backend_zdnn_device_get_type(dev); + ggml_backend_zdnn_device_get_memory(dev, &props->memory_free, &props->memory_total); + props->caps = (ggml_backend_dev_caps) { + /* .async = */ false, + /* .host_buffer = */ false, + /* .buffer_from_host_ptr = */ true, + /* .events = */ false, + }; +} + +static ggml_backend_t ggml_backend_zdnn_device_init(ggml_backend_dev_t dev, const char * params) { + ggml_backend_zdnn_context * ctx = ggml_zdnn_init(dev); + if (ctx == NULL) { + GGML_LOG_ERROR("%s: error: failed to allocate context\n", __func__); + return NULL; + } + + ggml_backend_t backend = (ggml_backend *)malloc(sizeof(ggml_backend)); + *backend = (ggml_backend) { + /* .guid = */ ggml_backend_zdnn_guid(), + /* .iface = */ ggml_backend_zdnn_i, + /* .device = */ dev, + /* .context = */ ctx, + }; + + return backend; + + GGML_UNUSED(params); +} + +static ggml_backend_buffer_type_t ggml_backend_zdnn_device_get_buffer_type(ggml_backend_dev_t dev) { + return ggml_backend_zdnn_buffer_type(); + + GGML_UNUSED(dev); +} + +static ggml_backend_buffer_t ggml_backend_zdnn_device_buffer_from_ptr(ggml_backend_dev_t dev, void * ptr, size_t size, size_t max_tensor_size) { + ggml_backend_zdnn_buffer_context * ctx = new ggml_backend_zdnn_buffer_context(); + + ctx->all_data = ptr; + ctx->all_size = size; + ctx->owned = 
false; + ctx->n_buffers = 0; + + const size_t size_page = sysconf(_SC_PAGESIZE); + + // page-align the data ptr + { + const uintptr_t offs = (uintptr_t) ptr % size_page; + ptr = (void *)((char *)ptr - offs); + size += offs; + } + + size_t size_aligned = size; + if ((size_aligned % size_page) != 0) { + size_aligned += size_page - (size_aligned % size_page); + } + + ggml_backend_zdnn_device_context * ctx_dev = (ggml_backend_zdnn_device_context *)dev->context; + + GGML_ASSERT(ctx_dev->zdnn_device >= 0); + int device = ctx_dev->zdnn_device; GGML_UNUSED(device); + + std::unique_ptr zdnn_buffer = std::make_unique(); + zdnn_buffer->data = ptr; + zdnn_buffer->size = size; + ctx->buffers.push_back(std::move(zdnn_buffer)); + + GGML_LOG_INFO("%s: allocated buffer, size = %8.2f MiB\n", + __func__, size_aligned / 1024.0 / 1024.0); + + ++ctx->n_buffers; + + return ggml_backend_buffer_init(ggml_backend_zdnn_buffer_from_ptr_type(), ggml_backend_zdnn_buffer_i, ctx, size); +} + +static bool ggml_backend_zdnn_device_supports_op(ggml_backend_dev_t dev, const ggml_tensor * op) { + ggml_backend_zdnn_device_context * ctx_dev = (ggml_backend_zdnn_device_context *) dev->context; + + return ggml_zdnn_supports_op(ctx_dev, op); +} + +static bool ggml_backend_zdnn_device_supports_buft(ggml_backend_dev_t dev, ggml_backend_buffer_type_t buft) { + return + buft->iface.get_name == ggml_backend_zdnn_buffer_type_get_name || + buft->iface.get_name == ggml_backend_zdnn_buffer_from_ptr_type_get_name; + + GGML_UNUSED(dev); +} + +static ggml_backend_device_i ggml_backend_zdnn_device_i = { + /* .get_name = */ ggml_backend_zdnn_device_get_name, + /* .get_description = */ ggml_backend_zdnn_device_get_description, + /* .get_memory = */ ggml_backend_zdnn_device_get_memory, + /* .get_type = */ ggml_backend_zdnn_device_get_type, + /* .get_props = */ ggml_backend_zdnn_device_get_props, + /* .init_backend = */ ggml_backend_zdnn_device_init, + /* .get_buffer_type = */ ggml_backend_zdnn_device_get_buffer_type, + /* .get_host_buffer_type = */ NULL, + /* .buffer_from_host_ptr = */ ggml_backend_zdnn_device_buffer_from_ptr, + /* .supports_op = */ ggml_backend_zdnn_device_supports_op, + /* .supports_buft = */ ggml_backend_zdnn_device_supports_buft, + /* .offload_op = */ NULL, + /* .event_new = */ NULL, + /* .event_free = */ NULL, + /* .event_synchronize = */ NULL, +}; + +// +// backend registry +// + +static const char * ggml_backend_zdnn_reg_get_name(ggml_backend_reg_t reg) { + return GGML_ZDNN_NAME; + + GGML_UNUSED(reg); +} + +static size_t ggml_backend_zdnn_reg_device_count(ggml_backend_reg_t reg) { + if (!zdnn_is_nnpa_installed()) { + return 0; + } + return 1; + + GGML_UNUSED(reg); +} + +static ggml_backend_dev_t ggml_backend_zdnn_reg_device_get(ggml_backend_reg_t reg, size_t index) { + GGML_ASSERT(index == 0); + + return &g_ggml_backend_zdnn_device; + + GGML_UNUSED(reg); + GGML_UNUSED(index); +} + +static ggml_backend_feature g_ggml_backend_zdnn_features[] = { + { "NNPA", zdnn_is_nnpa_installed() ? "1" : "0" }, + { "NNPA_PARMBLKFORMAT_0", zdnn_is_nnpa_parmblk_fmt_installed(1, NNPA_PARMBLKFORMAT_0) ? "1" : "0" }, + { "NNPA_PARMBLKFORMAT_1", zdnn_is_nnpa_parmblk_fmt_installed(1, NNPA_PARMBLKFORMAT_1) ? 
"1" : "0" }, + { NULL, NULL }, +}; + +static ggml_backend_feature * ggml_backend_zdnn_get_features(ggml_backend_reg_t reg) { + return g_ggml_backend_zdnn_features; + + GGML_UNUSED(reg); +} + +static void * ggml_backend_zdnn_get_proc_address(ggml_backend_reg_t reg, const char * name) { + if (strcmp(name, "ggml_backend_get_features") == 0) { + return (void *) ggml_backend_zdnn_get_features; + } + + return NULL; + + GGML_UNUSED(reg); +} + +static ggml_backend_reg_i ggml_backend_zdnn_reg_i = { + /* .get_name = */ ggml_backend_zdnn_reg_get_name, + /* .get_device_count = */ ggml_backend_zdnn_reg_device_count, + /* .get_device = */ ggml_backend_zdnn_reg_device_get, + /* .get_proc_address = */ ggml_backend_zdnn_get_proc_address, +}; + +static void ggml_zdnn_cleanup(void) { + ggml_backend_zdnn_device_rel(&g_ggml_ctx_dev_main); +} + +// TODO: make thread-safe +ggml_backend_reg_t ggml_backend_zdnn_reg(void) { + ggml_backend_zdnn_device_acq(&g_ggml_ctx_dev_main); + + // register cleanup callback + atexit(ggml_zdnn_cleanup); + + { + g_ggml_backend_zdnn_reg = (ggml_backend_reg) { + /* .api_version = */ GGML_ZDNN_VERSION, + /* .iface = */ ggml_backend_zdnn_reg_i, + /* .context = */ NULL, + }; + + g_ggml_backend_zdnn_device = (ggml_backend_device) { + /* .iface = */ ggml_backend_zdnn_device_i, + /* .reg = */ &g_ggml_backend_zdnn_reg, + /* .context = */ &g_ggml_ctx_dev_main, + }; + + return &g_ggml_backend_zdnn_reg; + } +} + +GGML_BACKEND_DL_IMPL(ggml_backend_zdnn_reg) diff --git a/ggml/src/ggml.c b/ggml/src/ggml.c index 55a76f8248..a4417f1a17 100644 --- a/ggml/src/ggml.c +++ b/ggml/src/ggml.c @@ -1012,11 +1012,12 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "CROSS_ENTROPY_LOSS", "CROSS_ENTROPY_LOSS_BACK", "OPT_STEP_ADAMW", + "OPT_STEP_SGD", "GLU", }; -static_assert(GGML_OP_COUNT == 87, "GGML_OP_COUNT != 87"); +static_assert(GGML_OP_COUNT == 88, "GGML_OP_COUNT != 88"); static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "none", @@ -1113,15 +1114,15 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "cross_entropy_loss(x,y)", "cross_entropy_loss_back(x,y)", "adamw(x)", + "sgd(x)", "glu(x)", }; -static_assert(GGML_OP_COUNT == 87, "GGML_OP_COUNT != 87"); +static_assert(GGML_OP_COUNT == 88, "GGML_OP_COUNT != 88"); static_assert(GGML_OP_POOL_COUNT == 2, "GGML_OP_POOL_COUNT != 2"); - static const char * GGML_UNARY_OP_NAME[GGML_UNARY_OP_COUNT] = { "ABS", "SGN", @@ -3885,6 +3886,7 @@ static struct ggml_tensor * ggml_rope_impl( struct ggml_tensor * b, struct ggml_tensor * c, int n_dims, + int sections[GGML_MROPE_SECTIONS], int mode, int n_ctx_orig, float freq_base, @@ -3898,15 +3900,19 @@ static struct ggml_tensor * ggml_rope_impl( GGML_ASSERT(ggml_is_vector(b)); GGML_ASSERT(b->type == GGML_TYPE_I32); - GGML_ASSERT(a->ne[2] == b->ne[0]); + + bool mrope_used = mode & GGML_ROPE_TYPE_MROPE; + if (mrope_used) { + GGML_ASSERT(a->ne[2] * 4 == b->ne[0]); // mrope expecting 4 position ids per token + } else { + GGML_ASSERT(a->ne[2] == b->ne[0]); + } if (c) { GGML_ASSERT(c->type == GGML_TYPE_F32); GGML_ASSERT(c->ne[0] >= n_dims / 2); } - int sections[4] = {0, 0, 0, 0}; - struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); int32_t params[15] = { /*n_past*/ 0, n_dims, mode, /*n_ctx*/ 0, n_ctx_orig }; @@ -3916,7 +3922,11 @@ static struct ggml_tensor * ggml_rope_impl( memcpy(params + 8, &attn_factor, sizeof(float)); memcpy(params + 9, &beta_fast, sizeof(float)); memcpy(params + 10, &beta_slow, sizeof(float)); - memcpy(params + 11, §ions, sizeof(int)*4); + if (mrope_used) { + memcpy(params + 11, sections, sizeof(int32_t) * GGML_MROPE_SECTIONS); + } else { + memset(params + 11, 0, sizeof(int32_t) * GGML_MROPE_SECTIONS); + } ggml_set_op_params(result, params, sizeof(params)); result->op = GGML_OP_ROPE; @@ -3934,7 +3944,7 @@ struct ggml_tensor * ggml_rope( int n_dims, int mode) { return ggml_rope_impl( - ctx, a, b, NULL, n_dims, mode, 0, 10000.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, false + ctx, a, b, NULL, n_dims, NULL, mode, 0, 10000.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, false ); } @@ -3944,7 +3954,7 @@ struct ggml_tensor * ggml_rope_multi( struct ggml_tensor * b, struct ggml_tensor * c, int n_dims, - int sections[4], + int sections[GGML_MROPE_SECTIONS], int mode, int n_ctx_orig, float freq_base, @@ -3953,36 +3963,31 @@ struct ggml_tensor * ggml_rope_multi( float attn_factor, float beta_fast, float beta_slow) { - // Multimodal Rotary Position Embedding - GGML_ASSERT((mode & 1) == 0 && "mode & 1 == 1 is no longer supported"); + return ggml_rope_impl( + ctx, a, b, c, n_dims, sections, mode, n_ctx_orig, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow, false + ); +} - GGML_ASSERT(ggml_is_vector(b)); - GGML_ASSERT(b->type == GGML_TYPE_I32); - GGML_ASSERT(a->ne[2] * 4 == b->ne[0]); // mrope expecting 4 position ids per token - - if (c) { - GGML_ASSERT(c->type == GGML_TYPE_F32); - GGML_ASSERT(c->ne[0] >= n_dims / 2); - } - - struct ggml_tensor * result = ggml_dup_tensor(ctx, a); - - int32_t params[11 + 4] = { /*n_past*/ 0, n_dims, mode, /*n_ctx*/ 0, n_ctx_orig }; - memcpy(params + 5, &freq_base, sizeof(float)); - memcpy(params + 6, &freq_scale, sizeof(float)); - memcpy(params + 7, &ext_factor, sizeof(float)); - memcpy(params + 8, &attn_factor, sizeof(float)); - memcpy(params + 9, &beta_fast, sizeof(float)); - memcpy(params + 10, &beta_slow, sizeof(float)); - memcpy(¶ms[11], sections, sizeof(int)*4); - ggml_set_op_params(result, params, sizeof(params)); - - result->op = GGML_OP_ROPE; - result->src[0] = a; - result->src[1] = b; - result->src[2] = c; - - return result; +struct ggml_tensor * ggml_rope_multi_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + int n_dims, + int sections[GGML_MROPE_SECTIONS], + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow) { + return ggml_rope_impl( + ctx, a, b, c, n_dims, sections, mode, n_ctx_orig, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow, true + ); } struct ggml_tensor * ggml_rope_inplace( @@ -3992,7 +3997,7 @@ struct ggml_tensor * ggml_rope_inplace( int n_dims, int mode) { return ggml_rope_impl( - ctx, a, b, NULL, n_dims, mode, 0, 10000.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, true + ctx, a, b, NULL, n_dims, NULL, mode, 0, 10000.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, true ); } @@ -4011,7 +4016,7 @@ struct ggml_tensor * ggml_rope_ext( float beta_fast, float beta_slow) { return ggml_rope_impl( - ctx, a, b, c, n_dims, mode, n_ctx_orig, freq_base, freq_scale, + ctx, a, b, c, n_dims, NULL, mode, n_ctx_orig, freq_base, freq_scale, ext_factor, 
attn_factor, beta_fast, beta_slow, false ); } @@ -4031,7 +4036,7 @@ struct ggml_tensor * ggml_rope_ext_inplace( float beta_fast, float beta_slow) { return ggml_rope_impl( - ctx, a, b, c, n_dims, mode, n_ctx_orig, freq_base, freq_scale, + ctx, a, b, c, n_dims, NULL, mode, n_ctx_orig, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow, true ); } @@ -4050,7 +4055,7 @@ struct ggml_tensor * ggml_rope_custom( float beta_fast, float beta_slow) { return ggml_rope_impl( - ctx, a, b, NULL, n_dims, mode, n_ctx_orig, freq_base, freq_scale, + ctx, a, b, NULL, n_dims, NULL, mode, n_ctx_orig, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow, false ); } @@ -4069,7 +4074,7 @@ struct ggml_tensor * ggml_rope_custom_inplace( float beta_fast, float beta_slow) { return ggml_rope_impl( - ctx, a, b, NULL, n_dims, mode, n_ctx_orig, freq_base, freq_scale, + ctx, a, b, NULL, n_dims, NULL, mode, n_ctx_orig, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow, true ); } @@ -4267,14 +4272,13 @@ struct ggml_tensor * ggml_conv_1d_dw( int s0, int p0, int d0) { - struct ggml_tensor * new_a = ggml_reshape_4d(ctx, a, a->ne[0], 1, a->ne[1], a->ne[2]); struct ggml_tensor * new_b = ggml_reshape_4d(ctx, b, b->ne[0], 1, b->ne[1], b->ne[2]); - struct ggml_tensor * im2col = ggml_im2col(ctx, new_a, new_b, s0, 0, p0, 0, d0, 0, false, GGML_TYPE_F16); + struct ggml_tensor * im2col = ggml_im2col(ctx, a, new_b, s0, 0, p0, 0, d0, 0, false, GGML_TYPE_F16); struct ggml_tensor * result = ggml_mul_mat(ctx, im2col, a); - result = ggml_reshape_3d(ctx, result, b->ne[0], b->ne[1], 1); + result = ggml_reshape_3d(ctx, result, result->ne[0], result->ne[2], 1); return result; } @@ -5602,6 +5606,28 @@ struct ggml_tensor * ggml_opt_step_adamw( return result; } +// opt_step_sgd + +struct ggml_tensor * ggml_opt_step_sgd( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * grad, + struct ggml_tensor * params) { + GGML_ASSERT(a->flags & GGML_TENSOR_FLAG_PARAM); + GGML_ASSERT(ggml_are_same_shape(a, grad)); + GGML_ASSERT(params->type == GGML_TYPE_F32); + GGML_ASSERT(ggml_nelements(params) == 2); + + struct ggml_tensor * result = ggml_view_tensor(ctx, a); + + result->op = GGML_OP_OPT_STEP_SGD; + result->src[0] = a; + result->src[1] = grad; + result->src[2] = params; + + return result; +} + //////////////////////////////////////////////////////////////////////////////// struct ggml_hash_set ggml_hash_set_new(size_t size) { diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 911eea504a..41804f3a2b 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -2832,6 +2832,7 @@ class VisionProjectorType: QWEN2A = "qwen2a" # audio QWEN25O = "qwen2.5o" # omni VOXTRAL = "voxtral" + LFM2 = "lfm2" # Items here are (block size, type size) diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index dc7c03b464..87edaa3232 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -1119,7 +1119,8 @@ class TensorNameMap: "model.vision_tower.embeddings.patch_embeddings.projection", # Intern-S1 "vpm.embeddings.patch_embedding", "model.vision_model.embeddings.patch_embedding", # SmolVLM - "vision_tower.patch_conv", # pixtral + "vision_tower.patch_conv", # pixtral-hf + "vision_encoder.patch_conv", # pixtral "vision_model.patch_embedding.linear", # llama 4 "visual.patch_embed.proj", # qwen2vl ), @@ -1138,7 +1139,8 @@ class TensorNameMap: "vpm.encoder.layers.{bid}.self_attn.q_proj", 
"model.vision_model.encoder.layers.{bid}.self_attn.q_proj", # SmolVLM "vision_model.model.layers.{bid}.self_attn.q_proj", # llama4 - "vision_tower.transformer.layers.{bid}.attention.q_proj", # pixtral + "vision_tower.transformer.layers.{bid}.attention.q_proj", # pixtral-hf + "vision_encoder.transformer.layers.{bid}.attention.wq", # pixtral "visual.blocks.{bid}.attn.q", # qwen2vl, generated ), @@ -1153,7 +1155,8 @@ class TensorNameMap: "vpm.encoder.layers.{bid}.self_attn.k_proj", "model.vision_model.encoder.layers.{bid}.self_attn.k_proj", # SmolVLM "vision_model.model.layers.{bid}.self_attn.k_proj", # llama4 - "vision_tower.transformer.layers.{bid}.attention.k_proj", # pixtral + "vision_tower.transformer.layers.{bid}.attention.k_proj", # pixtral-hf + "vision_encoder.transformer.layers.{bid}.attention.wk", # pixtral "visual.blocks.{bid}.attn.k", # qwen2vl, generated ), @@ -1168,7 +1171,8 @@ class TensorNameMap: "vpm.encoder.layers.{bid}.self_attn.v_proj", "model.vision_model.encoder.layers.{bid}.self_attn.v_proj", # SmolVLM "vision_model.model.layers.{bid}.self_attn.v_proj", # llama4 - "vision_tower.transformer.layers.{bid}.attention.v_proj", # pixtral + "vision_tower.transformer.layers.{bid}.attention.v_proj", # pixtral-hf + "vision_encoder.transformer.layers.{bid}.attention.wv", # pixtral "visual.blocks.{bid}.attn.v", # qwen2vl, generated ), @@ -1178,7 +1182,8 @@ class TensorNameMap: "model.vision_tower.encoder.layer.{bid}.layernorm_before", # Intern-S1 "vpm.encoder.layers.{bid}.layer_norm1", "model.vision_model.encoder.layers.{bid}.layer_norm1", # SmolVLM - "vision_tower.transformer.layers.{bid}.attention_norm", # pixtral + "vision_tower.transformer.layers.{bid}.attention_norm", # pixtral-hf + "vision_encoder.transformer.layers.{bid}.attention_norm", # pixtral "vision_model.model.layers.{bid}.input_layernorm", # llama4 "visual.blocks.{bid}.norm1", # qwen2vl ), @@ -1190,7 +1195,8 @@ class TensorNameMap: "vpm.encoder.layers.{bid}.self_attn.out_proj", "model.vision_model.encoder.layers.{bid}.self_attn.out_proj", # SmolVLM "vision_model.model.layers.{bid}.self_attn.o_proj", # llama4 - "vision_tower.transformer.layers.{bid}.attention.o_proj", # pixtral + "vision_tower.transformer.layers.{bid}.attention.o_proj", # pixtral-hf + "vision_encoder.transformer.layers.{bid}.attention.wo", # pixtral "visual.blocks.{bid}.attn.proj", # qwen2vl ), @@ -1201,7 +1207,8 @@ class TensorNameMap: "vpm.encoder.layers.{bid}.layer_norm2", "model.vision_model.encoder.layers.{bid}.layer_norm2", # SmolVLM "vision_model.model.layers.{bid}.post_attention_layernorm", # llama4 - "vision_tower.transformer.layers.{bid}.ffn_norm", # pixtral + "vision_tower.transformer.layers.{bid}.ffn_norm", # pixtral-hf + "vision_encoder.transformer.layers.{bid}.ffn_norm", # pixtral "visual.blocks.{bid}.norm2", # qwen2vl ), @@ -1210,14 +1217,16 @@ class TensorNameMap: "model.vision_tower.encoder.layer.{bid}.mlp.fc1", # Intern-S1 "vpm.encoder.layers.{bid}.mlp.fc1", "model.vision_model.encoder.layers.{bid}.mlp.fc1", # SmolVLM, gemma3 - "vision_tower.transformer.layers.{bid}.feed_forward.up_proj", # pixtral + "vision_tower.transformer.layers.{bid}.feed_forward.up_proj", # pixtral-hf + "vision_encoder.transformer.layers.{bid}.feed_forward.w3", # pixtral "vision_model.model.layers.{bid}.mlp.fc1", # llama4 "visual.blocks.{bid}.mlp.fc1", # qwen2vl "visual.blocks.{bid}.mlp.up_proj", # qwen2.5vl ), MODEL_TENSOR.V_ENC_FFN_GATE: ( - "vision_tower.transformer.layers.{bid}.feed_forward.gate_proj", # pixtral + 
"vision_tower.transformer.layers.{bid}.feed_forward.gate_proj", # pixtral-hf + "vision_encoder.transformer.layers.{bid}.feed_forward.w1", # pixtral "visual.blocks.{bid}.mlp.gate_proj", # qwen2.5vl ), @@ -1226,7 +1235,8 @@ class TensorNameMap: "model.vision_tower.encoder.layer.{bid}.mlp.fc2", # Intern-S1 "vpm.encoder.layers.{bid}.mlp.fc2", "model.vision_model.encoder.layers.{bid}.mlp.fc2", # SmolVLM, gemma3 - "vision_tower.transformer.layers.{bid}.feed_forward.down_proj", # pixtral + "vision_tower.transformer.layers.{bid}.feed_forward.down_proj", # pixtral-hf + "vision_encoder.transformer.layers.{bid}.feed_forward.w2", # pixtral "vision_model.model.layers.{bid}.mlp.fc2", # llama4 "visual.blocks.{bid}.mlp.fc2", # qwen2vl "visual.blocks.{bid}.mlp.down_proj", # qwen2.5vl @@ -1244,7 +1254,8 @@ class TensorNameMap: MODEL_TENSOR.V_PRE_NORM: ( "vision_tower.vision_model.pre_layrnorm", - "vision_tower.ln_pre", # pixtral + "vision_tower.ln_pre", # pixtral-hf + "vision_encoder.ln_pre", # pixtral "vision_model.layernorm_pre", # llama4 ), @@ -1261,6 +1272,8 @@ class TensorNameMap: MODEL_TENSOR.V_MM_INP_NORM: ( "multi_modal_projector.norm", + "multi_modal_projector.layer_norm", + "pre_mm_projector_norm", ), MODEL_TENSOR.V_MM_SOFT_EMB_NORM: ( @@ -1316,7 +1329,8 @@ class TensorNameMap: ), MODEL_TENSOR.V_MM_PATCH_MERGER: ( - "multi_modal_projector.patch_merger.merging_layer", # mistral small 3.1 + "multi_modal_projector.patch_merger.merging_layer", # mistral small 3.1 - hf + "patch_merger.merging_layer", # mistral ), # audio (mtmd) diff --git a/gguf-py/gguf/utility.py b/gguf-py/gguf/utility.py index 00adcbc937..769ccb02f0 100644 --- a/gguf-py/gguf/utility.py +++ b/gguf-py/gguf/utility.py @@ -145,7 +145,11 @@ class SafetensorRemote: tensors[key] = val return tensors - raise ValueError(f"Model {model_id} does not have any safetensor files") + raise ValueError( + f"No safetensor file has been found for model {model_id}." + "If the repo has safetensor files, make sure the model is public or you have a " + "valid Hugging Face token set in the environment variable HF_TOKEN." 
+ ) @classmethod def get_list_tensors(cls, url: str) -> dict[str, RemoteTensor]: diff --git a/include/llama.h b/include/llama.h index f44e2383d0..7fbdebe476 100644 --- a/include/llama.h +++ b/include/llama.h @@ -872,6 +872,29 @@ extern "C" { size_t n_token_capacity, size_t * n_token_count_out); +#define LLAMA_STATE_SEQ_FLAGS_SWA_ONLY 1 + + typedef uint32_t llama_state_seq_flags; + + LLAMA_API size_t llama_state_seq_get_size_ext( + struct llama_context * ctx, + llama_seq_id seq_id, + llama_state_seq_flags flags); + + LLAMA_API size_t llama_state_seq_get_data_ext( + struct llama_context * ctx, + uint8_t * dst, + size_t size, + llama_seq_id seq_id, + llama_state_seq_flags flags); + + LLAMA_API size_t llama_state_seq_set_data_ext( + struct llama_context * ctx, + const uint8_t * src, + size_t size, + llama_seq_id dest_seq_id, + llama_state_seq_flags flags); + // // Decoding // @@ -1439,6 +1462,8 @@ extern "C" { ggml_opt_get_optimizer_params get_opt_pars; // callback for calculating optimizer parameters void * get_opt_pars_ud; // userdata for calculating optimizer parameters + + enum ggml_opt_optimizer_type optimizer_type; }; LLAMA_API void llama_opt_init(struct llama_context * lctx, struct llama_model * model, struct llama_opt_params lopt_params); diff --git a/models/templates/openai-gpt-oss-120b.jinja b/models/templates/openai-gpt-oss-120b.jinja new file mode 100644 index 0000000000..dc7bb11927 --- /dev/null +++ b/models/templates/openai-gpt-oss-120b.jinja @@ -0,0 +1,331 @@ +{#- + In addition to the normal inputs of `messages` and `tools`, this template also accepts the + following kwargs: + - "builtin_tools": A list, can contain "browser" and/or "python". + - "model_identity": A string that optionally describes the model identity. + - "reasoning_effort": A string that describes the reasoning effort, defaults to "medium". 
+ #} + +{#- Tool Definition Rendering ============================================== #} +{%- macro render_typescript_type(param_spec, required_params, is_nullable=false) -%} + {%- if param_spec.type == "array" -%} + {%- if param_spec['items'] -%} + {%- if param_spec['items']['type'] == "string" -%} + {{- "string[]" }} + {%- elif param_spec['items']['type'] == "number" -%} + {{- "number[]" }} + {%- elif param_spec['items']['type'] == "integer" -%} + {{- "number[]" }} + {%- elif param_spec['items']['type'] == "boolean" -%} + {{- "boolean[]" }} + {%- else -%} + {%- set inner_type = render_typescript_type(param_spec['items'], required_params) -%} + {%- if inner_type == "object | object" or inner_type|length > 50 -%} + {{- "any[]" }} + {%- else -%} + {{- inner_type + "[]" }} + {%- endif -%} + {%- endif -%} + {%- if param_spec.nullable -%} + {{- " | null" }} + {%- endif -%} + {%- else -%} + {{- "any[]" }} + {%- if param_spec.nullable -%} + {{- " | null" }} + {%- endif -%} + {%- endif -%} + {%- elif param_spec.type is defined and param_spec.type is iterable and param_spec.type is not string and param_spec.type is not mapping and param_spec.type[0] is defined -%} + {#- Handle array of types like ["object", "object"] from Union[dict, list] #} + {%- if param_spec.type | length > 1 -%} + {{- param_spec.type | join(" | ") }} + {%- else -%} + {{- param_spec.type[0] }} + {%- endif -%} + {%- elif param_spec.oneOf -%} + {#- Handle oneOf schemas - check for complex unions and fallback to any #} + {%- set has_object_variants = false -%} + {%- for variant in param_spec.oneOf -%} + {%- if variant.type == "object" -%} + {%- set has_object_variants = true -%} + {%- endif -%} + {%- endfor -%} + {%- if has_object_variants and param_spec.oneOf|length > 1 -%} + {{- "any" }} + {%- else -%} + {%- for variant in param_spec.oneOf -%} + {{- render_typescript_type(variant, required_params) -}} + {%- if variant.description %} + {{- "// " + variant.description }} + {%- endif -%} + {%- if variant.default is defined %} + {{ "// default: " + variant.default|tojson }} + {%- endif -%} + {%- if not loop.last %} + {{- " | " }} + {% endif -%} + {%- endfor -%} + {%- endif -%} + {%- elif param_spec.type == "string" -%} + {%- if param_spec.enum -%} + {{- '"' + param_spec.enum|join('" | "') + '"' -}} + {%- else -%} + {{- "string" }} + {%- if param_spec.nullable %} + {{- " | null" }} + {%- endif -%} + {%- endif -%} + {%- elif param_spec.type == "number" -%} + {{- "number" }} + {%- elif param_spec.type == "integer" -%} + {{- "number" }} + {%- elif param_spec.type == "boolean" -%} + {{- "boolean" }} + + {%- elif param_spec.type == "object" -%} + {%- if param_spec.properties -%} + {{- "{\n" }} + {%- for prop_name, prop_spec in param_spec.properties.items() -%} + {{- prop_name -}} + {%- if prop_name not in (param_spec.required or []) -%} + {{- "?" 
}} + {%- endif -%} + {{- ": " }} + {{ render_typescript_type(prop_spec, param_spec.required or []) }} + {%- if not loop.last -%} + {{-", " }} + {%- endif -%} + {%- endfor -%} + {{- "}" }} + {%- else -%} + {{- "object" }} + {%- endif -%} + {%- else -%} + {{- "any" }} + {%- endif -%} +{%- endmacro -%} + +{%- macro render_tool_namespace(namespace_name, tools) -%} + {{- "## " + namespace_name + "\n\n" }} + {{- "namespace " + namespace_name + " {\n\n" }} + {%- for tool in tools %} + {%- set tool = tool.function %} + {{- "// " + tool.description + "\n" }} + {{- "type "+ tool.name + " = " }} + {%- if tool.parameters and tool.parameters.properties %} + {{- "(_: {\n" }} + {%- for param_name, param_spec in tool.parameters.properties.items() %} + {%- if param_spec.description %} + {{- "// " + param_spec.description + "\n" }} + {%- endif %} + {{- param_name }} + {%- if param_name not in (tool.parameters.required or []) -%} + {{- "?" }} + {%- endif -%} + {{- ": " }} + {{- render_typescript_type(param_spec, tool.parameters.required or []) }} + {%- if param_spec.default is defined -%} + {%- if param_spec.enum %} + {{- ", // default: " + param_spec.default }} + {%- elif param_spec.oneOf %} + {{- "// default: " + param_spec.default }} + {%- else %} + {{- ", // default: " + param_spec.default|tojson }} + {%- endif -%} + {%- endif -%} + {%- if not loop.last %} + {{- ",\n" }} + {%- else %} + {{- ",\n" }} + {%- endif -%} + {%- endfor %} + {{- "}) => any;\n\n" }} + {%- else -%} + {{- "() => any;\n\n" }} + {%- endif -%} + {%- endfor %} + {{- "} // namespace " + namespace_name }} +{%- endmacro -%} + +{%- macro render_builtin_tools(browser_tool, python_tool) -%} + {%- if browser_tool %} + {{- "## browser\n\n" }} + {{- "// Tool for browsing.\n" }} + {{- "// The `cursor` appears in brackets before each browsing display: `[{cursor}]`.\n" }} + {{- "// Cite information from the tool using the following format:\n" }} + {{- "// `【{cursor}†L{line_start}(-L{line_end})?】`, for example: `【6†L9-L11】` or `【8†L3】`.\n" }} + {{- "// Do not quote more than 10 words directly from the tool output.\n" }} + {{- "// sources=web (default: web)\n" }} + {{- "namespace browser {\n\n" }} + {{- "// Searches for information related to `query` and displays `topn` results.\n" }} + {{- "type search = (_: {\n" }} + {{- "query: string,\n" }} + {{- "topn?: number, // default: 10\n" }} + {{- "source?: string,\n" }} + {{- "}) => any;\n\n" }} + {{- "// Opens the link `id` from the page indicated by `cursor` starting at line number `loc`, showing `num_lines` lines.\n" }} + {{- "// Valid link ids are displayed with the formatting: `【{id}†.*】`.\n" }} + {{- "// If `cursor` is not provided, the most recent page is implied.\n" }} + {{- "// If `id` is a string, it is treated as a fully qualified URL associated with `source`.\n" }} + {{- "// If `loc` is not provided, the viewport will be positioned at the beginning of the document or centered on the most relevant passage, if available.\n" }} + {{- "// Use this function without `id` to scroll to a new location of an opened page.\n" }} + {{- "type open = (_: {\n" }} + {{- "id?: number | string, // default: -1\n" }} + {{- "cursor?: number, // default: -1\n" }} + {{- "loc?: number, // default: -1\n" }} + {{- "num_lines?: number, // default: -1\n" }} + {{- "view_source?: boolean, // default: false\n" }} + {{- "source?: string,\n" }} + {{- "}) => any;\n\n" }} + {{- "// Finds exact matches of `pattern` in the current page, or the page given by `cursor`.\n" }} + {{- "type find = (_: {\n" }} + {{- "pattern: string,\n" 
}} + {{- "cursor?: number, // default: -1\n" }} + {{- "}) => any;\n\n" }} + {{- "} // namespace browser\n\n" }} + {%- endif -%} + + {%- if python_tool %} + {{- "## python\n\n" }} + {{- "Use this tool to execute Python code in your chain of thought. The code will not be shown to the user. This tool should be used for internal reasoning, but not for code that is intended to be visible to the user (e.g. when creating plots, tables, or files).\n\n" }} + {{- "When you send a message containing Python code to python, it will be executed in a stateful Jupyter notebook environment. python will respond with the output of the execution or time out after 120.0 seconds. The drive at '/mnt/data' can be used to save and persist user files. Internet access for this session is UNKNOWN. Depends on the cluster.\n\n" }} + {%- endif -%} +{%- endmacro -%} + +{#- System Message Construction ============================================ #} +{%- macro build_system_message() -%} + {%- if model_identity is not defined %} + {%- set model_identity = "You are ChatGPT, a large language model trained by OpenAI." %} + {%- endif %} + {{- model_identity + "\n" }} + {{- "Knowledge cutoff: 2024-06\n" }} + {{- "Current date: " + strftime_now("%Y-%m-%d") + "\n\n" }} + {%- if reasoning_effort is not defined %} + {%- set reasoning_effort = "medium" %} + {%- endif %} + {{- "Reasoning: " + reasoning_effort + "\n\n" }} + {%- if builtin_tools %} + {{- "# Tools\n\n" }} + {%- set available_builtin_tools = namespace(browser=false, python=false) %} + {%- for tool in builtin_tools %} + {%- if tool == "browser" %} + {%- set available_builtin_tools.browser = true %} + {%- elif tool == "python" %} + {%- set available_builtin_tools.python = true %} + {%- endif %} + {%- endfor %} + {{- render_builtin_tools(available_builtin_tools.browser, available_builtin_tools.python) }} + {%- endif -%} + {{- "# Valid channels: analysis, commentary, final. Channel must be included for every message." }} + {%- if tools -%} + {{- "\nCalls to these tools must go to the commentary channel: 'functions'." 
}} + {%- endif -%} +{%- endmacro -%} + +{#- Main Template Logic ================================================= #} +{#- Set defaults #} + +{#- Render system message #} +{{- "<|start|>system<|message|>" }} +{{- build_system_message() }} +{{- "<|end|>" }} + +{#- Extract developer message #} +{%- if messages[0].role == "developer" or messages[0].role == "system" %} + {%- set developer_message = messages[0].content %} + {%- set loop_messages = messages[1:] %} +{%- else %} + {%- set developer_message = "" %} + {%- set loop_messages = messages %} +{%- endif %} + +{#- Render developer message #} +{%- if developer_message or tools %} + {{- "<|start|>developer<|message|>" }} + {%- if developer_message %} + {{- "# Instructions\n\n" }} + {{- developer_message }} + {{- "\n\n" }} + {%- endif %} + {%- if tools -%} + {{- "# Tools\n\n" }} + {{- render_tool_namespace("functions", tools) }} + {%- endif -%} + {{- "<|end|>" }} +{%- endif %} + +{#- Render messages #} +{%- set last_tool_call = namespace(name=none) %} +{%- for message in loop_messages -%} + {#- At this point only assistant/user/tool messages should remain #} + {%- if message.role == 'assistant' -%} + {#- Checks to ensure the messages are being passed in the format we expect #} + {%- if "content" in message %} + {%- if "<|channel|>analysis<|message|>" in message.content or "<|channel|>final<|message|>" in message.content %} + {{- raise_exception("You have passed a message containing <|channel|> tags in the content field. Instead of doing this, you should pass analysis messages (the string between '<|message|>' and '<|end|>') in the 'thinking' field, and final messages (the string between '<|message|>' and '<|end|>') in the 'content' field.") }} + {%- endif %} + {%- endif %} + {%- if "thinking" in message %} + {%- if "<|channel|>analysis<|message|>" in message.thinking or "<|channel|>final<|message|>" in message.thinking %} + {{- raise_exception("You have passed a message containing <|channel|> tags in the thinking field. Instead of doing this, you should pass analysis messages (the string between '<|message|>' and '<|end|>') in the 'thinking' field, and final messages (the string between '<|message|>' and '<|end|>') in the 'content' field.") }} + {%- endif %} + {%- endif %} + {%- if "tool_calls" in message %} + {#- We need very careful handling here - we want to drop the tool call analysis message if the model #} + {#- has output a later <|final|> message, but otherwise we want to retain it. This is the only case #} + {#- when we render CoT/analysis messages in inference. #} + {%- set future_final_message = namespace(found=false) %} + {%- for future_message in loop_messages[loop.index:] %} + {%- if future_message.role == 'assistant' and "tool_calls" not in future_message %} + {%- set future_final_message.found = true %} + {%- endif %} + {%- endfor %} + {#- We assume max 1 tool call per message, and so we infer the tool call name #} + {#- in "tool" messages from the most recent assistant tool call name #} + {%- set tool_call = message.tool_calls[0] %} + {%- if tool_call.function %} + {%- set tool_call = tool_call.function %} + {%- endif %} + {%- if message.content and message.thinking %} + {{- raise_exception("Cannot pass both content and thinking in an assistant message with tool calls! 
Put the analysis message in one or the other, but not both.") }} + {%- elif message.content and not future_final_message.found %} + {{- "<|start|>assistant<|channel|>analysis<|message|>" + message.content + "<|end|>" }} + {%- elif message.thinking and not future_final_message.found %} + {{- "<|start|>assistant<|channel|>analysis<|message|>" + message.thinking + "<|end|>" }} + {%- endif %} + {{- "<|start|>assistant to=" }} + {{- "functions." + tool_call.name + "<|channel|>commentary " }} + {{- (tool_call.content_type if tool_call.content_type is defined else "json") + "<|message|>" }} + {{- tool_call.arguments|tojson }} + {{- "<|call|>" }} + {%- set last_tool_call.name = tool_call.name %} + {%- elif loop.last and not add_generation_prompt %} + {#- Only render the CoT if the final turn is an assistant turn and add_generation_prompt is false #} + {#- This is a situation that should only occur in training, never in inference. #} + {%- if "thinking" in message %} + {{- "<|start|>assistant<|channel|>analysis<|message|>" + message.thinking + "<|end|>" }} + {%- endif %} + {#- <|return|> indicates the end of generation, but <|end|> does not #} + {#- <|return|> should never be an input to the model, but we include it as the final token #} + {#- when training, so the model learns to emit it. #} + {{- "<|start|>assistant<|channel|>final<|message|>" + message.content + "<|return|>" }} + {%- else %} + {#- CoT is dropped during all previous turns, so we never render it for inference #} + {{- "<|start|>assistant<|channel|>final<|message|>" + message.content + "<|end|>" }} + {%- set last_tool_call.name = none %} + {%- endif %} + {%- elif message.role == 'tool' -%} + {%- if last_tool_call.name is none %} + {{- raise_exception("Message has tool role, but there was no previous assistant message with a tool call!") }} + {%- endif %} + {{- "<|start|>functions." 
+ last_tool_call.name }} + {{- " to=assistant<|channel|>commentary<|message|>" + message.content|tojson + "<|end|>" }} + {%- elif message.role == 'user' -%} + {{- "<|start|>user<|message|>" + message.content + "<|end|>" }} + {%- endif -%} +{%- endfor -%} + +{#- Generation prompt #} +{%- if add_generation_prompt -%} +<|start|>assistant +{%- endif -%} \ No newline at end of file diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 29d30e0a18..826c560cd8 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -74,21 +74,7 @@ while read c; do cmake/common.cmake \ cmake/ggml-config.cmake.in \ src/ggml-cpu/cmake/FindSIMD.cmake \ - src/ggml*.h \ - src/ggml*.c \ - src/ggml*.cpp \ - src/gguf*.cpp \ - src/ggml-blas/* \ - src/ggml-cann/* \ - src/ggml-cpu/* \ - src/ggml-cuda/* \ - src/ggml-hip/* \ - src/ggml-metal/* \ - src/ggml-musa/* \ - src/ggml-opencl/* \ - src/ggml-rpc/* \ - src/ggml-sycl/* \ - src/ggml-vulkan/* \ + src/ggml* \ include/ggml*.h \ include/gguf*.h \ tests/test-opt.cpp \ @@ -131,21 +117,7 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then # cmake/ggml-config.cmake.in -> ggml/cmake/ggml-config.cmake.in # src/ggml-cpu/cmake/FindSIMD.cmake -> ggml/src/ggml-cpu/cmake/FindSIMD.cmake # - # src/ggml*.c -> ggml/src/ggml*.c - # src/ggml*.cpp -> ggml/src/ggml*.cpp - # src/ggml*.h -> ggml/src/ggml*.h - # src/gguf*.cpp -> ggml/src/gguf*.cpp - # src/ggml-blas/* -> ggml/src/ggml-blas/* - # src/ggml-cann/* -> ggml/src/ggml-cann/* - # src/ggml-cpu/* -> ggml/src/ggml-cpu/* - # src/ggml-cuda/* -> ggml/src/ggml-cuda/* - # src/ggml-hip/* -> ggml/src/ggml-hip/* - # src/ggml-metal/* -> ggml/src/ggml-metal/* - # src/ggml-musa/* -> ggml/src/ggml-musa/* - # src/ggml-opencl/* -> ggml/src/ggml-opencl/* - # src/ggml-rpc/* -> ggml/src/ggml-rpc/* - # src/ggml-sycl/* -> ggml/src/ggml-sycl/* - # src/ggml-vulkan/* -> ggml/src/ggml-vulkan/* + # src/ggml* -> ggml/src/ggml* # # include/ggml*.h -> ggml/include/ggml*.h # include/gguf*.h -> ggml/include/gguf*.h @@ -163,20 +135,7 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then -e 's/([[:space:]]| [ab]\/)cmake\/common.cmake/\1ggml\/cmake\/common.cmake/g' \ -e 's/([[:space:]]| [ab]\/)cmake\/ggml-config.cmake.in/\1ggml\/cmake\/ggml-config.cmake.in/g' \ -e 's/([[:space:]]| [ab]\/)src\/ggml-cpu\/cmake\/FindSIMD.cmake/\1ggml\/src\/ggml-cpu\/cmake\/FindSIMD.cmake/g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml(.*)\.c/\1ggml\/src\/ggml\2.c/g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml(.*)\.cpp/\1ggml\/src\/ggml\2.cpp/g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml(.*)\.h/\1ggml\/src\/ggml\2.h/g' \ - -e 's/([[:space:]]| [ab]\/)src\/gguf(.*)\.cpp/\1ggml\/src\/gguf\2.cpp/g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml-blas\//\1ggml\/src\/ggml-blas\//g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml-cann\//\1ggml\/src\/ggml-cann\//g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml-cpu\//\1ggml\/src\/ggml-cpu\//g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml-cuda\//\1ggml\/src\/ggml-cuda\//g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml-hip\//\1ggml\/src\/ggml-hip\//g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml-metal\//\1ggml\/src\/ggml-metal\//g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml-opencl\//\1ggml\/src\/ggml-opencl\//g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml-rpc\//\1ggml\/src\/ggml-rpc\//g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml-sycl\//\1ggml\/src\/ggml-sycl\//g' \ - -e 's/([[:space:]]| [ab]\/)src\/ggml-vulkan\//\1ggml\/src\/ggml-vulkan\//g' \ + -e 's/([[:space:]]| [ab]\/)src\/ggml(.*)/\1ggml\/src\/ggml\2/g' \ -e 's/([[:space:]]| [ab]\/)include\/ggml(.*)\.h/\1ggml\/include\/ggml\2.h/g' \ -e 
's/([[:space:]]| [ab]\/)include\/gguf(.*)\.h/\1ggml\/include\/gguf\2.h/g' \ -e 's/([[:space:]]| [ab]\/)tests\/(.*)\.cpp/\1tests\/\2.cpp/g' \ diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 20a5915bc2..22348782d1 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -daf7906728036a82f20c69fcbd74b6f536c74d3f +323951f1bdcdfbd5b5ff3a9a7c3770e63b1a560e diff --git a/scripts/sync-ggml.sh b/scripts/sync-ggml.sh index 9b98329e09..2da9b57891 100755 --- a/scripts/sync-ggml.sh +++ b/scripts/sync-ggml.sh @@ -6,21 +6,7 @@ cp -rpv ../ggml/src/CMakeLists.txt ./ggml/src/CMakeLists.txt cp -rpv ../ggml/cmake/* ./ggml/cmake/ cp -rpv ../ggml/src/ggml-cpu/cmake/* ./ggml/src/ggml-cpu/cmake/ -cp -rpv ../ggml/src/ggml*.c ./ggml/src/ -cp -rpv ../ggml/src/ggml*.cpp ./ggml/src/ -cp -rpv ../ggml/src/ggml*.h ./ggml/src/ -cp -rpv ../ggml/src/gguf*.cpp ./ggml/src/ -cp -rpv ../ggml/src/ggml-blas/* ./ggml/src/ggml-blas/ -cp -rpv ../ggml/src/ggml-cann/* ./ggml/src/ggml-cann/ -cp -rpv ../ggml/src/ggml-cpu/* ./ggml/src/ggml-cpu/ -cp -rpv ../ggml/src/ggml-cuda/* ./ggml/src/ggml-cuda/ -cp -rpv ../ggml/src/ggml-hip/* ./ggml/src/ggml-hip/ -cp -rpv ../ggml/src/ggml-metal/* ./ggml/src/ggml-metal/ -cp -rpv ../ggml/src/ggml-musa/* ./ggml/src/ggml-musa/ -cp -rpv ../ggml/src/ggml-opencl/* ./ggml/src/ggml-opencl/ -cp -rpv ../ggml/src/ggml-rpc/* ./ggml/src/ggml-rpc/ -cp -rpv ../ggml/src/ggml-sycl/* ./ggml/src/ggml-sycl/ -cp -rpv ../ggml/src/ggml-vulkan/* ./ggml/src/ggml-vulkan/ +cp -rpv ../ggml/src/ggml* ./ggml/src/ cp -rpv ../ggml/include/ggml*.h ./ggml/include/ cp -rpv ../ggml/include/gguf*.h ./ggml/include/ diff --git a/src/llama-batch.cpp b/src/llama-batch.cpp index 8698d89ace..55d89eca0a 100644 --- a/src/llama-batch.cpp +++ b/src/llama-batch.cpp @@ -477,7 +477,7 @@ llama_ubatch llama_batch_allocr::split_simple(uint32_t n_ubatch) { llama_ubatch llama_batch_allocr::split_equal(uint32_t n_ubatch, bool sequential) { if (sequential && has_cpl) { - LLAMA_LOG_ERROR("%s: sequential split is not supported when there are coupled sequences in the input batch\n", __func__); + LLAMA_LOG_ERROR("%s: sequential split is not supported when there are coupled sequences in the input batch (you may need to use the -kvu flag)\n", __func__); return {}; } diff --git a/src/llama-context.cpp b/src/llama-context.cpp index 26a5cf9c3f..1ebfc88ab6 100644 --- a/src/llama-context.cpp +++ b/src/llama-context.cpp @@ -145,11 +145,6 @@ llama_context::llama_context( __func__, n_ctx_per_seq, hparams.n_ctx_train); } - if (!params.swa_full && cparams.n_seq_max > 1 && hparams.is_swa_any()) { - LLAMA_LOG_WARN("%s: requested n_seq_max (%u) > 1, but swa_full is not enabled -- performance may be degraded: %s\n", - __func__, cparams.n_seq_max, "https://github.com/ggml-org/llama.cpp/pull/13845#issuecomment-2924800573"); - } - if (!hparams.vocab_only) { // GPU backends for (auto * dev : model.devices) { @@ -1657,30 +1652,30 @@ size_t llama_context::state_set_data(const uint8_t * src, size_t size) { } } -size_t llama_context::state_seq_get_size(llama_seq_id seq_id) { +size_t llama_context::state_seq_get_size(llama_seq_id seq_id, llama_state_seq_flags flags) { llama_io_write_dummy io; try { - return state_seq_write_data(io, seq_id); + return state_seq_write_data(io, seq_id, flags); } catch (const std::exception & err) { LLAMA_LOG_ERROR("%s: error getting state size: %s\n", __func__, err.what()); return 0; } } -size_t llama_context::state_seq_get_data(llama_seq_id seq_id, uint8_t * dst, size_t size) { +size_t 
llama_context::state_seq_get_data(llama_seq_id seq_id, uint8_t * dst, size_t size, llama_state_seq_flags flags) { llama_io_write_buffer io(dst, size); try { - return state_seq_write_data(io, seq_id); + return state_seq_write_data(io, seq_id, flags); } catch (const std::exception & err) { LLAMA_LOG_ERROR("%s: error saving state: %s\n", __func__, err.what()); return 0; } } -size_t llama_context::state_seq_set_data(llama_seq_id seq_id, const uint8_t * src, size_t size) { +size_t llama_context::state_seq_set_data(llama_seq_id seq_id, const uint8_t * src, size_t size, llama_state_seq_flags flags) { llama_io_read_buffer io(src, size); try { - return state_seq_read_data(io, seq_id); + return state_seq_read_data(io, seq_id, flags); } catch (const std::exception & err) { LLAMA_LOG_ERROR("%s: error loading state: %s\n", __func__, err.what()); return 0; @@ -1778,7 +1773,7 @@ size_t llama_context::state_seq_load_file(llama_seq_id seq_id, const char * file { const size_t state_size = file.size() - file.tell(); llama_io_read_file io(&file); - const size_t nread = state_seq_read_data(io, seq_id); + const size_t nread = state_seq_read_data(io, seq_id, 0); if (!nread) { LLAMA_LOG_ERROR("%s: failed to restore sequence state\n", __func__); return 0; @@ -1802,7 +1797,7 @@ size_t llama_context::state_seq_save_file(llama_seq_id seq_id, const char * file // save the context state using stream saving llama_io_write_file io(&file); - state_seq_write_data(io, seq_id); + state_seq_write_data(io, seq_id, 0); const size_t res = file.tell(); GGML_ASSERT(res == sizeof(uint32_t) * 3 + sizeof(llama_token) * n_token_count + io.n_bytes()); @@ -1971,21 +1966,21 @@ size_t llama_context::state_read_data(llama_io_read_i & io) { return io.n_bytes(); } -size_t llama_context::state_seq_write_data(llama_io_write_i & io, llama_seq_id seq_id) { +size_t llama_context::state_seq_write_data(llama_io_write_i & io, llama_seq_id seq_id, llama_state_seq_flags flags) { GGML_UNUSED(seq_id); if (memory) { - memory->state_write(io, seq_id); + memory->state_write(io, seq_id, flags); } return io.n_bytes(); } -size_t llama_context::state_seq_read_data(llama_io_read_i & io, llama_seq_id seq_id) { +size_t llama_context::state_seq_read_data(llama_io_read_i & io, llama_seq_id seq_id, llama_state_seq_flags flags) { GGML_UNUSED(seq_id); if (memory) { - memory->state_read(io, seq_id); + memory->state_read(io, seq_id, flags); } return io.n_bytes(); @@ -2048,7 +2043,7 @@ void llama_context::opt_init(struct llama_model * model, struct llama_opt_params opt_params.opt_period = n_batch / n_ubatch; opt_params.get_opt_pars = lopt_params.get_opt_pars; opt_params.get_opt_pars_ud = lopt_params.get_opt_pars_ud; - + opt_params.optimizer = lopt_params.optimizer_type; opt_ctx = ggml_opt_init(opt_params); llama_opt_param_filter param_filter = lopt_params.param_filter; @@ -2801,19 +2796,31 @@ bool llama_state_save_file(llama_context * ctx, const char * path_session, const } size_t llama_state_seq_get_size(llama_context * ctx, llama_seq_id seq_id) { - return ctx->state_seq_get_size(seq_id); + return llama_state_seq_get_size_ext(ctx, seq_id, 0); } size_t llama_state_seq_get_data(llama_context * ctx, uint8_t * dst, size_t size, llama_seq_id seq_id) { - ctx->synchronize(); - - return ctx->state_seq_get_data(seq_id, dst, size); + return llama_state_seq_get_data_ext(ctx, dst, size, seq_id, 0); } size_t llama_state_seq_set_data(llama_context * ctx, const uint8_t * src, size_t size, llama_seq_id seq_id) { + return llama_state_seq_set_data_ext(ctx, src, size, seq_id, 0); +} + 
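// A minimal usage sketch of the new flag-aware state API: the wrappers above preserve the
// old llama_state_seq_* behaviour by passing flags = 0, while the *_ext variants accept
// LLAMA_STATE_SEQ_FLAGS_SWA_ONLY to save/restore only the SWA portion of a sequence's
// state. Assumes an existing llama_context * ctx, a llama_seq_id seq_id, and <vector>:
//
//     const llama_state_seq_flags flags = LLAMA_STATE_SEQ_FLAGS_SWA_ONLY;
//
//     std::vector<uint8_t> buf(llama_state_seq_get_size_ext(ctx, seq_id, flags));
//     const size_t n_saved = llama_state_seq_get_data_ext(ctx, buf.data(), buf.size(), seq_id, flags);
//
//     // ... later, restore the SWA-only snapshot into the same sequence:
//     const size_t n_read  = llama_state_seq_set_data_ext(ctx, buf.data(), n_saved, seq_id, flags);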
+size_t llama_state_seq_get_size_ext(llama_context * ctx, llama_seq_id seq_id, llama_state_seq_flags flags) { + return ctx->state_seq_get_size(seq_id, flags); +} + +size_t llama_state_seq_get_data_ext(llama_context * ctx, uint8_t * dst, size_t size, llama_seq_id seq_id, llama_state_seq_flags flags) { ctx->synchronize(); - return ctx->state_seq_set_data(seq_id, src, size); + return ctx->state_seq_get_data(seq_id, dst, size, flags); +} + +size_t llama_state_seq_set_data_ext(llama_context * ctx, const uint8_t * src, size_t size, llama_seq_id seq_id, llama_state_seq_flags flags) { + ctx->synchronize(); + + return ctx->state_seq_set_data(seq_id, src, size, flags); } size_t llama_state_seq_save_file(llama_context * ctx, const char * filepath, llama_seq_id seq_id, const llama_token * tokens, size_t n_token_count) { diff --git a/src/llama-context.h b/src/llama-context.h index 25c143d56d..230ef8962b 100644 --- a/src/llama-context.h +++ b/src/llama-context.h @@ -111,9 +111,9 @@ struct llama_context { size_t state_get_data( uint8_t * dst, size_t size); size_t state_set_data(const uint8_t * src, size_t size); - size_t state_seq_get_size(llama_seq_id seq_id); - size_t state_seq_get_data(llama_seq_id seq_id, uint8_t * dst, size_t size); - size_t state_seq_set_data(llama_seq_id seq_id, const uint8_t * src, size_t size); + size_t state_seq_get_size(llama_seq_id seq_id, llama_state_seq_flags flags); + size_t state_seq_get_data(llama_seq_id seq_id, uint8_t * dst, size_t size, llama_state_seq_flags flags); + size_t state_seq_set_data(llama_seq_id seq_id, const uint8_t * src, size_t size, llama_state_seq_flags flags); bool state_load_file( const char * filepath, @@ -152,6 +152,7 @@ struct llama_context { void opt_init(struct llama_model * model, struct llama_opt_params lopt_params); + // TODO: more flexible combinations of logical/physical batch size and context size void opt_epoch( ggml_opt_dataset_t dataset, ggml_opt_result_t result_train, @@ -212,8 +213,8 @@ private: size_t state_write_data(llama_io_write_i & io); size_t state_read_data (llama_io_read_i & io); - size_t state_seq_write_data(llama_io_write_i & io, llama_seq_id seq_id); - size_t state_seq_read_data (llama_io_read_i & io, llama_seq_id seq_id); + size_t state_seq_write_data(llama_io_write_i & io, llama_seq_id seq_id, llama_state_seq_flags flags); + size_t state_seq_read_data (llama_io_read_i & io, llama_seq_id seq_id, llama_state_seq_flags flags); // // members diff --git a/src/llama-kv-cache-unified-iswa.cpp b/src/llama-kv-cache-unified-iswa.cpp index 01d27fb4db..1e363fff2a 100644 --- a/src/llama-kv-cache-unified-iswa.cpp +++ b/src/llama-kv-cache-unified-iswa.cpp @@ -194,14 +194,20 @@ bool llama_kv_cache_unified_iswa::get_can_shift() const { return kv_base->get_size() == kv_swa->get_size(); } -void llama_kv_cache_unified_iswa::state_write(llama_io_write_i & io, llama_seq_id seq_id) const { - kv_base->state_write(io, seq_id); - kv_swa ->state_write(io, seq_id); +void llama_kv_cache_unified_iswa::state_write(llama_io_write_i & io, llama_seq_id seq_id, llama_state_seq_flags flags) const { + if ((flags & LLAMA_STATE_SEQ_FLAGS_SWA_ONLY) == 0) { + kv_base->state_write(io, seq_id, flags); + } + + kv_swa->state_write(io, seq_id, flags); } -void llama_kv_cache_unified_iswa::state_read(llama_io_read_i & io, llama_seq_id seq_id) { - kv_base->state_read(io, seq_id); - kv_swa ->state_read(io, seq_id); +void llama_kv_cache_unified_iswa::state_read(llama_io_read_i & io, llama_seq_id seq_id, llama_state_seq_flags flags) { + if ((flags & 
LLAMA_STATE_SEQ_FLAGS_SWA_ONLY) == 0) { + kv_base->state_read(io, seq_id, flags); + } + + kv_swa->state_read(io, seq_id, flags); } llama_kv_cache_unified * llama_kv_cache_unified_iswa::get_base() const { diff --git a/src/llama-kv-cache-unified-iswa.h b/src/llama-kv-cache-unified-iswa.h index d2650dadd3..7bc4df718d 100644 --- a/src/llama-kv-cache-unified-iswa.h +++ b/src/llama-kv-cache-unified-iswa.h @@ -56,8 +56,8 @@ public: // state write/load - void state_write(llama_io_write_i & io, llama_seq_id seq_id = -1) const override; - void state_read (llama_io_read_i & io, llama_seq_id seq_id = -1) override; + void state_write(llama_io_write_i & io, llama_seq_id seq_id = -1, llama_state_seq_flags flags = 0) const override; + void state_read (llama_io_read_i & io, llama_seq_id seq_id = -1, llama_state_seq_flags flags = 0) override; // // llama_kv_cache_unified_iswa specific API diff --git a/src/llama-kv-cache-unified.cpp b/src/llama-kv-cache-unified.cpp index e539142e6b..478ebffac0 100644 --- a/src/llama-kv-cache-unified.cpp +++ b/src/llama-kv-cache-unified.cpp @@ -223,12 +223,7 @@ void llama_kv_cache_unified::clear(bool data) { } bool llama_kv_cache_unified::seq_rm(llama_seq_id seq_id, llama_pos p0, llama_pos p1) { - GGML_ASSERT(seq_id >= 0 && (size_t) seq_id < seq_to_stream.size()); - - auto & cells = v_cells[seq_to_stream[seq_id]]; - auto & head = v_heads[seq_to_stream[seq_id]]; - - uint32_t new_head = cells.size(); + GGML_ASSERT(seq_id == -1 || (seq_id >= 0 && (size_t) seq_id < seq_to_stream.size())); if (p0 < 0) { p0 = 0; @@ -239,6 +234,11 @@ bool llama_kv_cache_unified::seq_rm(llama_seq_id seq_id, llama_pos p0, llama_pos } if (seq_id >= 0) { + auto & cells = v_cells[seq_to_stream[seq_id]]; + auto & head = v_heads[seq_to_stream[seq_id]]; + + uint32_t new_head = cells.size(); + for (uint32_t i = 0; i < cells.size(); ++i) { if (!cells.pos_in(i, p0, p1)) { continue; @@ -250,26 +250,38 @@ bool llama_kv_cache_unified::seq_rm(llama_seq_id seq_id, llama_pos p0, llama_pos } } } + + // If we freed up a slot, set head to it so searching can start there. + if (new_head != cells.size() && new_head < head) { + head = new_head; + } } else { // match any sequence - for (uint32_t i = 0; i < cells.size(); ++i) { - if (!cells.pos_in(i, p0, p1)) { - continue; + for (uint32_t s = 0; s < n_stream; ++s) { + auto & cells = v_cells[s]; + auto & head = v_heads[s]; + + uint32_t new_head = cells.size(); + + for (uint32_t i = 0; i < cells.size(); ++i) { + if (!cells.pos_in(i, p0, p1)) { + continue; + } + + cells.rm(i); + + if (new_head == cells.size()) { + new_head = i; + } } - cells.rm(i); - - if (new_head == cells.size()) { - new_head = i; + // If we freed up a slot, set head to it so searching can start there. + if (new_head != cells.size() && new_head < head) { + head = new_head; } } } - // If we freed up a slot, set head to it so searching can start there. 
- if (new_head != cells.size() && new_head < head) { - head = new_head; - } - return true; } @@ -738,66 +750,70 @@ bool llama_kv_cache_unified::update(llama_context * lctx, bool do_shift, const d } llama_kv_cache_unified::slot_info llama_kv_cache_unified::find_slot(const llama_ubatch & ubatch, bool cont) const { + if (debug > 0) { - const auto & cells = v_cells[seq_to_stream[1]]; + for (uint32_t s = 0; s < ubatch.n_seqs_unq; ++s) { + const auto seq_id = ubatch.seq_id_unq[s]; + const auto stream_id = seq_to_stream[seq_id]; + const auto & cells = v_cells[stream_id]; + const uint32_t head_cur = v_heads[stream_id]; - const uint32_t head_cur = v_heads[1]; + LLAMA_LOG_DEBUG("%s: stream[%d], n = %5d, used = %5d, head = %5d, size = %5d, n_swa = %5d\n", + __func__, stream_id, cells.used_max_p1(), cells.get_used(), head_cur, get_size(), n_swa); - LLAMA_LOG_DEBUG("%s: n = %5d, used = %5d, head = %5d, size = %5d, n_swa = %5d\n", - __func__, cells.used_max_p1(), cells.get_used(), head_cur, get_size(), n_swa); - - if ((debug == 2 && n_swa > 0) || debug > 2) { - std::string ss; - for (uint32_t i = 0; i < cells.size(); ++i) { - if (cells.is_empty(i)) { - ss += '.'; - } else { - assert(cells.seq_count(i) >= 1); - - if (cells.seq_count(i) == 1) { - ss += std::to_string(cells.seq_get(i)); + if ((debug == 2 && n_swa > 0) || debug > 2) { + std::string ss; + for (uint32_t i = 0; i < cells.size(); ++i) { + if (cells.is_empty(i)) { + ss += '.'; } else { - ss += 'M'; + assert(cells.seq_count(i) >= 1); + + if (cells.seq_count(i) == 1) { + ss += std::to_string(cells.seq_get(i)); + } else { + ss += 'M'; + } + } + if (i%256 == 255) { + ss += " *"; + ss += '\n'; } } - if (i%256 == 255) { - ss += " *"; - ss += '\n'; - } - } - LLAMA_LOG_DEBUG("\n%s\n", ss.c_str()); - } - - if ((debug == 2 && n_swa > 0) || debug > 2) { - std::string ss; - for (uint32_t i = 0; i < cells.size(); ++i) { - std::string cur; - if (cells.is_empty(i)) { - cur = '.'; - } else { - cur = std::to_string(cells.pos_get(i)); - } - const int n = cur.size(); - for (int j = 0; j < 5 - n; ++j) { - cur += ' '; - } - ss += cur; - if (i%256 == 255) { - ss += " *"; - } - if (i%64 == 63) { - ss += '\n'; - } - } - LLAMA_LOG_DEBUG("\n%s\n", ss.c_str()); - } - - for (int s = 0; s < LLAMA_MAX_SEQ; ++s) { - if (cells.seq_pos_min(s) < 0) { - continue; + LLAMA_LOG_DEBUG("\n%s\n", ss.c_str()); } - LLAMA_LOG_DEBUG("%s: min[%d] = %5d, max[%d] = %5d\n", __func__, s, cells.seq_pos_min(s), s, cells.seq_pos_max(s)); + if ((debug == 2 && n_swa > 0) || debug > 2) { + std::string ss; + for (uint32_t i = 0; i < cells.size(); ++i) { + std::string cur; + if (cells.is_empty(i)) { + cur = '.'; + } else { + cur = std::to_string(cells.pos_get(i)); + } + const int n = cur.size(); + for (int j = 0; j < 5 - n; ++j) { + cur += ' '; + } + ss += cur; + if (i%256 == 255) { + ss += " *"; + } + if (i%64 == 63) { + ss += '\n'; + } + } + LLAMA_LOG_DEBUG("\n%s\n", ss.c_str()); + } + + for (int s = 0; s < LLAMA_MAX_SEQ; ++s) { + if (cells.seq_pos_min(s) < 0) { + continue; + } + + LLAMA_LOG_DEBUG("%s: stream[%d] min[%d] = %5d, max[%d] = %5d\n", __func__, stream_id, s, cells.seq_pos_min(s), s, cells.seq_pos_max(s)); + } } } @@ -1812,7 +1828,9 @@ bool llama_kv_cache_unified::is_masked_swa(llama_pos p0, llama_pos p1) const { return false; } -void llama_kv_cache_unified::state_write(llama_io_write_i & io, llama_seq_id seq_id) const { +void llama_kv_cache_unified::state_write(llama_io_write_i & io, llama_seq_id seq_id, llama_state_seq_flags flags) const { + GGML_UNUSED(flags); + io.write(&n_stream, 
sizeof(n_stream)); for (uint32_t s = 0; s < n_stream; ++s) { @@ -1863,7 +1881,9 @@ void llama_kv_cache_unified::state_write(llama_io_write_i & io, llama_seq_id seq } } -void llama_kv_cache_unified::state_read(llama_io_read_i & io, llama_seq_id seq_id) { +void llama_kv_cache_unified::state_read(llama_io_read_i & io, llama_seq_id seq_id, llama_state_seq_flags flags) { + GGML_UNUSED(flags); + GGML_ASSERT(seq_id == -1 || (seq_id >= 0 && (size_t) seq_id < seq_to_stream.size())); uint32_t n_stream_cur; diff --git a/src/llama-kv-cache-unified.h b/src/llama-kv-cache-unified.h index 342a675962..07a7c9e4e4 100644 --- a/src/llama-kv-cache-unified.h +++ b/src/llama-kv-cache-unified.h @@ -136,8 +136,8 @@ public: // state write/load - void state_write(llama_io_write_i & io, llama_seq_id seq_id = -1) const override; - void state_read (llama_io_read_i & io, llama_seq_id seq_id = -1) override; + void state_write(llama_io_write_i & io, llama_seq_id seq_id = -1, llama_state_seq_flags flags = 0) const override; + void state_read (llama_io_read_i & io, llama_seq_id seq_id = -1, llama_state_seq_flags flags = 0) override; // // llama_kv_cache_unified specific API diff --git a/src/llama-memory-hybrid.cpp b/src/llama-memory-hybrid.cpp index e98b4e3546..cbeeb21344 100644 --- a/src/llama-memory-hybrid.cpp +++ b/src/llama-memory-hybrid.cpp @@ -165,12 +165,16 @@ llama_pos llama_memory_hybrid::seq_pos_max(llama_seq_id seq_id) const { return std::min(mem_attn->seq_pos_max(seq_id), mem_recr->seq_pos_max(seq_id)); } -void llama_memory_hybrid::state_write(llama_io_write_i & io, llama_seq_id seq_id) const { +void llama_memory_hybrid::state_write(llama_io_write_i & io, llama_seq_id seq_id, llama_state_seq_flags flags) const { + GGML_UNUSED(flags); + mem_attn->state_write(io, seq_id); mem_recr->state_write(io, seq_id); } -void llama_memory_hybrid::state_read(llama_io_read_i & io, llama_seq_id seq_id) { +void llama_memory_hybrid::state_read(llama_io_read_i & io, llama_seq_id seq_id, llama_state_seq_flags flags) { + GGML_UNUSED(flags); + mem_attn->state_read(io, seq_id); mem_recr->state_read(io, seq_id); } diff --git a/src/llama-memory-hybrid.h b/src/llama-memory-hybrid.h index c2d56cd541..acdbc26bfb 100644 --- a/src/llama-memory-hybrid.h +++ b/src/llama-memory-hybrid.h @@ -74,8 +74,8 @@ public: // state write/load - void state_write(llama_io_write_i & io, llama_seq_id seq_id = -1) const override; - void state_read (llama_io_read_i & io, llama_seq_id seq_id = -1) override; + void state_write(llama_io_write_i & io, llama_seq_id seq_id = -1, llama_state_seq_flags flags = 0) const override; + void state_read (llama_io_read_i & io, llama_seq_id seq_id = -1, llama_state_seq_flags flags = 0) override; // // llama_memory_hybrid specific API diff --git a/src/llama-memory-recurrent.cpp b/src/llama-memory-recurrent.cpp index c0c2ec084d..849675c418 100644 --- a/src/llama-memory-recurrent.cpp +++ b/src/llama-memory-recurrent.cpp @@ -680,7 +680,9 @@ size_t llama_memory_recurrent::size_s_bytes() const { return size_s_bytes; } -void llama_memory_recurrent::state_write(llama_io_write_i & io, llama_seq_id seq_id) const { +void llama_memory_recurrent::state_write(llama_io_write_i & io, llama_seq_id seq_id, llama_state_seq_flags flags) const { + GGML_UNUSED(flags); + std::vector<std::pair<uint32_t, uint32_t>> cell_ranges; // ranges, from inclusive, to exclusive uint32_t cell_count = 0; @@ -718,7 +720,9 @@ void llama_memory_recurrent::state_write(llama_io_write_i & io, llama_seq_id seq state_write_data(io, cell_ranges); } -void
llama_memory_recurrent::state_read(llama_io_read_i & io, llama_seq_id seq_id) { +void llama_memory_recurrent::state_read(llama_io_read_i & io, llama_seq_id seq_id, llama_state_seq_flags flags) { + GGML_UNUSED(flags); + uint32_t cell_count; io.read_to(&cell_count, sizeof(cell_count)); diff --git a/src/llama-memory-recurrent.h b/src/llama-memory-recurrent.h index 4d094f9a05..95c617b2c9 100644 --- a/src/llama-memory-recurrent.h +++ b/src/llama-memory-recurrent.h @@ -63,8 +63,8 @@ public: // state write/load - void state_write(llama_io_write_i & io, llama_seq_id seq_id = -1) const override; - void state_read (llama_io_read_i & io, llama_seq_id seq_id = -1) override; + void state_write(llama_io_write_i & io, llama_seq_id seq_id = -1, llama_state_seq_flags flags = 0) const override; + void state_read (llama_io_read_i & io, llama_seq_id seq_id = -1, llama_state_seq_flags flags = 0) override; uint32_t head = 0; // the location where the batch will be placed in the cache (see find_slot()) uint32_t size = 0; // total number of cells, shared across all sequences diff --git a/src/llama-memory.h b/src/llama-memory.h index e8ba336e85..171d312cc9 100644 --- a/src/llama-memory.h +++ b/src/llama-memory.h @@ -104,8 +104,8 @@ struct llama_memory_i { // state write/read // - virtual void state_write(llama_io_write_i & io, llama_seq_id seq_id = -1) const = 0; - virtual void state_read (llama_io_read_i & io, llama_seq_id seq_id = -1) = 0; + virtual void state_write(llama_io_write_i & io, llama_seq_id seq_id = -1, llama_state_seq_flags flags = 0) const = 0; + virtual void state_read (llama_io_read_i & io, llama_seq_id seq_id = -1, llama_state_seq_flags flags = 0) = 0; }; using llama_memory_ptr = std::unique_ptr<llama_memory_i>; diff --git a/src/llama-model.cpp b/src/llama-model.cpp index 58ca7df707..431102edea 100644 --- a/src/llama-model.cpp +++ b/src/llama-model.cpp @@ -86,6 +86,7 @@ const char * llm_type_name(llm_type type) { case LLM_TYPE_40B: return "40B"; case LLM_TYPE_65B: return "65B"; case LLM_TYPE_70B: return "70B"; + case LLM_TYPE_120B: return "120B"; case LLM_TYPE_142B: return "142B"; case LLM_TYPE_236B: return "236B"; case LLM_TYPE_290B: return "290B"; @@ -1095,6 +1096,7 @@ void llama_model::load_hparams(llama_model_loader & ml) { ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); switch (hparams.n_layer) { + case 18: type = LLM_TYPE_537M; break; case 26: type = LLM_TYPE_1B; break; case 34: type = LLM_TYPE_4B; break; case 48: type = LLM_TYPE_12B; break; @@ -1833,7 +1835,11 @@ void llama_model::load_hparams(llama_model_loader & ml) { hparams.swa_type = LLAMA_SWA_TYPE_STANDARD; hparams.set_swa_pattern(2); - // TODO: switch (hparams.n_layer) + switch (hparams.n_layer) { + case 24: type = LLM_TYPE_20B; break; + case 36: type = LLM_TYPE_120B; break; + default: type = LLM_TYPE_UNKNOWN; + } } break; case LLM_ARCH_LFM2: { @@ -6742,9 +6748,9 @@ struct llm_build_falcon : public llm_graph_context { ggml_tensor * Qcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 0*sizeof(float)*(n_embd)); ggml_tensor * Kcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head_kv, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 1*sizeof(float)*(n_embd)); - ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + ggml_tensor * Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens);
+ Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); // using mode = 2 for neox mode Qcur = ggml_rope_ext( @@ -7022,9 +7028,9 @@ struct llm_build_dbrx : public llm_graph_context { Qcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 0*sizeof(float)*(n_embd)); Kcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head_kv, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 1*sizeof(float)*(n_embd)); - Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); Qcur = ggml_rope_ext( ctx0, Qcur, inp_pos, nullptr, @@ -7144,13 +7150,13 @@ struct llm_build_starcoder : public llm_graph_context { cur = ggml_add(ctx0, cur, model.layers[il].bqkv); cb(cur, "bqkv", il); - ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + ggml_tensor * Qcur = ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd)); + ggml_tensor * Kcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd)); + ggml_tensor * Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); - Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Qcur = ggml_cont_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_cont_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); @@ -7366,13 +7372,15 @@ struct llm_build_bert : public llm_graph_context { cb(cur, "bqkv", il); } - Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + Qcur = ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd)); + Kcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd)); + Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); } else { Qcur = ggml_add(ctx0, build_lora_mm(model.layers[il].wq, cur), model.layers[il].bq); Kcur = ggml_add(ctx0, build_lora_mm(model.layers[il].wk, cur), model.layers[il].bk); Vcur = ggml_add(ctx0, build_lora_mm(model.layers[il].wv, cur), model.layers[il].bv); + Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); } if (model.layers[il].attn_q_norm) { @@ -7380,6 +7388,10 @@ struct llm_build_bert : public llm_graph_context { model.layers[il].attn_q_norm, model.layers[il].attn_q_norm_b, LLM_NORM, il); + + Qcur = 
ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + } else { + Qcur = ggml_cont_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); } if (model.layers[il].attn_k_norm) { @@ -7387,11 +7399,11 @@ struct llm_build_bert : public llm_graph_context { model.layers[il].attn_k_norm, model.layers[il].attn_k_norm_b, LLM_NORM, il); - } - Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + } else { + Kcur = ggml_cont_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + } // RoPE if (model.arch == LLM_ARCH_NOMIC_BERT || model.arch == LLM_ARCH_NOMIC_BERT_MOE) { @@ -7536,9 +7548,9 @@ struct llm_build_neo_bert : public llm_graph_context { Qcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 0*sizeof(float)*(n_embd)); Kcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head_kv, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 1*sizeof(float)*(n_embd)); - Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); // RoPE Qcur = ggml_rope_ext( @@ -7645,13 +7657,13 @@ struct llm_build_bloom : public llm_graph_context { cur = ggml_add(ctx0, cur, model.layers[il].bqkv); cb(cur, "bqkv", il); - ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + ggml_tensor * Qcur = ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd)); + ggml_tensor * Kcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd)); + ggml_tensor * Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); - Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Qcur = ggml_cont_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_cont_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); @@ -7769,7 +7781,7 @@ struct llm_build_mpt : public llm_graph_context { ggml_tensor * Qcur = ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd)); ggml_tensor * Kcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd)); - ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + ggml_tensor * Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); @@ -7788,17 +7800,18 @@ struct llm_build_mpt : public llm_graph_context { model.layers[il].attn_k_norm_b, LLM_NORM, 
il); cb(Kcur, "Kcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); } else { - Qcur = ggml_cont(ctx0, Qcur); + Qcur = ggml_cont_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); cb(Qcur, "Qcur", il); - Kcur = ggml_cont(ctx0, Kcur); + Kcur = ggml_cont_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); cb(Kcur, "Kcur", il); } - Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); @@ -8050,9 +8063,9 @@ struct llm_build_qwen : public llm_graph_context { ggml_tensor * Qcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 0*sizeof(float)*(n_embd)); ggml_tensor * Kcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head_kv, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 1*sizeof(float)*(n_embd)); - ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 2*sizeof(float)*(n_embd))); + ggml_tensor * Vcur = ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 2*sizeof(float)*(n_embd)); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); // using mode = 2 for neox mode Qcur = ggml_rope_ext( @@ -9025,21 +9038,21 @@ struct llm_build_phi2 : public llm_graph_context { Qcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 0*sizeof(float)*(n_embd)); Kcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head_kv, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 1*sizeof(float)*(n_embd)); - Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); } else { Qcur = ggml_add(ctx0, build_lora_mm(model.layers[il].wq, attn_norm_output), model.layers[il].bq); Kcur = ggml_add(ctx0, build_lora_mm(model.layers[il].wk, attn_norm_output), model.layers[il].bk); Vcur = ggml_add(ctx0, build_lora_mm(model.layers[il].wv, attn_norm_output), model.layers[il].bv); Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); } cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); cb(Vcur, "Vcur", il); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); - Qcur = ggml_rope_ext( ctx0, Qcur, inp_pos, nullptr, n_rot, rope_type, n_ctx_orig, freq_base, freq_scale, @@ -9163,21 +9176,21 @@ struct llm_build_phi3 : public llm_graph_context { Qcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head, n_tokens, n_embd_head * sizeof(float), cur->nb[1], 0 * sizeof(float) * (n_embd)); Kcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head_kv, n_tokens, n_embd_head * sizeof(float), cur->nb[1], 1 * sizeof(float) * (n_embd)); - Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1 * sizeof(float) * (n_embd + n_embd_gqa))); + Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1 * sizeof(float) * (n_embd + n_embd_gqa)); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, 
n_head_kv, n_tokens); } else { Qcur = ggml_add(ctx0, build_lora_mm(model.layers[il].wq, attn_norm_output), model.layers[il].bq); Kcur = ggml_add(ctx0, build_lora_mm(model.layers[il].wk, attn_norm_output), model.layers[il].bk); Vcur = ggml_add(ctx0, build_lora_mm(model.layers[il].wv, attn_norm_output), model.layers[il].bv); Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); } cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); cb(Vcur, "Vcur", il); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); - Qcur = ggml_rope_ext( ctx0, Qcur, inp_pos, rope_factors, n_rot, rope_type, n_ctx_orig, freq_base, freq_scale, @@ -9427,17 +9440,17 @@ struct llm_build_gpt2 : public llm_graph_context { cur = ggml_add(ctx0, cur, model.layers[il].bqkv); cb(cur, "bqkv", il); - ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + ggml_tensor * Qcur = ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd)); + ggml_tensor * Kcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd)); + ggml_tensor * Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); cb(Vcur, "Vcur", il); - Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Qcur = ggml_cont_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_cont_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); cur = build_attn(inp_attn, model.layers[il].wo, model.layers[il].bo, @@ -9533,9 +9546,9 @@ struct llm_build_codeshell : public llm_graph_context { ggml_tensor * Qcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 0*sizeof(float)*(n_embd)); ggml_tensor * Kcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head_kv, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 1*sizeof(float)*(n_embd)); - ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + ggml_tensor * Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); Qcur = ggml_rope_ext( ctx0, Qcur, inp_pos, nullptr, @@ -10863,8 +10876,8 @@ struct llm_build_gemma3n_iswa : public llm_graph_context { ggml_tensor * all_coefs = build_lora_mm(model.layers[il].altup_correct_coef, modalities); // [n_altup, n_tokens] all_coefs = ggml_scale_bias(ctx0, all_coefs, 1.0f, 1.0f); // + 1.0 cb(all_coefs, "all_coefs", il); - all_coefs = ggml_cont(ctx0, ggml_transpose(ctx0, all_coefs)); // [n_tokens, n_altup] - all_coefs = ggml_reshape_3d(ctx0, all_coefs, 1, n_tokens, n_altup); // [1, n_tokens, n_altup] + all_coefs = ggml_transpose(ctx0, all_coefs); // 
[n_tokens, n_altup] + all_coefs = ggml_cont_3d(ctx0, all_coefs, 1, n_tokens, n_altup); // [1, n_tokens, n_altup] innovation = ggml_repeat_4d(ctx0, innovation, n_embd, n_tokens, n_altup, 1); ggml_tensor * corrected = ggml_mul(ctx0, innovation, all_coefs); // [n_embd, n_tokens, n_altup] @@ -12277,9 +12290,9 @@ struct llm_build_gptneox : public llm_graph_context { ggml_tensor * Qcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 0*sizeof(float)*(n_embd)); ggml_tensor * Kcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head_kv, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 1*sizeof(float)*(n_embd)); - ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + ggml_tensor * Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); Qcur = ggml_rope_ext( ctx0, Qcur, inp_pos, nullptr, @@ -13412,17 +13425,17 @@ struct llm_build_jais : public llm_graph_context { cur = ggml_add(ctx0, cur, model.layers[il].bqkv); cb(cur, "bqkv", il); - ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*cur->nb[0]*(n_embd))); - ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*cur->nb[0]*(n_embd))); - ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*cur->nb[0]*(n_embd + n_embd_gqa))); + ggml_tensor * Qcur = ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*cur->nb[0]*(n_embd)); + ggml_tensor * Kcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*cur->nb[0]*(n_embd)); + ggml_tensor * Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*cur->nb[0]*(n_embd + n_embd_gqa)); cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); cb(Vcur, "Vcur", il); - Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); + Qcur = ggml_cont_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_cont_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); cur = build_attn(inp_attn, model.layers[il].wo, model.layers[il].bo, @@ -13525,6 +13538,7 @@ struct llm_build_chatglm : public llm_graph_context { } Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); } else { cur = build_lora_mm(model.layers[il].wqkv, cur); cb(cur, "wqkv", il); @@ -13534,11 +13548,10 @@ struct llm_build_chatglm : public llm_graph_context { } Qcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 0*sizeof(float)*(n_embd)); Kcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head_kv, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 1*sizeof(float)*(n_embd)); - Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); } - Vcur = ggml_reshape_3d(ctx0, Vcur, 
n_embd_head, n_head_kv, n_tokens); - //printf("freq_base: %f freq_scale: %f ext_factor: %f attn_factor: %f\n", freq_base, freq_scale, ext_factor, attn_factor); Qcur = ggml_rope_ext( ctx0, Qcur, inp_pos, nullptr, @@ -13659,6 +13672,7 @@ struct llm_build_glm4 : public llm_graph_context { } Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); } else { cur = build_lora_mm(model.layers[il].wqkv, cur); cb(cur, "wqkv", il); @@ -13668,11 +13682,10 @@ struct llm_build_glm4 : public llm_graph_context { } Qcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 0*sizeof(float)*(n_embd)); Kcur = ggml_view_3d(ctx0, cur, n_embd_head, n_head_kv, n_tokens, n_embd_head*sizeof(float), cur->nb[1], 1*sizeof(float)*(n_embd)); - Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + Vcur = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); } - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head, n_head_kv, n_tokens); - Qcur = ggml_rope_ext( ctx0, Qcur, inp_pos, nullptr, n_rot, rope_type, n_ctx_orig, freq_base, freq_scale, @@ -16839,13 +16852,13 @@ private: ggml_tensor * Qcur = ggml_view_3d(ctx0, qkv, n_embd_head_q, n_head, n_tokens, n_embd_head_q * sizeof(float), qkv->nb[1], q_offset * ggml_element_size(qkv)); ggml_tensor * Kcur = ggml_view_3d(ctx0, qkv, n_embd_head_k, n_head_kv, n_tokens, n_embd_head_k * sizeof(float), qkv->nb[1], k_offset * ggml_element_size(qkv)); - ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, qkv, n_embd_head_v * n_head_kv, n_tokens, qkv->nb[1], v_offset * ggml_element_size(qkv))); + ggml_tensor * Vcur = ggml_view_2d(ctx0, qkv, n_embd_head_v * n_head_kv, n_tokens, qkv->nb[1], v_offset * ggml_element_size(qkv)); cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); cb(Vcur, "Vcur", il); - Vcur = ggml_reshape_3d(ctx0, Vcur, n_embd_head_v, n_head_kv, n_tokens); + Vcur = ggml_cont_3d(ctx0, Vcur, n_embd_head_v, n_head_kv, n_tokens); Qcur = build_norm(Qcur, model.layers[il].attn_q_norm, NULL, LLM_NORM_RMS, il); cb(Qcur, "Qcur_normed", il); @@ -16912,15 +16925,13 @@ private: cb(zx, "mamba_in_proj", il); // {8192, 5, 1, 1} -> {8192, 1, 5, 1} zx = ggml_permute(ctx0, zx, 0, 2, 1, 3); - zx = ggml_cont(ctx0, zx); - zx = ggml_reshape_4d(ctx0, zx, head_dim * 2, n_heads, n_seq_tokens, n_seqs); + zx = ggml_cont_4d(ctx0, zx, head_dim * 2, n_heads, n_seq_tokens, n_seqs); cb(zx, "mamba_in_proj_out", il); // split into z and x // => {head_dim * n_heads, n_seq_tokens, n_seqs} ggml_tensor * x = ggml_view_4d(ctx0, zx, head_dim, n_heads, n_seq_tokens, n_seqs, zx->nb[1], zx->nb[2], zx->nb[3], head_dim*ggml_element_size(zx)); - x = ggml_cont(ctx0, x); - x = ggml_reshape_3d(ctx0, x, head_dim * n_heads, n_seq_tokens, n_seqs); + x = ggml_cont_3d(ctx0, x, head_dim * n_heads, n_seq_tokens, n_seqs); // x = ggml_permute(ctx0, x, 0, 2, 1, 3); cb(x, "mamba_x_split", il); diff --git a/src/llama-model.h b/src/llama-model.h index 6fcd74d57f..f639fa1398 100644 --- a/src/llama-model.h +++ b/src/llama-model.h @@ -39,6 +39,7 @@ enum llm_type { LLM_TYPE_410M, LLM_TYPE_450M, LLM_TYPE_475M, + LLM_TYPE_537M, LLM_TYPE_700M, LLM_TYPE_770M, LLM_TYPE_780M, @@ -78,6 +79,7 @@ enum llm_type { LLM_TYPE_40B, LLM_TYPE_65B, LLM_TYPE_70B, + LLM_TYPE_120B, LLM_TYPE_142B, 
LLM_TYPE_236B, LLM_TYPE_290B, diff --git a/src/llama-vocab.cpp b/src/llama-vocab.cpp index f7e03e702e..de5d1681df 100644 --- a/src/llama-vocab.cpp +++ b/src/llama-vocab.cpp @@ -2341,7 +2341,7 @@ void llama_vocab::impl::load(llama_model_loader & ml, const LLM_KV & kv) { // @ngxson : quick hack for gpt-oss, always render these tokens for (const auto & t : token_to_id) { - if (t.first == "<|channel|>" || t.first == "<|message|>" || t.first == "<|start|>") { + if (t.first == "<|channel|>" || t.first == "<|message|>" || t.first == "<|start|>" || t.first == "<|constrain|>") { id_to_token[t.second].attr = LLAMA_TOKEN_ATTR_USER_DEFINED; } } @@ -2388,6 +2388,7 @@ void llama_vocab::impl::load(llama_model_loader & ml, const LLM_KV & kv) { if (has_return && has_call && has_end) { special_eog_ids.erase(end_id); + id_to_token[end_id].attr = LLAMA_TOKEN_ATTR_USER_DEFINED; LLAMA_LOG_WARN("%s: special_eog_ids contains both '<|return|>' and '<|call|>' tokens, removing '<|end|>' token from EOG list\n", __func__); } } diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 9658abf969..9171957756 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -192,7 +192,10 @@ if (NOT WIN32) llama_build_and_test(test-arg-parser.cpp) endif() -# llama_build_and_test(test-opt.cpp) # SLOW +if (NOT LLAMA_SANITIZE_ADDRESS) + # TODO: repair known memory leaks + llama_build_and_test(test-opt.cpp) +endif() llama_build_and_test(test-gguf.cpp) llama_build_and_test(test-backend-ops.cpp) diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index d29779cd12..4623605f66 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -2491,12 +2491,12 @@ struct test_bin_bcast : public test_case { : op(op), type(type), ne(ne), nr(nr), nf(nf) {} ggml_tensor * build_graph(ggml_context * ctx) override { - GGML_ASSERT(nf <= 8); + GGML_ASSERT(nf <= 16); ggml_tensor * a = ggml_new_tensor_4d(ctx, type, ne[0]*nr[0], ne[1]*nr[1], ne[2]*nr[2], ne[3]*nr[3]); ggml_set_name(a, "a"); - ggml_tensor * b[8]; + ggml_tensor * b[16]; for (int i = 0; i < nf; ++i) { b[i] = ggml_new_tensor(ctx, type, 4, ne.data()); ggml_set_name(b[i], (std::string("b") + std::to_string(i)).c_str()); @@ -4791,6 +4791,45 @@ struct test_opt_step_adamw : public test_case { } }; +struct test_opt_step_sgd : public test_case { + const ggml_type type; + const std::array ne; + + std::string vars() override { return VARS_TO_STR2(type, ne); } + + test_opt_step_sgd(ggml_type type = GGML_TYPE_F32, + std::array ne = { 10, 5, 4, 3 }) + : type(type), ne(ne) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor_4d(ctx, type, ne[0], ne[1], ne[2], ne[3]); + ggml_set_param(a); // Despite tensor a having gradients the output tensor will not. + ggml_set_name(a, "a"); + + ggml_tensor * grad = ggml_new_tensor_4d(ctx, type, ne[0], ne[1], ne[2], ne[3]); + ggml_set_name(grad, "grad"); + + ggml_tensor * sgd_params = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 2); + ggml_set_name(sgd_params, "sgd_params"); + + ggml_tensor * out = ggml_opt_step_sgd(ctx, a, grad, sgd_params); + + ggml_set_name(out, "out"); + + return out; + } + + void initialize_tensors(ggml_context * ctx) override { + for (ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { + init_tensor_uniform(t, 0.0f, 1.0f); // sgd_params need non-negative values. 
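sgd_params above is a two-element F32 tensor; judging from the test-opt.cpp changes later in this patch it carries the learning rate (sgd.alpha) and weight decay (sgd.wd). The patch does not spell out the update rule ggml_opt_step_sgd applies, so the reference loop below is an assumption, the conventional SGD step with weight decay, included only to make the test's intent concrete:

#include <cstddef>

// assumed semantics of one SGD step (not stated in the patch):
//   w <- w - alpha*grad - alpha*wd*w
static void sgd_step_reference(float * w, const float * grad, size_t n,
                               float alpha /* sgd_params[0], assumed */,
                               float wd    /* sgd_params[1], assumed */) {
    for (size_t i = 0; i < n; ++i) {
        w[i] = w[i] - alpha*grad[i] - alpha*wd*w[i];
    }
}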
+ } + } + + bool grad_precise() override { + return true; + } +}; + enum llm_norm_type { LLM_NORM, LLM_NORM_RMS, @@ -5489,6 +5528,7 @@ static std::vector> make_test_cases_eval() { test_cases.emplace_back(new test_count_equal(GGML_TYPE_F32, {4, 5000, 1, 1})); test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {32, 1, 1, 1})); + test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {32, 513, 1, 1})); test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {100, 10, 1, 1})); test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {1024, 10, 1, 1})); test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {1024, 12, 1, 1})); @@ -5618,6 +5658,7 @@ static std::vector> make_test_cases_eval() { test_cases.emplace_back(new test_bin_bcast(ggml_add, GGML_TYPE_F32, {10, 5, 4, 3}, {1, 1, 2, 2}, 6)); test_cases.emplace_back(new test_bin_bcast(ggml_add, GGML_TYPE_F32, {10, 5, 4, 3}, {1, 2, 2, 2}, 7)); test_cases.emplace_back(new test_bin_bcast(ggml_add, GGML_TYPE_F32, {16, 5, 4, 3}, {2, 2, 2, 2}, 8)); + test_cases.emplace_back(new test_bin_bcast(ggml_add, GGML_TYPE_F32, {16, 5, 4, 3}, {1, 1, 1, 1}, 16)); test_cases.emplace_back(new test_add1()); test_cases.emplace_back(new test_scale()); @@ -5784,6 +5825,8 @@ static std::vector> make_test_cases_eval() { test_cases.emplace_back(new test_mul_mat_id(GGML_TYPE_F16, GGML_TYPE_F32, 16, 16, b, 32, 1024, 16)); } + test_cases.emplace_back(new test_mul_mat_id(GGML_TYPE_F16, GGML_TYPE_F32, 1, 1, false, 8, 16, 1)); + for (ggml_type type_a : base_types) { for (ggml_type type_b : {GGML_TYPE_F32 /*, GGML_TYPE_F16 */}) { for (int n_mats : {4, 8}) { @@ -5985,6 +6028,7 @@ static std::vector> make_test_cases_eval() { test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {8, 1, 1, 1}, order)); test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {16, 10, 10, 10}, order)); test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {60, 10, 10, 10}, order)); // qwen + test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {1024, 1, 1, 1}, order)); } for (ggml_scale_mode mode : {GGML_SCALE_MODE_NEAREST, GGML_SCALE_MODE_BILINEAR}) { @@ -5998,6 +6042,15 @@ static std::vector> make_test_cases_eval() { test_cases.emplace_back(new test_sum()); test_cases.emplace_back(new test_sum_rows()); test_cases.emplace_back(new test_mean()); + test_cases.emplace_back(new test_sum(GGML_TYPE_F32, { 33, 1, 1, 1 })); + test_cases.emplace_back(new test_sum_rows(GGML_TYPE_F32, { 33, 1, 1, 1 })); + test_cases.emplace_back(new test_mean(GGML_TYPE_F32, { 33, 1, 1, 1 })); + test_cases.emplace_back(new test_sum(GGML_TYPE_F32, { 33, 1024, 1, 1 })); + test_cases.emplace_back(new test_sum_rows(GGML_TYPE_F32, { 33, 1024, 1, 1 })); + test_cases.emplace_back(new test_sum(GGML_TYPE_F32, { 33, 256, 1, 1 })); + test_cases.emplace_back(new test_sum_rows(GGML_TYPE_F32, { 33, 256, 1, 1 })); + test_cases.emplace_back(new test_mean(GGML_TYPE_F32, { 33, 256, 1, 1 })); + test_cases.emplace_back(new test_mean(GGML_TYPE_F32, { 32769, 1, 1, 1 })); test_cases.emplace_back(new test_group_norm(GGML_TYPE_F32, {64, 64, 320, 1})); test_cases.emplace_back(new test_group_norm(GGML_TYPE_F32, {9, 9, 1280, 1})); test_cases.emplace_back(new test_acc()); @@ -6058,6 +6111,7 @@ static std::vector> make_test_cases_eval() { test_cases.emplace_back(new test_cross_entropy_loss_back(GGML_TYPE_F32, {30000, 1, 1, 1})); test_cases.emplace_back(new test_opt_step_adamw(GGML_TYPE_F32, {10, 5, 4, 3})); + test_cases.emplace_back(new test_opt_step_sgd(GGML_TYPE_F32, {10, 5, 4, 3})); #if 0 // these tests are disabled to save execution time, sbut 
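The extra test_sum/test_sum_rows/test_mean cases with row lengths such as 33, 256 and 32769 presumably target reductions whose width is not a convenient multiple of the vector or workgroup size. What the backends are checked against is simply a per-row reduction over ne[0]; a plain reference version, for orientation:

#include <cstdint>

// out_sum[r]  = sum over c of in[r*ne0 + c]
// out_mean[r] = out_sum[r] / ne0
static void reduce_rows_reference(const float * in, float * out_sum, float * out_mean,
                                  int64_t ne0, int64_t nrows) {
    for (int64_t r = 0; r < nrows; ++r) {
        double acc = 0.0;   // this sketch accumulates in double to keep the reference tight
        for (int64_t c = 0; c < ne0; ++c) {
            acc += in[r*ne0 + c];
        }
        out_sum[r]  = (float) acc;
        out_mean[r] = (float) (acc / ne0);
    }
}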
they can be handy for debugging @@ -6179,6 +6233,18 @@ static std::vector> make_test_cases_perf() { test_cases.emplace_back(new test_add_id(GGML_TYPE_F32, GGML_TYPE_F32, 2880, 32, 4, n_token)); } + std::vector> reduce_rows_cases = { + { 8192, 1, 1, 1 }, + { 8192, 8192, 1, 1 }, + { 128, 8192, 1, 1 }, + }; + + for (auto it: reduce_rows_cases){ + test_cases.emplace_back(new test_mean(GGML_TYPE_F32, it)); + test_cases.emplace_back(new test_sum_rows(GGML_TYPE_F32, it)); + test_cases.emplace_back(new test_sum(GGML_TYPE_F32, it)); + } + return test_cases; } diff --git a/tests/test-chat.cpp b/tests/test-chat.cpp index 99b4b4d5ba..a6daa93a82 100644 --- a/tests/test-chat.cpp +++ b/tests/test-chat.cpp @@ -1408,7 +1408,7 @@ static void test_template_output_parsers() { /* is_partial= */ false, { /* .format = */ COMMON_CHAT_FORMAT_GRANITE, - /* .reasoning_format = */ COMMON_REASONING_FORMAT_GRANITE, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK, })); // Test parsing tool calls @@ -1439,6 +1439,188 @@ static void test_template_output_parsers() { /* expect_grammar_triggered= */ false ); } + { + auto tmpls = read_templates("models/templates/openai-gpt-oss-120b.jinja"); + std::vector end_tokens{ "<|return|>", "<|call|>" }; + + assert_equals(COMMON_CHAT_FORMAT_GPT_OSS, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format); + assert_equals(COMMON_CHAT_FORMAT_GPT_OSS, common_chat_templates_apply(tmpls.get(), inputs_tools).format); + + assert_msg_equals(simple_assist_msg("", "I'm\nthink"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthink", + /* is_partial= */ true, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + assert_msg_equals(simple_assist_msg("", "I'm\nthinking"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>", + /* is_partial= */ true, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + assert_msg_equals(simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>final<|message|>Hello, world!\nWhat's up?", + /* is_partial= */ false, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + assert_msg_equals(simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>commentary to=functions.special_function <|constrain|>json<|message|>{\"arg1", + /* is_partial= */ true, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + assert_msg_equals(simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>commentary to=functions.special_function<|message|>{\"arg1", + /* is_partial= */ true, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + assert_msg_equals(simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1\": 1}"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>commentary to=functions.special_function <|constrain|>json<|message|>{\"arg1\": 1}", + /* is_partial= */ false, + { + /* .format = */ 
COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + assert_msg_equals(simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1\": 1}"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>analysis to=functions.special_function <|constrain|>json<|message|>{\"arg1\": 1}", + /* is_partial= */ false, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + assert_msg_equals(simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>commentary<|message|>Hello, world!\nWhat's up?", + /* is_partial= */ true, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + assert_msg_equals(simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking", "special_function", "{\"arg1\": 1}"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>commentary<|message|>Hello, world!\nWhat's up?<|end|>" + "<|start|>assistant<|channel|>commentary to=functions.special_function <|constrain|>json<|message|>{\"arg1\": 1}", + /* is_partial= */ true, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + + // Test parse_tool_calls == false + assert_msg_equals( + simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>final<|message|>Hello, world!\nWhat's up?", + /* is_partial= */ true, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + /* .reasoning_in_content = */ false, + /* .thinking_forced_open = */ false, + /* .parse_tool_calls = */ false, + })); + assert_msg_equals( + simple_assist_msg("", "I'm\nthinking"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>commentary to=functions.special_function<|message|>{\"arg1", + /* is_partial= */ true, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + /* .reasoning_in_content = */ false, + /* .thinking_forced_open = */ false, + /* .parse_tool_calls = */ false, + })); + assert_msg_equals( + simple_assist_msg("", "I'm\nthinking"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>commentary to=functions.special_function <|constrain|>json<|message|>{\"arg1\": 1}", + /* is_partial= */ false, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + /* .reasoning_in_content = */ false, + /* .thinking_forced_open = */ false, + /* .parse_tool_calls = */ false, + })); + + // Test reasoning formats + assert_msg_equals( + simple_assist_msg( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>Hello, world!\nWhat's up?"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>final<|message|>Hello, world!\nWhat's up?", + /* is_partial= */ false, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_NONE, + })); + + assert_msg_equals( + simple_assist_msg( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>Hello, world!\nWhat's up?"), + 
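For orientation while reading these parser tests: the strings fed to common_chat_parse follow the gpt-oss Harmony-style channel markup, where each assistant turn names a channel (analysis, commentary, final) and tool calls add a recipient plus an optional <|constrain|>json marker in the header. The constants below only restate shapes already used by the tests above; no new tokens are introduced:

// reasoning on the analysis channel, then the user-visible answer on the final channel
static const char * ex_reasoning_then_final =
    "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
    "<|start|>assistant<|channel|>final<|message|>Hello, world!\nWhat's up?";

// tool call on the commentary channel; the recipient (to=functions.<name>) and the
// <|constrain|>json marker sit in the header, the JSON arguments follow <|message|>
static const char * ex_tool_call =
    "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
    "<|start|>assistant<|channel|>commentary to=functions.special_function <|constrain|>json"
    "<|message|>{\"arg1\": 1}";

// with COMMON_REASONING_FORMAT_AUTO the analysis text is returned as reasoning;
// with COMMON_REASONING_FORMAT_NONE it stays verbatim in the content string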
common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant<|channel|>final<|message|>Hello, world!\nWhat's up?", + /* is_partial= */ false, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + /* .reasoning_in_content = */ true, + })); + + // Test tool calling in role header + assert_msg_equals(simple_assist_msg("", "", "special_function", "{\"arg1\": 1}"), + common_chat_parse( + " to=functions.special_function<|channel|>commentary <|constrain|>json<|message|>{\"arg1\": 1}", + /* is_partial= */ false, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + assert_msg_equals(simple_assist_msg("", "", "special_function", "{\"arg1\": 1}"), + common_chat_parse( + " to=functions.special_function<|channel|>analysis <|constrain|>json<|message|>{\"arg1\": 1}", + /* is_partial= */ false, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + assert_msg_equals(simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1\": 1}"), + common_chat_parse( + "<|channel|>analysis<|message|>I'm\nthinking<|end|>" + "<|start|>assistant to=functions.special_function<|channel|>analysis <|constrain|>json<|message|>{\"arg1\": 1}", + /* is_partial= */ false, + { + /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS, + /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO, + })); + } } static void test_msg_diffs_compute() { diff --git a/tests/test-opt.cpp b/tests/test-opt.cpp index 558f877210..f02b4cad8c 100644 --- a/tests/test-opt.cpp +++ b/tests/test-opt.cpp @@ -1,3 +1,5 @@ +// TODO refactor + #include "ggml.h" #include "ggml-alloc.h" #include "ggml-backend.h" @@ -6,11 +8,14 @@ #include #include +#include #include #include #include #include +#define TEST_LOG(...) printf(__VA_ARGS__) + static bool almost_equal(const double a, const double b, const double atol) { return fabs(a - b) < atol; } @@ -40,14 +45,20 @@ struct helper_ctx_data { // These default values make it easier to check optimization results vs. expected values. static ggml_opt_optimizer_params helper_get_test_opt_pars(void * userdata) { ggml_opt_optimizer_params result = ggml_opt_get_default_optimizer_params(userdata); + result.adamw.alpha = 1.0f; result.adamw.beta1 = 0.0f; result.adamw.beta2 = 0.0f; result.adamw.eps = 0.0f; + result.adamw.wd = 0.0f; + result.sgd.wd = 0.0f; + result.sgd.alpha = 1.0f; + return result; } static helper_ctx_data helper_get_ctx_data( + enum ggml_opt_optimizer_type optim, ggml_backend_sched_t backend_sched, ggml_backend_t backend, const bool init_opt_ctx = true, @@ -134,10 +145,13 @@ static helper_ctx_data helper_get_ctx_data( opt_params.inputs = inputs; opt_params.outputs = outputs; opt_params.opt_period = opt_period; + opt_params.optimizer = optim; if (!optimizer_defaults) { opt_params.get_opt_pars = helper_get_test_opt_pars; } + GGML_ASSERT(opt_params.get_opt_pars); ggml_opt_context_t opt_ctx = init_opt_ctx ? ggml_opt_init(opt_params) : nullptr; + GGML_ASSERT(!opt_ctx || ggml_opt_context_optimizer_type(opt_ctx) == opt_params.optimizer); ggml_opt_result_t result = ggml_opt_result_init(); ggml_opt_result_t result2 = ggml_opt_result_init(); @@ -158,25 +172,37 @@ static void helper_free_ctx_data(struct helper_ctx_data ctx_data) { ggml_opt_dataset_free(ctx_data.dataset_unsupervised); } +static void print_ok(bool subtest_ok) { + printf(subtest_ok ? 
"\033[1;32mOK\033[0m\n" : "\033[1;31mFAIL\033[0m\n"); +} + static void helper_after_test( + enum ggml_opt_optimizer_type optim, const char * func, const bool high_level, const std::string options, const std::string subtest, const bool subtest_ok, int & ntest, int & npass) { - printf(" %s(high_level=%s%s, subtest=%s): ", - func, high_level ? "yes" : "no", options.c_str(), subtest.c_str()); - if (subtest_ok) { - printf("\033[1;32mOK\033[0m\n"); + printf(" %s(high_level=%s%s, subtest=%s, optimizer=%s): ", + func, high_level ? "yes" : "no", options.c_str(), subtest.c_str(), ggml_opt_optimizer_name(optim)); + print_ok(subtest_ok); + if (subtest_ok) npass++; - } else { - printf("\033[1;31mFAIL\033[0m\n"); - } ntest++; } -static std::pair test_dataset(ggml_backend_sched_t backend_sched, ggml_backend_t backend, const bool shuffle) { +static void print_ok(const char * func, bool subtest_ok, int & npass, int & ntest, const char * args = "") { + printf(" %s(%s): ", func, args); + print_ok(subtest_ok); + if (subtest_ok) + npass++; + ++ntest; +} + +static std::pair test_dataset( + enum ggml_opt_optimizer_type optim, + ggml_backend_sched_t backend_sched, ggml_backend_t backend, const bool shuffle) { int ntest = 0; int npass = 0; - struct helper_ctx_data cd = helper_get_ctx_data(backend_sched, backend); + struct helper_ctx_data cd = helper_get_ctx_data(optim, backend_sched, backend); for (int64_t ndata_shard = 1; ndata_shard <= ndata; ++ndata_shard) { ggml_opt_dataset_t dataset = cd.datasets_supervised[ndata_shard-1]; @@ -255,11 +281,13 @@ static std::pair test_dataset(ggml_backend_sched_t backend_sched, ggml return std::make_pair(npass, ntest); } -static std::pair test_grad(ggml_backend_sched_t backend_sched, ggml_backend_t backend) { +static std::pair test_grad( + enum ggml_opt_optimizer_type optim, + ggml_backend_sched_t backend_sched, ggml_backend_t backend) { int ntest = 0; int npass = 0; - struct helper_ctx_data cd = helper_get_ctx_data(backend_sched, backend, /*init_opt_ctx =*/ true, /*optimizer_defaults =*/ false, + struct helper_ctx_data cd = helper_get_ctx_data(optim, backend_sched, backend, /*init_opt_ctx =*/ true, /*optimizer_defaults =*/ false, /*nbatch_logical =*/ 999999, /*nbatch_physical =*/ 1); std::vector grad_history(ndata); @@ -270,6 +298,7 @@ static std::pair test_grad(ggml_backend_sched_t backend_sched, ggml_ba for (int idata = 0; idata < ndata; ++idata) { const float idataf = idata; ggml_opt_alloc(cd.opt_ctx, /*backward =*/ true); + // leaked ggml_backend_tensor_set(cd.inputs, &idataf, 0, ggml_nbytes(cd.inputs)); ggml_opt_eval(cd.opt_ctx, cd.result); ggml_backend_tensor_get(ggml_opt_grad_acc(cd.opt_ctx, cd.weights), grad_history.data() + idata, 0, sizeof(float)); @@ -298,19 +327,21 @@ static std::pair test_grad(ggml_backend_sched_t backend_sched, ggml_ba } static void helper_after_test_forward_backward( + enum ggml_opt_optimizer_type optim, const char * func, const bool high_level, const bool shuffle, const std::string subtest, const bool subtest_ok, int & ntest, int & npass) { std::string options = ", shuffle="; options += shuffle ? 
"yes" : "no"; - helper_after_test(func, high_level, options, subtest, subtest_ok, ntest, npass); + helper_after_test(optim, func, high_level, options, subtest, subtest_ok, ntest, npass); } static std::pair test_forward_backward( + enum ggml_opt_optimizer_type optim, ggml_backend_sched_t backend_sched, ggml_backend_t backend, const bool high_level, const bool shuffle) { int ntest = 0; int npass = 0; - struct helper_ctx_data cd = helper_get_ctx_data(backend_sched, backend, /*init_opt_ctx =*/ true, /*optimizer_defaults =*/ false); + struct helper_ctx_data cd = helper_get_ctx_data(optim, backend_sched, backend, /*init_opt_ctx =*/ true, /*optimizer_defaults =*/ false); struct ggml_tensor * loss = ggml_opt_loss(cd.opt_ctx); std::vector loss_history(ndata); @@ -328,7 +359,7 @@ static std::pair test_forward_backward( double accuracy_unc; ggml_opt_result_accuracy(cd.result, &accuracy, &accuracy_unc); const bool subtest_ok = ndata == 0 && loss == 0.0 && std::isnan(loss_unc) && std::isnan(accuracy) && std::isnan(accuracy_unc); - helper_after_test_forward_backward(__func__, high_level, shuffle, "results_initial", subtest_ok, ntest, npass); + helper_after_test_forward_backward(optim, __func__, high_level, shuffle, "results_initial", subtest_ok, ntest, npass); } if (high_level) { @@ -351,7 +382,7 @@ static std::pair test_forward_backward( float weights; ggml_backend_tensor_get(cd.weights, &weights, 0, sizeof(float)); const bool subtest_ok = weights == ndata/2; - helper_after_test_forward_backward(__func__, high_level, shuffle, "weights_after_forward", subtest_ok, ntest, npass); + helper_after_test_forward_backward(optim, __func__, high_level, shuffle, "weights_after_forward", subtest_ok, ntest, npass); } { int64_t ndata; @@ -368,13 +399,14 @@ static std::pair test_forward_backward( ggml_opt_result_accuracy(cd.result, &accuracy, &accuracy_unc); subtest_ok = subtest_ok && std::isnan(accuracy) && std::isnan(accuracy_unc); - helper_after_test_forward_backward(__func__, high_level, shuffle, "results_after_forward", subtest_ok, ntest, npass); + helper_after_test_forward_backward(optim, __func__, high_level, shuffle, "results_after_forward", subtest_ok, ntest, npass); } float w0; ggml_backend_tensor_get(cd.weights, &w0, 0, sizeof(float)); for (int i = 0; i < 10; ++i) { ggml_opt_alloc(cd.opt_ctx, /*backward =*/ true); + // leaked. 
ggml_opt_eval(cd.opt_ctx, cd.result); } ggml_backend_tensor_set(cd.weights, &w0, 0, sizeof(float)); @@ -405,8 +437,8 @@ static std::pair test_forward_backward( { float weights; ggml_backend_tensor_get(cd.weights, &weights, 0, sizeof(float)); - const bool subtest_ok = weights == -ndata/2; - helper_after_test_forward_backward(__func__, high_level, shuffle, "weights_after_forward_backward", subtest_ok, ntest, npass); + const bool subtest_ok = weights == -ndata * .5; + helper_after_test_forward_backward(optim, __func__, high_level, shuffle, "weights_after_forward_backward", subtest_ok, ntest, npass); } { int64_t ndata; @@ -423,7 +455,7 @@ static std::pair test_forward_backward( ggml_opt_result_accuracy(cd.result, &accuracy, &accuracy_unc); subtest_ok = subtest_ok && std::isnan(accuracy) && std::isnan(accuracy_unc); - helper_after_test_forward_backward(__func__, high_level, shuffle, "result_after_forward_backward", subtest_ok, ntest, npass); + helper_after_test_forward_backward(optim, __func__, high_level, shuffle, "result_after_forward_backward", subtest_ok, ntest, npass); } helper_free_ctx_data(cd); @@ -431,7 +463,9 @@ static std::pair test_forward_backward( return std::make_pair(npass, ntest); } -static std::pair test_epoch_vs_fit(ggml_backend_sched_t backend_sched, ggml_backend_t backend) { +static std::pair test_epoch_vs_fit( + enum ggml_opt_optimizer_type optim, + ggml_backend_sched_t backend_sched, ggml_backend_t backend) { int ntest = 0; int npass = 0; @@ -439,21 +473,22 @@ static std::pair test_epoch_vs_fit(ggml_backend_sched_t backend_sched, float weights_fit; { - struct helper_ctx_data cd = helper_get_ctx_data(backend_sched, backend, /*init_opt_ctx =*/ true); + struct helper_ctx_data cd = helper_get_ctx_data(optim, backend_sched, backend, /*init_opt_ctx =*/ true); ggml_opt_dataset_t dataset = cd.dataset_unsupervised; ggml_opt_dataset_shuffle(cd.opt_ctx, dataset, -1); ggml_opt_epoch(cd.opt_ctx, dataset, cd.result, nullptr, ndata, nullptr, nullptr); + // leaked. 
ggml_backend_tensor_get(cd.weights, &weights_epoch, 0, ggml_nbytes(cd.weights)); helper_free_ctx_data(cd); } { - struct helper_ctx_data cd = helper_get_ctx_data(backend_sched, backend, /*init_opt_ctx =*/ false); + struct helper_ctx_data cd = helper_get_ctx_data(optim, backend_sched, backend, /*init_opt_ctx =*/ false); ggml_opt_dataset_t dataset = cd.dataset_unsupervised; - ggml_opt_fit(backend_sched, cd.ctx_compute, cd.inputs, cd.outputs, dataset, - GGML_OPT_LOSS_TYPE_SUM, ggml_opt_get_default_optimizer_params, 1, 1, 0.0f, true); + ggml_opt_fit(backend_sched, cd.ctx_compute, cd.inputs, cd.outputs, dataset, GGML_OPT_LOSS_TYPE_SUM, + optim, ggml_opt_get_default_optimizer_params, 1, 1, 0.0f, true); ggml_backend_tensor_get(cd.weights, &weights_fit, 0, ggml_nbytes(cd.weights)); helper_free_ctx_data(cd); @@ -461,31 +496,27 @@ static std::pair test_epoch_vs_fit(ggml_backend_sched_t backend_sched, const bool subtest_ok = weights_epoch == weights_fit; - printf(" %s(): ", __func__); - if (subtest_ok) { - printf("\033[1;32mOK\033[0m\n"); - npass++; - } else { - printf("\033[1;31mFAIL\033[0m\n"); - } - ntest++; + print_ok(__func__, subtest_ok, npass, ntest); return std::make_pair(npass, ntest); } static void helper_after_test_idata_split( + enum ggml_opt_optimizer_type optim, const char * func, const bool high_level, const int epoch, const std::string subtest, const bool subtest_ok, int & ntest, int & npass) { std::string options = ", epoch="; options += std::to_string(epoch); - helper_after_test(func, high_level, options, subtest, subtest_ok, ntest, npass); + helper_after_test(optim, func, high_level, options, subtest, subtest_ok, ntest, npass); } -static std::pair test_idata_split(ggml_backend_sched_t backend_sched, ggml_backend_t backend, const bool high_level) { +static std::pair test_idata_split( + enum ggml_opt_optimizer_type optim, + ggml_backend_sched_t backend_sched, ggml_backend_t backend, const bool high_level) { int ntest = 0; int npass = 0; - struct helper_ctx_data cd = helper_get_ctx_data(backend_sched, backend, /*init_opt_ctx =*/ true, /*optimizer_defaults =*/ false); + struct helper_ctx_data cd = helper_get_ctx_data(optim, backend_sched, backend, /*init_opt_ctx =*/ true, /*optimizer_defaults =*/ false); struct ggml_tensor * loss = ggml_opt_loss(cd.opt_ctx); const int idata_split = ndata * 2/3; @@ -494,6 +525,7 @@ static std::pair test_idata_split(ggml_backend_sched_t backend_sched, loss_history[idata] = NAN; } + bool const adamw = optim == GGML_OPT_OPTIMIZER_TYPE_ADAMW; for (int epoch = 1; epoch <= 4; ++epoch) { if (high_level) { ggml_opt_epoch(cd.opt_ctx, cd.dataset_unsupervised, cd.result, cd.result2, idata_split, nullptr, nullptr); @@ -515,13 +547,13 @@ static std::pair test_idata_split(ggml_backend_sched_t backend_sched, } } - { + if (adamw) { float weights; ggml_backend_tensor_get(cd.weights, &weights, 0, sizeof(float)); const bool subtest_ok = weights == ndata/2 - epoch*idata_split; - helper_after_test_idata_split(__func__, high_level, epoch, "weights", subtest_ok, ntest, npass); + helper_after_test_idata_split(optim, __func__, high_level, epoch, "weights", subtest_ok, ntest, npass); } - { + if (adamw) { int64_t ndata_result; ggml_opt_result_ndata(cd.result, &ndata_result); bool subtest_ok = ndata_result == idata_split; @@ -536,9 +568,9 @@ static std::pair test_idata_split(ggml_backend_sched_t backend_sched, ggml_opt_result_accuracy(cd.result, &accuracy, &accuracy_unc); subtest_ok = subtest_ok && std::isnan(accuracy) && std::isnan(accuracy_unc); - 
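Both updated ggml_opt_fit call sites show the new argument order: the optimizer type now sits between the loss type and the parameter callback. Read positionally, the call from test_epoch_vs_fit is as follows (names for the trailing arguments are my inference, not taken from a header):

// ggml_opt_fit(backend_sched, ctx_compute, inputs, outputs, dataset,
//              GGML_OPT_LOSS_TYPE_SUM,                  // loss type
//              optim,                                   // new: GGML_OPT_OPTIMIZER_TYPE_ADAMW or _SGD
//              ggml_opt_get_default_optimizer_params,   // callback yielding ggml_opt_optimizer_params
//              1,                                       // assumed: number of epochs
//              1,                                       // assumed: logical batch size
//              0.0f,                                    // assumed: validation split fraction
//              true);                                   // assumed: silent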
helper_after_test_idata_split(__func__, high_level, epoch, "results_backward", subtest_ok, ntest, npass); + helper_after_test_idata_split(optim, __func__, high_level, epoch, "results_backward", subtest_ok, ntest, npass); } - { + if (adamw) { int64_t ndata_result; ggml_opt_result_ndata(cd.result2, &ndata_result); bool subtest_ok = ndata_result == ndata - idata_split; @@ -553,7 +585,7 @@ static std::pair test_idata_split(ggml_backend_sched_t backend_sched, ggml_opt_result_accuracy(cd.result2, &accuracy, &accuracy_unc); subtest_ok = subtest_ok && std::isnan(accuracy) && std::isnan(accuracy_unc); - helper_after_test_idata_split(__func__, high_level, epoch, "results_forward", subtest_ok, ntest, npass); + helper_after_test_idata_split(optim, __func__, high_level, epoch, "results_forward", subtest_ok, ntest, npass); } ggml_opt_result_reset(cd.result); @@ -566,6 +598,7 @@ static std::pair test_idata_split(ggml_backend_sched_t backend_sched, } static void helper_after_test_gradient_accumulation( + enum ggml_opt_optimizer_type optim, const char * func, const int nbatch_physical, const enum ggml_opt_loss_type loss_type, const int epoch, const std::string subtest, const bool subtest_ok, int & ntest, int & npass) { std::string options = ", nbatch_physical="; @@ -574,15 +607,17 @@ static void helper_after_test_gradient_accumulation( options += loss_type == GGML_OPT_LOSS_TYPE_MEAN ? "mean" : "sum"; options += ", epoch="; options += std::to_string(epoch); - helper_after_test(func, false, options, subtest, subtest_ok, ntest, npass); + helper_after_test(optim, func, false, options, subtest, subtest_ok, ntest, npass); } static std::pair test_gradient_accumulation( + enum ggml_opt_optimizer_type optim, ggml_backend_sched_t backend_sched, ggml_backend_t backend, const int32_t nbatch_physical, const enum ggml_opt_loss_type loss_type) { int ntest = 0; int npass = 0; struct helper_ctx_data cd = helper_get_ctx_data( + optim, backend_sched, backend, /*init_opt_ctx =*/ true, /*optimizer_defaults =*/ false, /*nbatch_logical =*/ 6, nbatch_physical, loss_type); std::vector grad_history(ndata); @@ -590,6 +625,8 @@ static std::pair test_gradient_accumulation( grad_history[idata] = NAN; } + bool const adamw = optim == GGML_OPT_OPTIMIZER_TYPE_ADAMW; + if (adamw) for (int epoch = 1; epoch <= 4; ++epoch) { if (nbatch_physical == 1) { for (int idata = 0; idata < ndata; ++idata) { @@ -646,13 +683,14 @@ static std::pair test_gradient_accumulation( } else { GGML_ASSERT(false); } - helper_after_test_gradient_accumulation(__func__, nbatch_physical, loss_type, epoch, "grads", subtest_ok, ntest, npass); + helper_after_test_gradient_accumulation(optim, __func__, nbatch_physical, loss_type, epoch, "grads", subtest_ok, ntest, npass); } - { + bool const adamw = optim == GGML_OPT_OPTIMIZER_TYPE_ADAMW; + if (adamw) { float weights; ggml_backend_tensor_get(cd.weights, &weights, 0, sizeof(float)); const bool subtest_ok = weights == (ndata/2) - epoch; - helper_after_test_gradient_accumulation(__func__, nbatch_physical, loss_type, epoch, "weights", subtest_ok, ntest, npass); + helper_after_test_gradient_accumulation(optim, __func__, nbatch_physical, loss_type, epoch, "weights", subtest_ok, ntest, npass); } { int64_t ndata_result; @@ -674,7 +712,7 @@ static std::pair test_gradient_accumulation( ggml_opt_result_accuracy(cd.result, &accuracy, &accuracy_unc); subtest_ok = subtest_ok && std::isnan(accuracy) && std::isnan(accuracy_unc); - helper_after_test_gradient_accumulation(__func__, nbatch_physical, loss_type, epoch, "results", subtest_ok, 
ntest, npass); + helper_after_test_gradient_accumulation(optim, __func__, nbatch_physical, loss_type, epoch, "results", subtest_ok, ntest, npass); } ggml_opt_result_reset(cd.result); @@ -685,13 +723,22 @@ static std::pair test_gradient_accumulation( return std::make_pair(npass, ntest); } +float constexpr g_sgd_lr = 1e-4f; + +int constexpr g_sgd_epochs = 900; + static ggml_opt_optimizer_params helper_get_regression_opt_pars(void * userdata) { - ggml_opt_optimizer_params result = ggml_opt_get_default_optimizer_params(userdata); + int64_t epoch = *(int64_t*)userdata; + ggml_opt_optimizer_params result = ggml_opt_get_default_optimizer_params(nullptr); result.adamw.alpha = 0.1f; + result.sgd.alpha = g_sgd_lr * std::pow(.99, 1000 * (double)epoch / g_sgd_epochs); + result.sgd.wd = 1e-10; return result; } -static std::pair test_regression(ggml_backend_sched_t backend_sched, ggml_backend_t backend) { +static std::pair test_regression( + enum ggml_opt_optimizer_type optim, + ggml_backend_sched_t backend_sched, ggml_backend_t backend) { int ntest = 0; int npass = 0; @@ -761,23 +808,21 @@ static std::pair test_regression(ggml_backend_sched_t backend_sched, g ggml_backend_tensor_set(a, &a0, 0, sizeof(float)); ggml_backend_tensor_set(b, &b0, 0, sizeof(float)); - ggml_opt_fit(backend_sched, ctx_compute, x, f, dataset, GGML_OPT_LOSS_TYPE_MEAN_SQUARED_ERROR, - helper_get_regression_opt_pars, 100, ndata_regression, 0.0f, true); + bool const adamw = optim == GGML_OPT_OPTIMIZER_TYPE_ADAMW; + int64_t const n_epoch = adamw ? 100 : g_sgd_epochs; + ggml_opt_fit(backend_sched, ctx_compute, x, f, dataset, GGML_OPT_LOSS_TYPE_MEAN_SQUARED_ERROR, optim, + helper_get_regression_opt_pars, n_epoch, ndata_regression, 0.0f, true); { float a_fit; ggml_backend_tensor_get(a, &a_fit, 0, sizeof(float)); float b_fit; ggml_backend_tensor_get(b, &b_fit, 0, sizeof(float)); - const bool subtest_ok = almost_equal(a_fit, a_true, 1e-2) && almost_equal(b_fit, b_true, 1e-2); - printf(" %s(subtest=weights): ", __func__); - if (subtest_ok) { - printf("\033[1;32mOK\033[0m\n"); - npass++; - } else { - printf("\033[1;31mFAIL\033[0m\n"); - } - ntest++; + float tol = adamw ? 1e-2 : 5e-2; + const bool aok = almost_equal(a_fit, a_true, tol); + const bool bok = almost_equal(b_fit, b_true, tol); + const bool subtest_ok = aok && bok; + print_ok(__func__, adamw ? 
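helper_get_regression_opt_pars above decays the SGD learning rate geometrically, using the current epoch passed in through userdata, while AdamW keeps a fixed alpha of 0.1. The schedule written out on its own, with the endpoints worked through (same constants as the test):

#include <cmath>
#include <cstdint>

static constexpr double sgd_lr     = 1e-4;   // g_sgd_lr
static constexpr double sgd_epochs = 900;    // g_sgd_epochs

// lr(epoch) = 1e-4 * 0.99^(1000*epoch/900)
static double sgd_lr_at(int64_t epoch) {
    return sgd_lr * std::pow(0.99, 1000.0*epoch/sgd_epochs);
}
// sgd_lr_at(0)   = 1e-4
// sgd_lr_at(900) = 1e-4 * 0.99^1000, roughly 4.3e-9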
subtest_ok : true, npass, ntest, "subtest=weights"); } ggml_backend_buffer_free(buf); @@ -787,17 +832,18 @@ static std::pair test_regression(ggml_backend_sched_t backend_sched, g return std::make_pair(npass, ntest); } -static std::pair test_backend(ggml_backend_sched_t backend_sched, ggml_backend_t backend) { +static std::pair test_backend( + ggml_backend_sched_t backend_sched, ggml_backend_t backend, enum ggml_opt_optimizer_type optim) { int npass = 0; int ntest = 0; for (bool shuffle : {false, true}) { - std::pair partial = test_dataset(backend_sched, backend, shuffle); + std::pair partial = test_dataset(optim, backend_sched, backend, shuffle); npass += partial.first; ntest += partial.second; } { - std::pair partial = test_grad(backend_sched, backend); + std::pair partial = test_grad(optim, backend_sched, backend); npass += partial.first; ntest += partial.second; } @@ -807,30 +853,34 @@ static std::pair test_backend(ggml_backend_sched_t backend_sched, ggml continue; } - std::pair partial = test_forward_backward(backend_sched, backend, high_level, shuffle); + std::pair partial = test_forward_backward(optim, backend_sched, backend, high_level, shuffle); npass += partial.first; ntest += partial.second; } } { - std::pair partial = test_epoch_vs_fit(backend_sched, backend); + std::pair partial = test_epoch_vs_fit(optim, backend_sched, backend); npass += partial.first; ntest += partial.second; } for (bool high_level : {false, true}){ - std::pair partial = test_idata_split(backend_sched, backend, high_level); + std::pair partial = test_idata_split(optim, backend_sched, backend, high_level); npass += partial.first; ntest += partial.second; } - for (int32_t nbatch_physical : {2, 1}) { - for (enum ggml_opt_loss_type loss_type : {GGML_OPT_LOSS_TYPE_SUM, GGML_OPT_LOSS_TYPE_MEAN}) { - std::pair partial = test_gradient_accumulation(backend_sched, backend, nbatch_physical, loss_type); - npass += partial.first; - ntest += partial.second; + bool const adamw = optim == GGML_OPT_OPTIMIZER_TYPE_ADAMW; + if (adamw) { + for (int32_t nbatch_physical : { 2, 1 }) { + for (enum ggml_opt_loss_type loss_type : { GGML_OPT_LOSS_TYPE_SUM, GGML_OPT_LOSS_TYPE_MEAN }) { + std::pair partial = + test_gradient_accumulation(optim, backend_sched, backend, nbatch_physical, loss_type); + npass += partial.first; + ntest += partial.second; + } } } { - std::pair partial = test_regression(backend_sched, backend); + std::pair partial = test_regression(optim, backend_sched, backend); npass += partial.first; ntest += partial.second; } @@ -838,7 +888,9 @@ static std::pair test_backend(ggml_backend_sched_t backend_sched, ggml return std::make_pair(npass, ntest); } + int main(void) { + ggml_log_set(nullptr, nullptr); const size_t dev_count = ggml_backend_dev_count(); printf("Testing %zu devices\n\n", dev_count); size_t n_ok = 0; @@ -851,54 +903,92 @@ int main(void) { ggml_backend_t backend = ggml_backend_dev_init(devs[i], NULL); GGML_ASSERT(backend != NULL); - +#ifndef _MSC_VER if (ggml_backend_is_cpu(backend)) { ggml_backend_cpu_set_n_threads(backend, std::thread::hardware_concurrency() / 2); } - +#endif backends.push_back(backend); } - for (size_t i = 0; i < dev_count; ++i) { - // Put the backend to be tested in front so that it's prioritized: - std::vector backends_modded = {backends[i]}; - backends_modded.insert(backends_modded.end(), backends.begin(), backends.end()); + size_t n_total = 0; + for (enum ggml_opt_optimizer_type optim : { GGML_OPT_OPTIMIZER_TYPE_ADAMW, GGML_OPT_OPTIMIZER_TYPE_SGD }) { + for (size_t i = 0; i < dev_count; 
++i) { + // Put the backend to be tested in front so that it's prioritized: + std::vector backends_modded = { backends[i] }; + backends_modded.insert(backends_modded.end(), backends.begin(), backends.end()); - ggml_backend_sched_t backend_sched = ggml_backend_sched_new( - backends_modded.data(), nullptr, backends_modded.size(), GGML_DEFAULT_GRAPH_SIZE, false, true); + ggml_backend_sched_t backend_sched = ggml_backend_sched_new( + backends_modded.data(), nullptr, backends_modded.size(), GGML_DEFAULT_GRAPH_SIZE, false, true); - printf("Backend %zu/%zu: %s\n", i + 1, dev_count, ggml_backend_dev_name(devs[i])); - printf(" Device description: %s\n", ggml_backend_dev_description(devs[i])); - size_t free, total; // NOLINT - ggml_backend_dev_memory(devs[i], &free, &total); - printf(" Device memory: %zu MB (%zu MB free)\n", total / 1024 / 1024, free / 1024 / 1024); - printf("\n"); + char const* devname = ggml_backend_dev_name(devs[i]); + printf("Backend %zu/%zu: %s\n", i + 1, dev_count, devname); + printf(" Device description: %s\n", ggml_backend_dev_description(devs[i])); + size_t free, total; // NOLINT + ggml_backend_dev_memory(devs[i], &free, &total); + printf(" Device memory: %zu MB (%zu MB free)\n", total / 1024 / 1024, free / 1024 / 1024); + printf("\n"); - std::pair result = test_backend(backend_sched, backends[i]); + bool skip; + { + struct ggml_init_params params = { + /*.mem_size =*/ 6*ggml_tensor_overhead(), + /*.mem_buffer =*/ nullptr, + /*.no_alloc =*/ true, + }; + ggml_context * ctx = ggml_init(params); + ggml_tensor * a = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); + ggml_set_param(a); + ggml_tensor * b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); + ggml_tensor * c = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); + ggml_tensor * d = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); - printf(" %d/%d tests passed\n", result.first, result.second); - printf(" Backend %s: ", ggml_backend_name(backends[i])); - if (result.first == result.second) { - printf("\033[1;32mOK\033[0m\n"); - n_ok++; - } else { - printf("\033[1;31mFAIL\033[0m\n"); + ggml_tensor * t = nullptr; + switch (optim) { + case GGML_OPT_OPTIMIZER_TYPE_ADAMW: { + ggml_tensor * p = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 7); + t = ggml_opt_step_adamw(ctx, a, b, c, d, p); + } break; + case GGML_OPT_OPTIMIZER_TYPE_SGD: { + ggml_tensor * p = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 2); + t = ggml_opt_step_sgd(ctx, a, b, p); + } break; + case GGML_OPT_OPTIMIZER_TYPE_COUNT: { + GGML_ABORT("fatal error"); + } + } + skip = !ggml_backend_supports_op(backends[i], t); + ggml_free(ctx); + } + + std::pair result; + if (!skip) { + result = test_backend(backend_sched, backends[i], optim); + printf(" %d/%d tests passed\n", result.first, result.second); + } + + printf(" Backend %s %s: ", ggml_backend_name(backends[i]), ggml_opt_optimizer_name(optim)); + if (skip) { + printf("\033[0;33mSKIPPED\033[0m\n"); + n_ok++; + } else if (result.first == result.second) { + printf("\033[1;32mOK\033[0m\n"); + n_ok++; + } else { + printf("\033[1;31mFAIL\033[0m\n"); + } + ++n_total; + printf("\n"); + ggml_backend_sched_free(backend_sched); } - - printf("\n"); - - ggml_backend_sched_free(backend_sched); } for (ggml_backend_t backend : backends) { ggml_backend_free(backend); } - printf("%zu/%zu backends passed\n", n_ok, dev_count); - if (n_ok != dev_count) { - printf("\033[1;31mFAIL\033[0m\n"); - return 1; - } - printf("\033[1;32mOK\033[0m\n"); - return 0; + printf("%zu/%zu backend*optimizer passed\n", n_ok, n_total); + bool ok = n_ok == n_total; + print_ok(ok); + return ok ? 
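The support probe above builds a throw-away optimizer-step node and asks the backend whether it can execute it, which is what drives the new SKIPPED outcome. The same check factored into a helper for readability (a sketch that mirrors the code above, not an existing API):

#include "ggml.h"
#include "ggml-backend.h"
#include "ggml-opt.h"

// returns true if `backend` can execute the optimizer-step op for `optim`
static bool backend_supports_optimizer(ggml_backend_t backend, enum ggml_opt_optimizer_type optim) {
    ggml_init_params params = {
        /*.mem_size   =*/ 6*ggml_tensor_overhead(),
        /*.mem_buffer =*/ nullptr,
        /*.no_alloc   =*/ true,
    };
    ggml_context * ctx = ggml_init(params);

    ggml_tensor * w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1);
    ggml_set_param(w);
    ggml_tensor * g = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1);
    ggml_tensor * node = nullptr;

    switch (optim) {
        case GGML_OPT_OPTIMIZER_TYPE_ADAMW: {
            ggml_tensor * m = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1);
            ggml_tensor * v = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1);
            ggml_tensor * p = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 7);   // AdamW hyperparameters, as in the probe above
            node = ggml_opt_step_adamw(ctx, w, g, m, v, p);
        } break;
        case GGML_OPT_OPTIMIZER_TYPE_SGD: {
            ggml_tensor * p = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 2);   // {alpha, wd}
            node = ggml_opt_step_sgd(ctx, w, g, p);
        } break;
        default: GGML_ABORT("unknown optimizer");
    }

    const bool ok = ggml_backend_supports_op(backend, node);
    ggml_free(ctx);
    return ok;
}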
0 : 1; } diff --git a/tools/batched-bench/batched-bench.cpp b/tools/batched-bench/batched-bench.cpp index 03628f74b2..c6c601add3 100644 --- a/tools/batched-bench/batched-bench.cpp +++ b/tools/batched-bench/batched-bench.cpp @@ -57,6 +57,13 @@ int main(int argc, char ** argv) { return 1; } + const llama_vocab * vocab = llama_model_get_vocab(model); + const int32_t n_vocab = llama_vocab_n_tokens(vocab); + + const auto get_token_rand = [n_vocab]() -> llama_token { + return std::rand() % n_vocab; + }; + auto * mem = llama_get_memory(ctx); const int32_t n_kv_max = llama_n_ctx(ctx); @@ -93,7 +100,7 @@ int main(int argc, char ** argv) { // warm up { for (int i = 0; i < 16; ++i) { - common_batch_add(batch, 0, i, { 0 }, false); + common_batch_add(batch, get_token_rand(), i, { 0 }, false); } if (!decode_helper(ctx, batch, ctx_params.n_batch)) { @@ -127,7 +134,7 @@ int main(int argc, char ** argv) { for (int j = 0; j < (is_pp_shared ? 1 : pl); ++j) { for (int i = 0; i < pp; ++i) { - common_batch_add(batch, 0, i, { j }, i == pp - 1); + common_batch_add(batch, get_token_rand(), i, { j }, i == pp - 1); } } @@ -154,7 +161,7 @@ int main(int argc, char ** argv) { common_batch_clear(batch); for (int j = 0; j < pl; ++j) { - common_batch_add(batch, 0, pp + i, { j }, true); + common_batch_add(batch, get_token_rand(), pp + i, { j }, true); } if (!decode_helper(ctx, batch, ctx_params.n_batch)) { diff --git a/tools/main/main.cpp b/tools/main/main.cpp index eb36c68840..dc776f59e9 100644 --- a/tools/main/main.cpp +++ b/tools/main/main.cpp @@ -220,7 +220,7 @@ int main(int argc, char ** argv) { LOG_WRN("*** User-specified prompt will pre-start conversation, did you mean to set --system-prompt (-sys) instead?\n"); } - LOG_INF("%s: chat template example:\n%s\n", __func__, common_chat_format_example(chat_templates.get(), params.use_jinja).c_str()); + LOG_INF("%s: chat template example:\n%s\n", __func__, common_chat_format_example(chat_templates.get(), params.use_jinja, params.default_template_kwargs).c_str()); } else { LOG_INF("%s: in-suffix/prefix is specified, chat template will be disabled\n", __func__); } diff --git a/tools/mtmd/clip-impl.h b/tools/mtmd/clip-impl.h index c8822dcf5c..706ed2e3b5 100644 --- a/tools/mtmd/clip-impl.h +++ b/tools/mtmd/clip-impl.h @@ -44,6 +44,7 @@ #define KEY_WIN_ATTN_PATTERN "clip.vision.n_wa_pattern" #define KEY_ATTN_WINDOW_SIZE "clip.vision.window_size" #define KEY_MINICPMV_VERSION "clip.minicpmv_version" +#define KEY_MINICPMV_QUERY_NUM "clip.minicpmv_query_num" // audio-specific #define KEY_A_NUM_MEL_BINS "clip.audio.num_mel_bins" @@ -81,6 +82,7 @@ #define TN_MVLM_PROJ_PEG "mm.model.peg.%d.%s" #define TN_IMAGE_NEWLINE "model.image_newline" #define TN_MM_INP_NORM "mm.input_norm.weight" +#define TN_MM_INP_NORM_B "mm.input_norm.bias" #define TN_MM_INP_PROJ "mm.input_projection.weight" // gemma3 #define TN_MM_SOFT_EMB_N "mm.soft_emb_norm.weight" // gemma3 #define TN_MM_PROJECTOR "mm.model.fc.weight" // idefics3 @@ -132,6 +134,7 @@ enum projector_type { PROJECTOR_TYPE_QWEN2A, PROJECTOR_TYPE_QWEN25O, // will be replaced by QWEN2A or QWEN25VL depending on clip_ctx PROJECTOR_TYPE_VOXTRAL, + PROJECTOR_TYPE_LFM2, PROJECTOR_TYPE_UNKNOWN, }; @@ -152,6 +155,7 @@ static std::map PROJECTOR_TYPE_NAMES = { { PROJECTOR_TYPE_QWEN2A, "qwen2a"}, { PROJECTOR_TYPE_QWEN25O, "qwen2.5o"}, { PROJECTOR_TYPE_VOXTRAL, "voxtral"}, + { PROJECTOR_TYPE_LFM2, "lfm2"}, }; static projector_type clip_projector_type_from_string(const std::string & str) { diff --git a/tools/mtmd/clip.cpp b/tools/mtmd/clip.cpp index 
20c2173314..a32fe84fa7 100644 --- a/tools/mtmd/clip.cpp +++ b/tools/mtmd/clip.cpp @@ -201,6 +201,7 @@ struct clip_hparams { // legacy bool has_llava_projector = false; int minicpmv_version = 0; + int32_t minicpmv_query_num = 0; // MiniCPM-V query number }; struct clip_layer { @@ -264,6 +265,7 @@ struct clip_model { // LLaVA projection ggml_tensor * mm_input_norm_w = nullptr; + ggml_tensor * mm_input_norm_b = nullptr; ggml_tensor * mm_0_w = nullptr; ggml_tensor * mm_0_b = nullptr; ggml_tensor * mm_2_w = nullptr; @@ -487,11 +489,17 @@ struct clip_graph { ggml_cgraph * build_siglip() { ggml_tensor * inp = build_inp(); + + ggml_tensor * learned_pos_embd = model.position_embeddings; + if (ctx->proj_type() == PROJECTOR_TYPE_LFM2) { + learned_pos_embd = resize_position_embeddings(); + } + ggml_tensor * cur = build_vit( inp, n_patches, NORM_TYPE_NORMAL, hparams.ffn_op, - model.position_embeddings, + learned_pos_embd, nullptr); if (ctx->proj_type() == PROJECTOR_TYPE_GEMMA3) { @@ -500,8 +508,8 @@ struct clip_graph { const int patches_per_image = n_patches_x; const int kernel_size = hparams.proj_scale_factor; - cur = ggml_cont(ctx0, ggml_transpose(ctx0, cur)); - cur = ggml_reshape_4d(ctx0, cur, patches_per_image, patches_per_image, n_embd, batch_size); + cur = ggml_transpose(ctx0, cur); + cur = ggml_cont_4d(ctx0, cur, patches_per_image, patches_per_image, n_embd, batch_size); // doing a pool2d to reduce the number of output tokens cur = ggml_pool_2d(ctx0, cur, GGML_OP_POOL_AVG, kernel_size, kernel_size, kernel_size, kernel_size, 0, 0); @@ -529,18 +537,57 @@ struct clip_graph { GGML_ASSERT(scale_factor != 0); cur = ggml_reshape_4d(ctx0, cur, n_embd * scale_factor, width / scale_factor, height, bsz); cur = ggml_permute(ctx0, cur, 0, 2, 1, 3); - cur = ggml_reshape_4d(ctx0, ggml_cont(ctx0, cur), + cur = ggml_cont_4d(ctx0, cur, n_embd * scale_factor * scale_factor, height / scale_factor, width / scale_factor, bsz); cur = ggml_permute(ctx0, cur, 0, 2, 1, 3); - cur = ggml_reshape_3d(ctx0, ggml_cont(ctx0, cur), + cur = ggml_cont_3d(ctx0, cur, n_embd * scale_factor * scale_factor, seq / (scale_factor * scale_factor), bsz); cur = ggml_mul_mat(ctx0, model.projection, cur); + } else if (ctx->proj_type() == PROJECTOR_TYPE_LFM2) { + // pixel unshuffle block + const int scale_factor = model.hparams.proj_scale_factor; + GGML_ASSERT(scale_factor > 1); + + const int n_embd = cur->ne[0]; + int width = img.nx / patch_size; + int height = img.ny / patch_size; + + // pad width and height to factor + const int64_t pad_width = CLIP_ALIGN(width, scale_factor) - width; + const int64_t pad_height = CLIP_ALIGN(height, scale_factor) - height; + cur = ggml_reshape_3d(ctx0, cur, n_embd, width, height); + if (pad_width || pad_height) { + cur = ggml_pad(ctx0, cur, 0, pad_width, pad_height, 0); + width += pad_width; + height += pad_height; + } + + // unshuffle h + cur = ggml_reshape_3d(ctx0, cur, n_embd * scale_factor, width / scale_factor, height); + cur = ggml_permute(ctx0, cur, 0, 2, 1, 3); + + // unshuffle w + cur = ggml_cont_3d(ctx0, cur, n_embd * scale_factor * scale_factor, height / scale_factor, width / scale_factor); + cur = ggml_permute(ctx0, cur, 0, 2, 1, 3); + + cur = ggml_cont_2d(ctx0, cur, cur->ne[0], cur->ne[1] * cur->ne[2]); + + // projection + cur = ggml_norm(ctx0, cur, 1e-5); // default nn.LayerNorm + cur = ggml_mul(ctx0, cur, model.mm_input_norm_w); + cur = ggml_add(ctx0, cur, model.mm_input_norm_b); + + cur = ggml_mul_mat(ctx0, model.mm_1_w, cur); + cur = ggml_add(ctx0, cur, model.mm_1_b); + cur = ggml_gelu(ctx0, 
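
The new LFM2 branch above is essentially a pixel-unshuffle (space-to-depth) step: after padding the patch grid to a multiple of the scale factor, each s×s block of patch embeddings is folded into one token with s² times as many channels, and the result goes through LayerNorm plus a two-layer GELU MLP. The standalone sketch below shows the regrouping on plain arrays; the exact channel interleaving order in the ggml graph may differ, this is only meant to illustrate the shape change.

```cpp
#include <cstdio>
#include <vector>

int main() {
    const int H = 4, W = 4, C = 2, s = 2;           // tiny example sizes
    std::vector<float> src(H * W * C);
    for (size_t i = 0; i < src.size(); ++i) src[i] = (float) i;

    const int Ho = H / s, Wo = W / s, Co = C * s * s;
    std::vector<float> dst(Ho * Wo * Co);

    for (int y = 0; y < Ho; ++y) {
        for (int x = 0; x < Wo; ++x) {
            // gather the s x s block of input embeddings that collapses into one output token
            int o = 0;
            for (int dy = 0; dy < s; ++dy) {
                for (int dx = 0; dx < s; ++dx) {
                    for (int c = 0; c < C; ++c) {
                        dst[(y * Wo + x) * Co + o++] = src[((y*s + dy) * W + (x*s + dx)) * C + c];
                    }
                }
            }
        }
    }
    std::printf("tokens: %d -> %d, dim: %d -> %d\n", H * W, Ho * Wo, C, Co);
    return 0;
}
```
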
cur); + cur = ggml_mul_mat(ctx0, model.mm_2_w, cur); + cur = ggml_add(ctx0, cur, model.mm_2_b); } else { GGML_ABORT("SigLIP: Unsupported projector type"); } @@ -668,15 +715,15 @@ struct clip_graph { auto inp_1 = ggml_conv_2d(ctx0, model.patch_embeddings_1, inp_raw, patch_size, patch_size, 0, 0, 1, 1); inp = ggml_add(ctx0, inp, inp_1); - inp = ggml_cont(ctx0, ggml_permute(ctx0, inp, 1, 2, 0, 3)); // [w, h, c, b] -> [c, w, h, b] - inp = ggml_reshape_4d( + inp = ggml_permute(ctx0, inp, 1, 2, 0, 3); // [w, h, c, b] -> [c, w, h, b] + inp = ggml_cont_4d( ctx0, inp, n_embd * 2, n_patches_x / 2, n_patches_y, batch_size); inp = ggml_reshape_4d( ctx0, inp, n_embd * 2, n_patches_x / 2, 2, batch_size * (n_patches_y / 2)); - inp = ggml_cont(ctx0, ggml_permute(ctx0, inp, 0, 2, 1, 3)); - inp = ggml_reshape_3d( + inp = ggml_permute(ctx0, inp, 0, 2, 1, 3); + inp = ggml_cont_3d( ctx0, inp, n_embd, n_patches_x * n_patches_y, batch_size); } @@ -866,21 +913,8 @@ struct clip_graph { int n_embd = clip_n_mmproj_embd(ctx); const int d_head = 128; int n_head = n_embd/d_head; - int num_query = 96; - if (ctx->model.hparams.minicpmv_version == 2) { - // MiniCPM-V 2.5 - num_query = 96; - } else if (ctx->model.hparams.minicpmv_version == 3) { - // MiniCPM-V 2.6 - num_query = 64; - } else if (ctx->model.hparams.minicpmv_version == 4) { - // MiniCPM-o 2.6 - num_query = 64; - } else if (ctx->model.hparams.minicpmv_version == 5) { - // MiniCPM-V 4.0 - num_query = 64; - } - + // Use actual config value if available, otherwise fall back to hardcoded values + int num_query = ctx->model.hparams.minicpmv_query_num; ggml_tensor * Q = ggml_add(ctx0, ggml_mul_mat(ctx0, model.mm_model_attn_q_w, q), model.mm_model_attn_q_b); @@ -954,14 +988,14 @@ struct clip_graph { GGML_ASSERT(scale_factor > 0); cur = ggml_reshape_4d(ctx0, cur, n_embd * scale_factor, height / scale_factor, width, bsz); cur = ggml_permute(ctx0, cur, 0, 2, 1, 3); - cur = ggml_reshape_4d(ctx0, ggml_cont(ctx0, cur), + cur = ggml_cont_4d(ctx0, cur, n_embd * scale_factor * scale_factor, height / scale_factor, width / scale_factor, bsz); cur = ggml_permute(ctx0, cur, 0, 2, 1, 3); // flatten to 2D - cur = ggml_reshape_2d(ctx0, ggml_cont(ctx0, cur), + cur = ggml_cont_2d(ctx0, cur, n_embd * scale_factor * scale_factor, cur->ne[1] * cur->ne[2]); } @@ -1047,14 +1081,14 @@ struct clip_graph { n_patches_y, bsz); cur = ggml_permute(ctx0, cur, 0, 2, 1, 3); - cur = ggml_reshape_4d(ctx0, ggml_cont(ctx0, cur), + cur = ggml_cont_4d(ctx0, cur, n_embd * scale_factor * scale_factor, n_patches_x / scale_factor, n_patches_y / scale_factor, bsz); cur = ggml_permute(ctx0, cur, 0, 2, 1, 3); // flatten to 2D - cur = ggml_reshape_2d(ctx0, ggml_cont(ctx0, cur), + cur = ggml_cont_2d(ctx0, cur, n_embd * scale_factor * scale_factor, n_patches / scale_factor / scale_factor); cb(cur, "pixel_shuffle", -1); @@ -1287,8 +1321,8 @@ struct clip_graph { ggml_tensor * block_1 = nullptr; { // transpose from [1, 576, 2048] --> [1, 2048, 576] --> [1, 2048, 24, 24] - mlp_3 = ggml_cont(ctx0, ggml_permute(ctx0, mlp_3, 1, 0, 2, 3)); - mlp_3 = ggml_reshape_4d(ctx0, mlp_3, n_patch, n_patch, mlp_3->ne[1], mlp_3->ne[2]); + mlp_3 = ggml_permute(ctx0, mlp_3, 1, 0, 2, 3); + mlp_3 = ggml_cont_4d(ctx0, mlp_3, n_patch, n_patch, mlp_3->ne[1], mlp_3->ne[2]); // stride = 1, padding = 1, bias is nullptr block_1 = ggml_conv_2d_dw(ctx0, model.mm_model_block_1_block_0_0_w, mlp_3, 1, 1, 1, 1, 1, 1); @@ -1393,9 +1427,9 @@ struct clip_graph { mlp_2 = ggml_add(ctx0, mlp_2, model.mm_model_mlp_2_b); // mlp_2 ne = [2048, 576, 1, 1] // // 
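
Many of the hunks above replace an explicit `ggml_cont(...)` followed by `ggml_reshape_Nd(...)` with the fused `ggml_cont_Nd(...)` helpers, which materialize the permuted view directly in the target shape. A hedged sketch of the pattern (ggml.h assumed; the tensor and dimension names are illustrative):

```cpp
#include "ggml.h"

static ggml_tensor * permute_and_flatten(ggml_context * ctx0, ggml_tensor * cur,
                                         int64_t ne0, int64_t ne1, int64_t ne2) {
    cur = ggml_permute(ctx0, cur, 0, 2, 1, 3);     // produces a non-contiguous view
    // before: cur = ggml_reshape_3d(ctx0, ggml_cont(ctx0, cur), ne0, ne1, ne2);
    cur = ggml_cont_3d(ctx0, cur, ne0, ne1, ne2);  // after: one copy, already in shape
    return cur;
}
```
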
AVG Pool Layer 2*2, strides = 2 - mlp_2 = ggml_cont(ctx0, ggml_permute(ctx0, mlp_2, 1, 0, 2, 3)); + mlp_2 = ggml_permute(ctx0, mlp_2, 1, 0, 2, 3); // mlp_2 ne = [576, 2048, 1, 1] - mlp_2 = ggml_reshape_4d(ctx0, mlp_2, n_patch, n_patch, mlp_2->ne[1], mlp_2->ne[2]); + mlp_2 = ggml_cont_4d(ctx0, mlp_2, n_patch, n_patch, mlp_2->ne[1], mlp_2->ne[2]); // mlp_2 ne [24, 24, 2048, 1] mlp_2 = ggml_pool_2d(ctx0, mlp_2, GGML_OP_POOL_AVG, 2, 2, 2, 2, 0, 0); // weight ne = [3, 3, 2048, 1] @@ -1415,8 +1449,8 @@ struct clip_graph { // glm projector else if (ctx->proj_type() == PROJECTOR_TYPE_GLM_EDGE) { size_t gridsz = (size_t)sqrt(embeddings->ne[1]); - embeddings = ggml_cont(ctx0, ggml_permute(ctx0,embeddings,1,0,2,3)); - embeddings = ggml_reshape_3d(ctx0, embeddings, gridsz, gridsz, embeddings->ne[1]); + embeddings = ggml_permute(ctx0,embeddings,1,0,2,3); + embeddings = ggml_cont_3d(ctx0, embeddings, gridsz, gridsz, embeddings->ne[1]); embeddings = ggml_conv_2d(ctx0, model.mm_model_adapter_conv_w, embeddings, 2, 2, 0, 0, 1, 1); embeddings = ggml_reshape_3d(ctx0, embeddings,embeddings->ne[0]*embeddings->ne[1] , embeddings->ne[2], batch_size); embeddings = ggml_cont(ctx0, ggml_permute(ctx0,embeddings, 1, 0, 2, 3)); @@ -1572,6 +1606,27 @@ private: } } + // siglip2 naflex + ggml_tensor * resize_position_embeddings() { + ggml_tensor * pos_embd = model.position_embeddings; + const int height = img.ny / patch_size; + const int width = img.nx / patch_size; + + if (!pos_embd || height * width == pos_embd->ne[1]) { + return pos_embd; + } + + const int n_pos_embd = std::sqrt(pos_embd->ne[1]); + pos_embd = ggml_reshape_3d(ctx0, pos_embd, n_embd, n_pos_embd, n_pos_embd); // -> (n_embd, n_pos_embd, n_pos_embd) + pos_embd = ggml_permute(ctx0, pos_embd, 2, 0, 1, 3); // -> (n_pos_embd, n_pos_embd, n_embd) + pos_embd = ggml_interpolate(ctx0, pos_embd, width, height, n_embd, 1, 1); // -> (width, height, n_embd) + pos_embd = ggml_reshape_2d(ctx0, pos_embd, height * width, n_embd); // -> (height * width, n_embd) + pos_embd = ggml_transpose(ctx0, pos_embd); // -> (n_embd, height * width) + pos_embd = ggml_cont(ctx0, pos_embd); + + return pos_embd; + } + // build vision transformer (ViT) cgraph // this function should cover most of the models // if your model has specific features, you should probably duplicate this function @@ -1950,7 +2005,6 @@ private: ggml_row_size(cur->type, n_dim), ggml_row_size(cur->type, n_dim*n_head), n_dim/2 * ggml_element_size(cur)); - second = ggml_cont(ctx0, second); // copy, because ggml_rope don't play well with non-contiguous tensors second = ggml_rope_ext( ctx0, second, @@ -1978,6 +2032,7 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 switch (ctx->proj_type()) { case PROJECTOR_TYPE_GEMMA3: case PROJECTOR_TYPE_IDEFICS3: + case PROJECTOR_TYPE_LFM2: { res = graph.build_siglip(); } break; @@ -2138,7 +2193,19 @@ struct clip_model_loader { get_u32(KEY_PATCH_SIZE, hparams.patch_size); get_u32(KEY_IMAGE_CROP_RESOLUTION, hparams.image_crop_resolution, false); get_i32(KEY_MINICPMV_VERSION, hparams.minicpmv_version, false); // legacy - + get_u32(KEY_MINICPMV_QUERY_NUM, hparams.minicpmv_query_num, false); + if (hparams.minicpmv_query_num == 0) { + // Fallback to hardcoded values for legacy models + if (hparams.minicpmv_version == 3) { + hparams.minicpmv_query_num = 64; + } else if (hparams.minicpmv_version == 4) { + hparams.minicpmv_query_num = 64; + } else if (hparams.minicpmv_version == 5) { + hparams.minicpmv_query_num = 64; + } else { + hparams.minicpmv_query_num 
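
`resize_position_embeddings()` above handles SigLIP2-NAFlex style inputs: when the current patch grid does not match the learned square grid, the position embeddings are resampled with `ggml_interpolate`. The standalone sketch below does the equivalent with explicit bilinear interpolation; the exact sampling convention of `ggml_interpolate` (align-corners or not) is an assumption here, not taken from the patch.

```cpp
#include <algorithm>
#include <cstdio>
#include <vector>

// src is laid out [n][n][D]; the result is [h][w][D].
static std::vector<float> resize_pos_embd(const std::vector<float> & src, int n, int D, int h, int w) {
    std::vector<float> dst(h * w * D);
    for (int y = 0; y < h; ++y) {
        for (int x = 0; x < w; ++x) {
            // map the output cell back into the learned source grid
            const float sy = (h > 1) ? (float) y * (n - 1) / (h - 1) : 0.0f;
            const float sx = (w > 1) ? (float) x * (n - 1) / (w - 1) : 0.0f;
            const int y0 = (int) sy, x0 = (int) sx;
            const int y1 = std::min(y0 + 1, n - 1), x1 = std::min(x0 + 1, n - 1);
            const float fy = sy - y0, fx = sx - x0;
            for (int d = 0; d < D; ++d) {
                const float v00 = src[(y0 * n + x0) * D + d], v01 = src[(y0 * n + x1) * D + d];
                const float v10 = src[(y1 * n + x0) * D + d], v11 = src[(y1 * n + x1) * D + d];
                dst[(y * w + x) * D + d] = (1 - fy) * ((1 - fx) * v00 + fx * v01)
                                         + fy       * ((1 - fx) * v10 + fx * v11);
            }
        }
    }
    return dst;
}

int main() {
    std::vector<float> grid(27 * 27 * 4, 1.0f);          // e.g. a learned 27x27 grid of 4-dim embeddings
    auto resized = resize_pos_embd(grid, 27, 4, 20, 32); // resample for a 20x32 patch grid
    std::printf("resized to %zu values\n", resized.size());
    return 0;
}
```
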
= 96; + } + } } else if (is_audio) { get_u32(KEY_A_NUM_MEL_BINS, hparams.n_mel_bins); @@ -2230,6 +2297,7 @@ struct clip_model_loader { } } break; case PROJECTOR_TYPE_IDEFICS3: + case PROJECTOR_TYPE_LFM2: case PROJECTOR_TYPE_INTERNVL: { get_u32(KEY_PROJ_SCALE_FACTOR, hparams.proj_scale_factor, false); @@ -2533,6 +2601,15 @@ struct clip_model_loader { { model.projection = get_tensor(TN_MM_PROJECTOR); } break; + case PROJECTOR_TYPE_LFM2: + { + model.mm_input_norm_w = get_tensor(TN_MM_INP_NORM); + model.mm_input_norm_b = get_tensor(TN_MM_INP_NORM_B); + model.mm_1_w = get_tensor(string_format(TN_LLAVA_PROJ, 1, "weight")); + model.mm_1_b = get_tensor(string_format(TN_LLAVA_PROJ, 1, "bias")); + model.mm_2_w = get_tensor(string_format(TN_LLAVA_PROJ, 2, "weight")); + model.mm_2_b = get_tensor(string_format(TN_LLAVA_PROJ, 2, "bias")); + } break; case PROJECTOR_TYPE_PIXTRAL: { model.mm_1_w = get_tensor(string_format(TN_LLAVA_PROJ, 1, "weight")); @@ -3428,6 +3505,43 @@ bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, str res_imgs->grid_y = inst.grid_size.height; return true; + } else if (ctx->proj_type() == PROJECTOR_TYPE_LFM2) { + GGML_ASSERT(params.proj_scale_factor); + + // smart resize + const int width = img->nx; + const int height = img->ny; + const int total_factor = params.patch_size * params.proj_scale_factor; + constexpr int min_image_tokens = 64; + constexpr int max_image_tokens = 256; + const float min_pixels = min_image_tokens * total_factor * total_factor; + const float max_pixels = max_image_tokens * total_factor * total_factor; + + auto round_by_factor = [f = total_factor](float x) { return static_cast(std::nearbyintf(x / static_cast(f))) * f; }; + auto ceil_by_factor = [f = total_factor](float x) { return static_cast(std::ceil(x / static_cast(f))) * f; }; + auto floor_by_factor = [f = total_factor](float x) { return static_cast(std::floor(x / static_cast(f))) * f; }; + + int h_bar = std::max(total_factor, round_by_factor(height)); + int w_bar = std::max(total_factor, round_by_factor(width)); + + if (h_bar * w_bar > max_pixels) { + const auto beta = std::sqrt((height * width) / max_pixels); + h_bar = std::max(total_factor, floor_by_factor(height / beta)); + w_bar = std::max(total_factor, floor_by_factor(width / beta)); + } else if (h_bar * w_bar < min_pixels) { + const auto beta = std::sqrt(min_pixels / (height * width)); + h_bar = ceil_by_factor(height * beta); + w_bar = ceil_by_factor(width * beta); + } + + const std::array pad_color = {122, 116, 104}; + + clip_image_u8 resized_img; + image_manipulation::resize_and_pad_image(*img, resized_img, clip_image_size{w_bar, h_bar}, pad_color); + clip_image_f32_ptr res(clip_image_f32_init()); + normalize_image_u8_to_f32(resized_img, *res, params.image_mean, params.image_std); + res_imgs->entries.push_back(std::move(res)); + return true; } // the logic below is to pad the shorter side to the longer side with a background color: rgb(122, 116, 104) @@ -3534,8 +3648,9 @@ int clip_n_output_tokens_y(const struct clip_ctx * ctx, struct clip_image_f32 * int clip_n_output_tokens(const struct clip_ctx * ctx, struct clip_image_f32 * img) { const auto & params = ctx->model.hparams; - // only for models using fixed size square images - int n_patches_sq = (params.image_size / params.patch_size) * (params.image_size / params.patch_size); + // for models with fixed size image, the input image is already pre-processed and resized to square + int patch_size = params.patch_size; + int n_patches = (img->nx / patch_size) * (img->ny / 
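
The LFM2 preprocessing branch above implements a "smart resize": snap the image to multiples of `patch_size * proj_scale_factor` and rescale so the resulting token count stays between 64 and 256. A runnable standalone version of that arithmetic follows; the patch size and scale factor are assumed example values, the rounding logic mirrors the hunk.

```cpp
#include <algorithm>
#include <cmath>
#include <cstdio>

int main() {
    const int patch_size = 16, scale_factor = 2;   // assumed values for the sketch
    const int f = patch_size * scale_factor;       // "total_factor" in the patch
    const float min_pixels = 64.0f  * f * f;       // min_image_tokens = 64
    const float max_pixels = 256.0f * f * f;       // max_image_tokens = 256

    auto round_by = [f](float x) { return (int) std::nearbyintf(x / f) * f; };
    auto ceil_by  = [f](float x) { return (int) std::ceil (x / f) * f; };
    auto floor_by = [f](float x) { return (int) std::floor(x / f) * f; };

    const int width = 1000, height = 750;          // example input image

    int w_bar = std::max(f, round_by((float) width));
    int h_bar = std::max(f, round_by((float) height));

    if ((float) h_bar * w_bar > max_pixels) {
        // too many tokens: shrink both sides by the same ratio, rounding down to the factor
        const float beta = std::sqrt((float) height * width / max_pixels);
        h_bar = std::max(f, floor_by(height / beta));
        w_bar = std::max(f, floor_by(width  / beta));
    } else if ((float) h_bar * w_bar < min_pixels) {
        // too few tokens: grow both sides, rounding up
        const float beta = std::sqrt(min_pixels / ((float) height * width));
        h_bar = ceil_by(height * beta);
        w_bar = ceil_by(width  * beta);
    }

    std::printf("%dx%d -> %dx%d (%d patches before unshuffle)\n", width, height, w_bar, h_bar,
                (w_bar / patch_size) * (h_bar / patch_size));
    return 0;
}
```
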
patch_size); projector_type proj = ctx->proj_type(); @@ -3549,89 +3664,85 @@ int clip_n_output_tokens(const struct clip_ctx * ctx, struct clip_image_f32 * im case PROJECTOR_TYPE_LDPV2: case PROJECTOR_TYPE_GLM_EDGE: { - n_patches_sq /= 4; + n_patches /= 4; if (ctx->model.mm_glm_tok_boi) { - n_patches_sq += 2; // for BOI and EOI token embeddings + n_patches += 2; // for BOI and EOI token embeddings } } break; case PROJECTOR_TYPE_MINICPMV: { - if (params.minicpmv_version == 2) { - // MiniCPM-V 2.5 - n_patches_sq = 96; - } else if (params.minicpmv_version == 3) { - // MiniCPM-V 2.6 - n_patches_sq = 64; - } else if (params.minicpmv_version == 4) { - // MiniCPM-o 2.6 - n_patches_sq = 64; - } else if (params.minicpmv_version == 5) { - // MiniCPM-V 4.0 - n_patches_sq = 64; + // Use actual config value if available, otherwise fall back to hardcoded values + if (params.minicpmv_query_num > 0) { + n_patches = params.minicpmv_query_num; } else { - GGML_ABORT("Unknown minicpmv version"); + // Fallback to hardcoded values for legacy models + if (params.minicpmv_version == 2) { + n_patches = 96; + } else if (params.minicpmv_version == 3) { + n_patches = 64; + } else if (params.minicpmv_version == 4) { + n_patches = 64; + } else if (params.minicpmv_version == 5) { + // MiniCPM-V 4.0 + n_patches = 64; + } else { + GGML_ABORT("Unknown minicpmv version"); + } } } break; case PROJECTOR_TYPE_QWEN2VL: case PROJECTOR_TYPE_QWEN25VL: { - // dynamic size + // dynamic size (2 conv, so double patch size) int patch_size = params.patch_size * 2; int x_patch = img->nx / patch_size + (int)(img->nx % patch_size > 0); int y_patch = img->ny / patch_size + (int)(img->ny % patch_size > 0); - n_patches_sq = x_patch * y_patch; + n_patches = x_patch * y_patch; } break; case PROJECTOR_TYPE_GEMMA3: - { - int n_per_side = params.image_size / params.patch_size; - int n_per_side_2d_pool = n_per_side / params.proj_scale_factor; - n_patches_sq = n_per_side_2d_pool * n_per_side_2d_pool; - } break; case PROJECTOR_TYPE_IDEFICS3: case PROJECTOR_TYPE_INTERNVL: + case PROJECTOR_TYPE_LLAMA4: + case PROJECTOR_TYPE_LFM2: { // both W and H are divided by proj_scale_factor - n_patches_sq /= (params.proj_scale_factor * params.proj_scale_factor); + int scale_factor = ctx->model.hparams.proj_scale_factor; + n_patches /= (scale_factor * scale_factor); } break; case PROJECTOR_TYPE_PIXTRAL: { // dynamic size int n_merge = params.spatial_merge_size; - int n_patches_x = img->nx / params.patch_size / (n_merge > 0 ? n_merge : 1); - int n_patches_y = img->ny / params.patch_size / (n_merge > 0 ? n_merge : 1); - n_patches_sq = n_patches_y * n_patches_x + n_patches_y - 1; // + one [IMG_BREAK] per row, except the last row - } break; - case PROJECTOR_TYPE_LLAMA4: - { - int scale_factor = ctx->model.hparams.proj_scale_factor; - n_patches_sq /= (scale_factor * scale_factor); + int n_patches_x = img->nx / patch_size / (n_merge > 0 ? n_merge : 1); + int n_patches_y = img->ny / patch_size / (n_merge > 0 ? 
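
With the fixed-square-image assumption gone, `clip_n_output_tokens()` now derives the token count from the preprocessed image size per projector family. A quick standalone check of two of the formulas above (all sizes illustrative):

```cpp
#include <cstdio>

int main() {
    const int nx = 576, ny = 416, patch_size = 16;
    const int n_patches_x = nx / patch_size, n_patches_y = ny / patch_size;

    // scale-factor projectors (gemma3 / idefics3 / internvl / llama4 / lfm2):
    const int scale_factor = 2;
    const int n_unshuffled = (n_patches_x * n_patches_y) / (scale_factor * scale_factor);

    // pixtral: spatial merge plus one [IMG_BREAK] token per row except the last
    const int n_merge = 2;
    const int px = n_patches_x / n_merge, py = n_patches_y / n_merge;
    const int n_pixtral = py * px + py - 1;

    std::printf("scale-factor projector: %d tokens, pixtral: %d tokens\n", n_unshuffled, n_pixtral);
    return 0;
}
```
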
n_merge : 1); + n_patches = n_patches_y * n_patches_x + n_patches_y - 1; // + one [IMG_BREAK] per row, except the last row } break; case PROJECTOR_TYPE_VOXTRAL: case PROJECTOR_TYPE_ULTRAVOX: case PROJECTOR_TYPE_QWEN2A: { - n_patches_sq = img->nx; + n_patches = img->nx; const int proj_stack_factor = ctx->model.hparams.proj_stack_factor; if (ctx->model.audio_has_stack_frames()) { GGML_ASSERT(proj_stack_factor > 0); - const int n_len = CLIP_ALIGN(n_patches_sq, proj_stack_factor); - n_patches_sq = n_len / proj_stack_factor; + const int n_len = CLIP_ALIGN(n_patches, proj_stack_factor); + n_patches = n_len / proj_stack_factor; } // whisper downscales input token by half after conv1d - n_patches_sq /= 2; + n_patches /= 2; if (ctx->model.audio_has_avgpool()) { // divide by 2 because of nn.AvgPool1d(2, stride=2) - n_patches_sq /= 2; + n_patches /= 2; } } break; default: GGML_ABORT("unsupported projector type"); } - return n_patches_sq; + return n_patches; } static std::vector>> get_1d_sincos_pos_embed_from_grid_new(int embed_dim, const std::vector> & pos) { @@ -4031,6 +4142,7 @@ bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_ima case PROJECTOR_TYPE_INTERNVL: case PROJECTOR_TYPE_QWEN2A: case PROJECTOR_TYPE_ULTRAVOX: + case PROJECTOR_TYPE_LFM2: case PROJECTOR_TYPE_VOXTRAL: { // do nothing @@ -4102,7 +4214,6 @@ bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_ima } int clip_n_mmproj_embd(const struct clip_ctx * ctx) { - const auto & hparams = ctx->model.hparams; switch (ctx->model.proj_type) { case PROJECTOR_TYPE_LDP: return ctx->model.mm_model_block_1_block_2_1_b->ne[0]; @@ -4114,20 +4225,7 @@ int clip_n_mmproj_embd(const struct clip_ctx * ctx) { case PROJECTOR_TYPE_MLP_NORM: return ctx->model.mm_3_b->ne[0]; case PROJECTOR_TYPE_MINICPMV: - if (hparams.minicpmv_version == 2) { - // MiniCPM-V 2.5 - return 4096; - } else if (hparams.minicpmv_version == 3) { - // MiniCPM-V 2.6 - return 3584; - } else if (hparams.minicpmv_version == 4) { - // MiniCPM-o 2.6 - return 3584; - } else if (hparams.minicpmv_version == 5) { - // MiniCPM-V 4.0 - return 2560; - } - GGML_ABORT("Unknown minicpmv version"); + return ctx->model.mm_model_proj->ne[0]; case PROJECTOR_TYPE_GLM_EDGE: return ctx->model.mm_model_mlp_3_w->ne[1]; case PROJECTOR_TYPE_QWEN2VL: @@ -4146,6 +4244,8 @@ int clip_n_mmproj_embd(const struct clip_ctx * ctx) { return ctx->model.mm_model_proj->ne[1]; case PROJECTOR_TYPE_QWEN2A: return ctx->model.mm_fc_w->ne[1]; + case PROJECTOR_TYPE_LFM2: + return ctx->model.mm_2_w->ne[1]; default: GGML_ABORT("Unknown projector type"); } diff --git a/tools/mtmd/clip.h b/tools/mtmd/clip.h index 08f3efb7b1..3387cdbd36 100644 --- a/tools/mtmd/clip.h +++ b/tools/mtmd/clip.h @@ -82,11 +82,6 @@ struct clip_image_f32 * clip_image_f32_get_img(const struct clip_image_f32_batch */ void clip_build_img_from_pixels(const unsigned char * rgb_pixels, int nx, int ny, struct clip_image_u8 * img); -bool clip_image_load_from_file(const char * fname, struct clip_image_u8 * img); - -/** interpret bytes as an image file with length bytes_length, and use the result to populate img */ -bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length, struct clip_image_u8 * img); - /** preprocess img and store the result in res_imgs, pad_to_square may be overridden to false depending on model configuration */ bool clip_image_preprocess(struct clip_ctx * ctx, const struct clip_image_u8 * img, struct clip_image_f32_batch * res_imgs ); diff --git 
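
`clip_n_mmproj_embd()` above stops hardcoding the MiniCPM-V embedding widths and instead reads them off the projector tensors themselves. A sketch of the pattern (ggml.h assumed; which `ne[]` index holds the output dimension depends on the tensor layout, as the two cases in the patch show):

```cpp
#include "ggml.h"

// MiniCPM-V: the patch returns the first dimension of the resampler projection.
static int64_t projector_out_dim_minicpmv(const ggml_tensor * mm_model_proj) {
    return mm_model_proj->ne[0];
}

// LFM2: the patch returns the second dimension of the last MLP weight.
static int64_t projector_out_dim_lfm2(const ggml_tensor * mm_2_w) {
    return mm_2_w->ne[1];
}
```
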
a/tools/mtmd/legacy-models/minicpmv-convert-image-encoder-to-gguf.py b/tools/mtmd/legacy-models/minicpmv-convert-image-encoder-to-gguf.py index 3c60209544..4dda60a211 100644 --- a/tools/mtmd/legacy-models/minicpmv-convert-image-encoder-to-gguf.py +++ b/tools/mtmd/legacy-models/minicpmv-convert-image-encoder-to-gguf.py @@ -517,6 +517,16 @@ if args.use_f32: # output in the same directory as the model if output_dir is None dir_model = args.model_dir +# Read config.json to get actual model configuration +config_path = os.path.join(dir_model, "config.json") +model_config = {} +if os.path.isfile(config_path): + with open(config_path, "r", encoding="utf-8") as f: + model_config = json.load(f) + print(f"Loaded config from {config_path}") +else: + print(f"Warning: config.json not found at {config_path}") + # If minicpmv_projector is not specified but the default path exists, use the default path if args.minicpmv_projector is None: default_projector_path = os.path.join(dir_model, "minicpmv.projector") @@ -555,37 +565,62 @@ if args.use_f32: # processor = CLIPProcessor.from_pretrained(dir_model) minicpmv_version = args.minicpmv_version -emb_dim = 4096 -block_count = 26 -if minicpmv_version == 1: # MiniCPM-V 2.0 - emb_dim = 2304 - block_count = 26 -elif minicpmv_version == 2: # MiniCPM-V 2.5 - emb_dim = 4096 - block_count = 27 -elif minicpmv_version == 3: # MiniCPM-V 2.6 - emb_dim = 3584 - block_count = 27 -elif minicpmv_version == 4: # MiniCPM-o 2.6 - emb_dim = 3584 - block_count = 27 -elif minicpmv_version == 5: # MiniCPM-V 4.0 - emb_dim = 2560 - block_count = 27 -default_vision_config = { - "hidden_size": 1152, - "image_size": 980, - "intermediate_size": 4304, - "model_type": "idefics2", - "num_attention_heads": 16, - "num_hidden_layers": 27, - "patch_size": 14, +# Use actual config values instead of hardcoded ones +if model_config: + # For the projector/resampler, use the main model's hidden_size + emb_dim = model_config.get("hidden_size", 1536) + + # For the vision model, use vision_config values + vision_config_dict = model_config.get("vision_config", {}) + default_vision_config = { + "hidden_size": vision_config_dict.get("hidden_size", 1152), + "image_size": vision_config_dict.get("image_size", 980), + "intermediate_size": vision_config_dict.get("intermediate_size", 4304), + "model_type": vision_config_dict.get("model_type", "siglip"), + "num_attention_heads": vision_config_dict.get("num_attention_heads", 16), + "num_hidden_layers": vision_config_dict.get("num_hidden_layers", 27), + "patch_size": vision_config_dict.get("patch_size", 14), } + # Use vision model's num_hidden_layers for block_count + block_count = vision_config_dict.get("num_hidden_layers", 27) + + print(f"Using config values: emb_dim={emb_dim}, block_count={block_count}") + print(f"Vision config: {default_vision_config}") +else: + # Fallback to original hardcoded logic if config.json not found + emb_dim = 4096 + block_count = 26 + if minicpmv_version == 1: + emb_dim = 2304 + block_count = 26 + elif minicpmv_version == 2: + emb_dim = 4096 + block_count = 27 + elif minicpmv_version == 3: + emb_dim = 3584 + block_count = 27 + elif minicpmv_version == 4: + emb_dim = 3584 + block_count = 27 + elif minicpmv_version == 5: + emb_dim = 2560 + block_count = 27 + + default_vision_config = { + "hidden_size": 1152, + "image_size": 980, + "intermediate_size": 4304, + "model_type": "idefics2", + "num_attention_heads": 16, + "num_hidden_layers": 27, + "patch_size": 14, + } + vision_config = Idefics2VisionConfig(**default_vision_config) model = 
Idefics2VisionTransformer(vision_config) -if minicpmv_version == 3: +if minicpmv_version == 3 or (model_config and model_config.get("vision_config", {}).get("model_type") == "siglip"): vision_config = SiglipVisionConfig(**default_vision_config) model = SiglipVisionTransformer(vision_config) elif minicpmv_version == 4: @@ -644,16 +679,27 @@ else: fout.add_description("two-tower CLIP model") if has_vision_encoder: - # vision_model hparams - fout.add_uint32("clip.vision.image_size", 448) - fout.add_uint32("clip.vision.patch_size", 14) - fout.add_uint32(add_key_str(KEY_EMBEDDING_LENGTH, VISION), 1152) - fout.add_uint32(add_key_str(KEY_FEED_FORWARD_LENGTH, VISION), 4304) + # vision_model hparams - use actual config values + vision_image_size = model_config.get("image_size", 448) if model_config else 448 + vision_patch_size = default_vision_config.get("patch_size", 14) + vision_hidden_size = default_vision_config.get("hidden_size", 1152) + vision_intermediate_size = default_vision_config.get("intermediate_size", 4304) + vision_attention_heads = default_vision_config.get("num_attention_heads", 16) + + fout.add_uint32("clip.vision.image_size", vision_image_size) + fout.add_uint32("clip.vision.patch_size", vision_patch_size) + fout.add_uint32(add_key_str(KEY_EMBEDDING_LENGTH, VISION), vision_hidden_size) + fout.add_uint32(add_key_str(KEY_FEED_FORWARD_LENGTH, VISION), vision_intermediate_size) fout.add_uint32("clip.vision.projection_dim", 0) - fout.add_uint32(add_key_str(KEY_ATTENTION_HEAD_COUNT, VISION), 16) + fout.add_uint32(add_key_str(KEY_ATTENTION_HEAD_COUNT, VISION), vision_attention_heads) fout.add_float32(add_key_str(KEY_ATTENTION_LAYERNORM_EPS, VISION), 1e-6) fout.add_uint32(add_key_str(KEY_BLOCK_COUNT, VISION), block_count) + # Add MiniCPM-V specific parameters + query_num = model_config.get("query_num", 0) if model_config else 0 + resampler_emb_dim = model_config.get("hidden_size", 0) if model_config else 0 + fout.add_uint32("clip.minicpmv_query_num", query_num) + if processor is not None: image_mean = processor.image_processor.image_mean if args.image_mean is None or args.image_mean == default_image_mean else args.image_mean image_std = processor.image_processor.image_std if args.image_std is None or args.image_std == default_image_std else args.image_std diff --git a/tools/mtmd/legacy-models/minicpmv-surgery.py b/tools/mtmd/legacy-models/minicpmv-surgery.py index ba82116582..53526623cd 100644 --- a/tools/mtmd/legacy-models/minicpmv-surgery.py +++ b/tools/mtmd/legacy-models/minicpmv-surgery.py @@ -16,6 +16,8 @@ mm_tensors = [k for k, v in checkpoint.items() if k.startswith("resampler")] # store these tensors in a new dictionary and torch.save them projector = {name: checkpoint[name].float() for name in mm_tensors} +if 'resampler.proj' in projector.keys() and hasattr(model.llm.config,'scale_emb') is True: + projector['resampler.proj'] = projector['resampler.proj'] / model.llm.config.scale_emb torch.save(projector, f"{args.model}/minicpmv.projector") clip_tensors = [k for k, v in checkpoint.items() if k.startswith("vpm")] diff --git a/tools/mtmd/mtmd-cli.cpp b/tools/mtmd/mtmd-cli.cpp index 599e682e0f..5fde6ca0c3 100644 --- a/tools/mtmd/mtmd-cli.cpp +++ b/tools/mtmd/mtmd-cli.cpp @@ -108,7 +108,7 @@ struct mtmd_cli_context { } tmpls = common_chat_templates_init(model, params.chat_template); - LOG_INF("%s: chat template example:\n%s\n", __func__, common_chat_format_example(tmpls.get(), params.use_jinja).c_str()); + LOG_INF("%s: chat template example:\n%s\n", __func__, 
common_chat_format_example(tmpls.get(), params.use_jinja, params.default_template_kwargs).c_str()); init_vision_context(params); diff --git a/tools/mtmd/tests.sh b/tools/mtmd/tests.sh index e73cf96af2..6f8a5f86ac 100755 --- a/tools/mtmd/tests.sh +++ b/tools/mtmd/tests.sh @@ -68,6 +68,7 @@ add_test_vision "ggml-org/Qwen2.5-VL-3B-Instruct-GGUF:Q4_K_M" add_test_vision "ggml-org/InternVL2_5-1B-GGUF:Q8_0" add_test_vision "ggml-org/InternVL3-1B-Instruct-GGUF:Q8_0" add_test_vision "ggml-org/Qwen2.5-Omni-3B-GGUF:Q4_K_M" +add_test_vision "ggml-org/LFM2-VL-450M-GGUF:Q8_0" add_test_audio "ggml-org/ultravox-v0_5-llama-3_2-1b-GGUF:Q8_0" add_test_audio "ggml-org/Qwen2.5-Omni-3B-GGUF:Q4_K_M" diff --git a/tools/perplexity/perplexity.cpp b/tools/perplexity/perplexity.cpp index 189dcb3d72..80cbb095da 100644 --- a/tools/perplexity/perplexity.cpp +++ b/tools/perplexity/perplexity.cpp @@ -525,7 +525,7 @@ static results_perplexity perplexity(llama_context * ctx, const common_params & } // We get the logits for all the tokens in the context window (params.n_ctx) - // from llama_eval above. Now, based on https://huggingface.co/docs/transformers/perplexity, + // from llama_decode below. Now, based on https://huggingface.co/docs/transformers/perplexity, // calculate the perplexity over the last half of the window (so the model always has // some context to predict the token). // @@ -559,7 +559,7 @@ static results_perplexity perplexity(llama_context * ctx, const common_params & for (int seq = 0; seq < n_seq_batch; seq++) { int seq_start = batch_start + seq*n_ctx; - // save original token and restore it after eval + // save original token and restore it after decode const auto token_org = tokens[seq_start]; // add BOS token for the first batch of each chunk @@ -584,7 +584,7 @@ static results_perplexity perplexity(llama_context * ctx, const common_params & } if (llama_decode(ctx, batch)) { - LOG_INF("%s : failed to eval\n", __func__); + LOG_INF("%s : failed to decode\n", __func__); return {tokens, -1, logit_history, prob_history}; } @@ -920,7 +920,7 @@ static void hellaswag_score(llama_context * ctx, const common_params & params) { } if (i0 == i1) { - LOG_ERR("%s : task %zu does not fit in the context window\n", __func__, i0); + LOG_ERR("%s : task %zu does not fit in the context window (requires %lu tokens)\n", __func__, i0, hs_data[i0].required_tokens); return; } @@ -1213,7 +1213,7 @@ static void winogrande_score(llama_context * ctx, const common_params & params) } if (i0 == i1) { - LOG_ERR("%s : task %zu does not fit in the context window\n", __func__, i0); + LOG_ERR("%s : task %zu does not fit in the context window (requires %lu tokens)\n", __func__, i0, data[i0].required_tokens); return; } @@ -1548,6 +1548,10 @@ static void multiple_choice_score(llama_context * ctx, const common_params & par int num_answers = cur_task.seq_tokens.size(); if (s0 + num_answers > max_seq) { + if (s0 == 0) { + LOG_ERR("%s : task %zu requires a higher -np|--parallel value (at least %d)\n", __func__, i0, num_answers); + return; + } break; } @@ -1588,7 +1592,7 @@ static void multiple_choice_score(llama_context * ctx, const common_params & par } if (i0 == i1) { - LOG_ERR("%s : task %zu does not fit in the context window\n", __func__, i0); + LOG_ERR("%s : task %zu does not fit in the context window (requires %lu tokens)\n", __func__, i0, tasks[i0].required_tokens); return; } diff --git a/tools/server/README.md b/tools/server/README.md index 87cef75730..af9264ddd3 100644 --- a/tools/server/README.md +++ b/tools/server/README.md @@ -1132,6 
+1132,12 @@ The `response_format` parameter supports both plain JSON output (e.g. `{"type": `chat_template_kwargs`: Allows sending additional parameters to the json templating system. For example: `{"enable_thinking": false}` +`reasoning_format`: The reasoning format to be parsed. If set to `none`, it will output the raw generated text. + +`thinking_forced_open`: Force a reasoning model to always output the reasoning. Only works on certain models. + +`parse_tool_calls`: Whether to parse the generated tool call. + *Examples:* You can use either Python `openai` library with appropriate checkpoints: diff --git a/tools/server/public/index.html.gz b/tools/server/public/index.html.gz index 4e25eef176..fad36ed339 100644 Binary files a/tools/server/public/index.html.gz and b/tools/server/public/index.html.gz differ diff --git a/tools/server/server.cpp b/tools/server/server.cpp index a255d481a4..ab88f3d26c 100644 --- a/tools/server/server.cpp +++ b/tools/server/server.cpp @@ -383,8 +383,12 @@ struct server_task { } else { params.oaicompat_chat_syntax.format = defaults.oaicompat_chat_syntax.format; } - params.oaicompat_chat_syntax.reasoning_format = params_base.reasoning_format; - params.oaicompat_chat_syntax.reasoning_in_content = params.stream && (params_base.reasoning_format == COMMON_REASONING_FORMAT_DEEPSEEK_LEGACY); + common_reasoning_format reasoning_format = params_base.reasoning_format; + if (data.contains("reasoning_format")) { + reasoning_format = common_reasoning_format_from_name(data.at("reasoning_format").get()); + } + params.oaicompat_chat_syntax.reasoning_format = reasoning_format; + params.oaicompat_chat_syntax.reasoning_in_content = params.stream && (reasoning_format == COMMON_REASONING_FORMAT_DEEPSEEK_LEGACY); params.oaicompat_chat_syntax.thinking_forced_open = json_value(data, "thinking_forced_open", false); params.oaicompat_chat_syntax.parse_tool_calls = json_value(data, "parse_tool_calls", false); } @@ -688,6 +692,13 @@ struct completion_token_output { } }; +struct swa_checkpoint { + llama_pos pos_min; + llama_pos pos_max; + + std::vector data; +}; + struct server_task_result_cmpl_final : server_task_result { int index = 0; @@ -1190,6 +1201,8 @@ struct server_task_result_metrics : server_task_result { uint64_t n_tokens_predicted_total = 0; uint64_t t_tokens_generation_total = 0; + uint64_t n_past_max = 0; + uint64_t n_prompt_tokens_processed = 0; uint64_t t_prompt_processing = 0; @@ -1215,6 +1228,8 @@ struct server_task_result_metrics : server_task_result { { "n_tokens_predicted_total", n_tokens_predicted_total }, { "t_prompt_processing_total", t_prompt_processing_total }, + { "n_past_max", n_past_max }, + { "n_prompt_tokens_processed", n_prompt_tokens_processed }, { "t_prompt_processing", t_prompt_processing }, { "n_tokens_predicted", n_tokens_predicted }, @@ -1332,6 +1347,8 @@ struct server_slot { std::vector generated_token_probs; + std::vector swa_checkpoints; + bool has_next_token = true; bool has_new_line = false; bool truncated = false; @@ -1574,6 +1591,8 @@ struct server_metrics { uint64_t n_tokens_predicted_total = 0; uint64_t t_tokens_generation_total = 0; + uint64_t n_past_max = 0; + uint64_t n_prompt_tokens_processed = 0; uint64_t t_prompt_processing = 0; @@ -1592,6 +1611,10 @@ struct server_metrics { n_prompt_tokens_processed += slot.n_prompt_tokens_processed; t_prompt_processing += slot.t_prompt_processing; t_prompt_processing_total += slot.t_prompt_processing; + + if (slot.n_past > 0) { + n_past_max = std::max(n_past_max, (uint64_t) slot.n_past); + } } void 
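
The server change further down lets a single request override the server-wide reasoning format by passing `reasoning_format` in the JSON body. A sketch of that lookup using nlohmann::json, which the server already depends on; the enum values and the `common_reasoning_format_from_name` helper shown here are simplified stand-ins for the real definitions in common, kept only to make the snippet self-contained.

```cpp
#include <nlohmann/json.hpp>
#include <string>

// Simplified stand-ins for the real common_reasoning_format enum and helper.
enum common_reasoning_format {
    COMMON_REASONING_FORMAT_NONE,
    COMMON_REASONING_FORMAT_DEEPSEEK,
    COMMON_REASONING_FORMAT_DEEPSEEK_LEGACY,
};

static common_reasoning_format common_reasoning_format_from_name(const std::string & name) {
    if (name == "deepseek")        return COMMON_REASONING_FORMAT_DEEPSEEK;
    if (name == "deepseek-legacy") return COMMON_REASONING_FORMAT_DEEPSEEK_LEGACY;
    return COMMON_REASONING_FORMAT_NONE;
}

// Per-request override with fallback to the server default, as in the server.cpp hunk.
static common_reasoning_format resolve_reasoning_format(const nlohmann::json & data,
                                                        common_reasoning_format server_default) {
    if (data.contains("reasoning_format")) {
        return common_reasoning_format_from_name(data.at("reasoning_format").get<std::string>());
    }
    return server_default;
}
```
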
on_prediction(const server_slot & slot) { @@ -1607,6 +1630,9 @@ struct server_metrics { if (slot.is_processing()) { n_busy_slots_total++; } + if (slot.n_past > 0) { + n_past_max = std::max(n_past_max, (uint64_t) slot.n_past); + } } } @@ -1703,7 +1729,7 @@ struct server_queue { void pop_deferred_task() { std::unique_lock lock(mutex_tasks); if (!queue_tasks_deferred.empty()) { - queue_tasks.emplace_back(std::move(queue_tasks_deferred.front())); + queue_tasks.emplace_front(std::move(queue_tasks_deferred.front())); queue_tasks_deferred.pop_front(); } condition_tasks.notify_one(); @@ -2011,6 +2037,10 @@ struct server_context { params_dft.cache_type_k = params_base.speculative.cache_type_k; params_dft.cache_type_v = params_base.speculative.cache_type_v; + params_dft.cpuparams.n_threads = params_base.speculative.cpuparams.n_threads; + params_dft.cpuparams_batch.n_threads = params_base.speculative.cpuparams_batch.n_threads; + params_dft.tensor_buft_overrides = params_base.speculative.tensor_buft_overrides; + llama_init_dft = common_init_from_params(params_dft); model_dft = llama_init_dft.model.get(); @@ -2036,7 +2066,7 @@ struct server_context { chat_templates = common_chat_templates_init(model, params_base.chat_template); try { - common_chat_format_example(chat_templates.get(), params.use_jinja); + common_chat_format_example(chat_templates.get(), params.use_jinja, params.default_template_kwargs); } catch (const std::exception & e) { SRV_WRN("%s: Chat template parsing error: %s\n", __func__, e.what()); SRV_WRN("%s: The chat template that comes with this model is not yet supported, falling back to chatml. This may cause the model to output suboptimal responses\n", __func__); @@ -2858,6 +2888,8 @@ struct server_context { res->n_tokens_predicted_total = metrics.n_tokens_predicted_total; res->t_tokens_generation_total = metrics.t_tokens_generation_total; + res->n_past_max = metrics.n_past_max; + res->n_prompt_tokens_processed = metrics.n_prompt_tokens_processed; res->t_prompt_processing = metrics.t_prompt_processing; res->n_tokens_predicted = metrics.n_tokens_predicted; @@ -3285,6 +3317,8 @@ struct server_context { slot.n_past = 0; } + const auto n_swa = llama_model_n_swa(model); + if (slot.n_past > 0 && slot.n_past < (int) slot.cache_tokens.size()) { const auto pos_min = llama_memory_seq_pos_min(llama_get_memory(ctx), slot.id); if (pos_min == -1) { @@ -3292,12 +3326,58 @@ struct server_context { GGML_ABORT("pos_min == -1, but n_past > 0 - should not happen: https://github.com/ggml-org/llama.cpp/pull/13833#discussion_r2116181237"); } - const auto n_swa = llama_model_n_swa(model); - if (pos_min > std::max(0, slot.n_past - n_swa)) { + const auto pos_min_thold = std::max(0, slot.n_past - n_swa); + + if (pos_min > pos_min_thold) { SLT_WRN(slot, "n_past = %d, cache_tokens.size() = %d, seq_id = %d, pos_min = %d, n_swa = %d\n", slot.n_past, (int) slot.cache_tokens.size(), slot.id, pos_min, n_swa); - SLT_WRN(slot, "forcing full prompt re-processing due to lack of cache data (likely due to SWA, see %s)\n", - "https://github.com/ggml-org/llama.cpp/pull/13194#issuecomment-2868343055"); - slot.n_past = 0; + + // search for a SWA checkpoint + const auto it = std::find_if( + slot.swa_checkpoints.rbegin(), + slot.swa_checkpoints.rend(), + [&](const auto & cur) { + return cur.pos_min <= pos_min_thold; + } + ); + + bool do_reset = it == slot.swa_checkpoints.rend(); + + if (!do_reset) { + // restore the checkpoint + const size_t swa_size = it->data.size(); + const size_t n = llama_state_seq_set_data_ext(ctx, 
it->data.data(), swa_size, slot.id, LLAMA_STATE_SEQ_FLAGS_SWA_ONLY); + + if (n != swa_size) { + SLT_ERR(slot, "failed to restore SWA checkpoint, pos_min = %d, pos_max = %d, size = %.3f MiB\n", it->pos_min, it->pos_max, (float) swa_size / 1024 / 1024); + do_reset = true; + } else { + slot.n_past = std::min(slot.n_past, it->pos_max); + + SLT_WRN(slot, "SWA checkpoint restore, pos_min = %d, pos_max = %d, size = %.3f MiB\n", it->pos_min, it->pos_max, (float) swa_size / 1024 / 1024); + } + } + + if (do_reset) { + SLT_WRN(slot, "forcing full prompt re-processing due to lack of cache data (likely due to SWA, see %s)\n", + "https://github.com/ggml-org/llama.cpp/pull/13194#issuecomment-2868343055"); + + slot.n_past = 0; + slot.swa_checkpoints.clear(); + } + } + } + + if (n_swa > 0) { + const auto pos_min_thold = std::max(0, slot.n_past - n_swa); + + // erase any checkpoints with pos_min > pos_min_thold + for (int i = (int) slot.swa_checkpoints.size() - 1; i >= 0; i--) { + const auto & cur = slot.swa_checkpoints[i]; + if (cur.pos_min > pos_min_thold) { + slot.swa_checkpoints.erase(slot.swa_checkpoints.begin() + i); + + SLT_WRN(slot, "SWA checkpoint erase, pos_min = %d, pos_max = %d, size = %.3f MiB\n", cur.pos_min, cur.pos_max, (float) cur.data.size() / 1024 / 1024); + } } } } @@ -3511,6 +3591,39 @@ struct server_context { // prompt evaluated for next-token prediction slot.state = SLOT_STATE_GENERATING; + + // make a checkpoint with the SWA memory + // checkpoints are needed only if we are not using "--swa-full" + if (llama_model_n_swa(model) > 0 && !params_base.swa_full && params_base.n_swa_checkpoints > 0) { + if (slot.swa_checkpoints.size() >= (size_t) params_base.n_swa_checkpoints) { + { + const auto & cur = slot.swa_checkpoints.back(); + + SLT_WRN(slot, "SWA checkpoint erase, pos_min = %d, pos_max = %d, size = %.3f MiB\n", + cur.pos_min, cur.pos_max, (float) cur.data.size() / 1024 / 1024); + } + + slot.swa_checkpoints.erase(slot.swa_checkpoints.begin()); + } + + const size_t swa_size = llama_state_seq_get_size_ext(ctx, slot.id, LLAMA_STATE_SEQ_FLAGS_SWA_ONLY); + + auto & cur = slot.swa_checkpoints.emplace_back(swa_checkpoint{ + /*.pos_min = */ llama_memory_seq_pos_min(llama_get_memory(ctx), slot.id), + /*.pos_max = */ llama_memory_seq_pos_max(llama_get_memory(ctx), slot.id), + /*.data = */ std::vector(swa_size), + }); + + llama_state_seq_get_data_ext(ctx, cur.data.data(), swa_size, slot.id, LLAMA_STATE_SEQ_FLAGS_SWA_ONLY); + + float size_total = 0.0f; + for (const auto & checkpoint : slot.swa_checkpoints) { + size_total += (float) checkpoint.data.size() / 1024 / 1024; + } + + SLT_WRN(slot, "SWA checkpoint create, pos_min = %d, pos_max = %d, size = %.3f MiB, total = %d/%d (%.3f MiB)\n", + cur.pos_min, cur.pos_max, (float) cur.data.size() / 1024 / 1024, (int) slot.swa_checkpoints.size(), params_base.n_swa_checkpoints, size_total); + } } else if (slot.state != SLOT_STATE_GENERATING) { continue; // continue loop of slots } @@ -3979,6 +4092,10 @@ int main(int argc, char ** argv) { {"name", "n_decode_total"}, {"help", "Total number of llama_decode() calls"}, {"value", res_metrics->n_decode_total} + }, { + {"name", "n_past_max"}, + {"help", "Largest observed n_past."}, + {"value", res_metrics->n_past_max} }, { {"name", "n_busy_slots_per_decode"}, {"help", "Average number of busy slots per llama_decode() call"}, @@ -4977,7 +5094,7 @@ int main(int argc, char ** argv) { // print sample chat example to make it clear which template is used LOG_INF("%s: chat template, chat_template: %s, example_format: 
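
The SWA checkpoint logic above snapshots the sliding-window KV state at most `n_swa_checkpoints` times per slot and, when a later request would otherwise force a full re-process, restores the newest snapshot whose `pos_min` still covers the window. The standalone sketch below reproduces just the bookkeeping (oldest-first eviction plus the reverse `find_if`); a dummy byte blob stands in for the data that `llama_state_seq_get_data_ext` / `llama_state_seq_set_data_ext` move in the real code.

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

struct swa_checkpoint {
    int32_t pos_min;
    int32_t pos_max;
    std::vector<uint8_t> data; // llama_state_seq_get_data_ext() output in the real code
};

int main() {
    const size_t n_swa_checkpoints = 3;   // params_base.n_swa_checkpoints in the patch
    std::vector<swa_checkpoint> checkpoints;

    // create a checkpoint after each processed prompt chunk, evicting the oldest when full
    for (int32_t pos = 100; pos <= 500; pos += 100) {
        if (checkpoints.size() >= n_swa_checkpoints) {
            checkpoints.erase(checkpoints.begin());
        }
        checkpoints.push_back({pos - 100, pos, std::vector<uint8_t>(16)});
    }

    // on reuse: find the newest checkpoint whose pos_min is old enough for the SWA window
    const int32_t n_past = 450, n_swa = 128;
    const int32_t pos_min_thold = std::max(0, n_past - n_swa);
    const auto it = std::find_if(checkpoints.rbegin(), checkpoints.rend(),
        [&](const swa_checkpoint & c) { return c.pos_min <= pos_min_thold; });

    if (it != checkpoints.rend()) {
        std::printf("restore checkpoint [%d, %d], continue from n_past = %d\n",
                    it->pos_min, it->pos_max, std::min(n_past, it->pos_max));
    } else {
        std::printf("no usable checkpoint, full prompt re-processing\n");
    }
    return 0;
}
```
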
'%s'\n", __func__, common_chat_templates_source(ctx_server.chat_templates.get()), - common_chat_format_example(ctx_server.chat_templates.get(), ctx_server.params_base.use_jinja).c_str()); + common_chat_format_example(ctx_server.chat_templates.get(), ctx_server.params_base.use_jinja, ctx_server.params_base.default_template_kwargs).c_str()); ctx_server.queue_tasks.on_new_task([&ctx_server](server_task && task) { ctx_server.process_single_task(std::move(task)); diff --git a/tools/server/tests/unit/test_basic.py b/tools/server/tests/unit/test_basic.py index 1485de8ceb..c7b3af0489 100644 --- a/tools/server/tests/unit/test_basic.py +++ b/tools/server/tests/unit/test_basic.py @@ -5,7 +5,7 @@ from utils import * server = ServerPreset.tinyllama2() -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(autouse=True) def create_server(): global server server = ServerPreset.tinyllama2() diff --git a/tools/server/tests/unit/test_completion.py b/tools/server/tests/unit/test_completion.py index be3a0052c6..adb6f27864 100644 --- a/tools/server/tests/unit/test_completion.py +++ b/tools/server/tests/unit/test_completion.py @@ -7,7 +7,7 @@ from utils import * server = ServerPreset.tinyllama2() -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(autouse=True) def create_server(): global server server = ServerPreset.tinyllama2() @@ -229,7 +229,7 @@ def test_nocache_long_input_prompt(): "temperature": 1.0, "cache_prompt": False, }) - assert res.status_code == 200 + assert res.status_code == 400 def test_completion_with_tokens_input(): diff --git a/tools/server/tests/unit/test_ctx_shift.py b/tools/server/tests/unit/test_ctx_shift.py index 2431ac7088..8f51bc301a 100644 --- a/tools/server/tests/unit/test_ctx_shift.py +++ b/tools/server/tests/unit/test_ctx_shift.py @@ -11,7 +11,7 @@ Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. 
""".strip() -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(autouse=True) def create_server(): global server server = ServerPreset.tinyllama2() @@ -25,6 +25,7 @@ def test_ctx_shift_enabled(): # the prompt is truncated to keep the last 109 tokens # 64 tokens are generated thanks to shifting the context when it gets full global server + server.enable_ctx_shift = True server.start() res = server.make_request("POST", "/completion", data={ "n_predict": 64, @@ -42,7 +43,6 @@ def test_ctx_shift_enabled(): ]) def test_ctx_shift_disabled_short_prompt(n_predict: int, n_token_output: int, truncated: bool): global server - server.disable_ctx_shift = True server.n_predict = -1 server.start() res = server.make_request("POST", "/completion", data={ @@ -56,7 +56,6 @@ def test_ctx_shift_disabled_short_prompt(n_predict: int, n_token_output: int, tr def test_ctx_shift_disabled_long_prompt(): global server - server.disable_ctx_shift = True server.start() res = server.make_request("POST", "/completion", data={ "n_predict": 64, @@ -68,7 +67,6 @@ def test_ctx_shift_disabled_long_prompt(): def test_ctx_shift_disabled_stream(): global server - server.disable_ctx_shift = True server.start() res = server.make_stream_request("POST", "/v1/completions", data={ "n_predict": 256, diff --git a/tools/server/tests/unit/test_embedding.py b/tools/server/tests/unit/test_embedding.py index 0feb452ccf..50601b8396 100644 --- a/tools/server/tests/unit/test_embedding.py +++ b/tools/server/tests/unit/test_embedding.py @@ -8,7 +8,7 @@ server = ServerPreset.bert_bge_small() EPSILON = 1e-3 -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(autouse=True) def create_server(): global server server = ServerPreset.bert_bge_small() diff --git a/tools/server/tests/unit/test_infill.py b/tools/server/tests/unit/test_infill.py index 10554db0f6..73dacdae81 100644 --- a/tools/server/tests/unit/test_infill.py +++ b/tools/server/tests/unit/test_infill.py @@ -3,7 +3,7 @@ from utils import * server = ServerPreset.tinyllama_infill() -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(autouse=True) def create_server(): global server server = ServerPreset.tinyllama_infill() diff --git a/tools/server/tests/unit/test_lora.py b/tools/server/tests/unit/test_lora.py index c1aa8be70e..00b2f245f6 100644 --- a/tools/server/tests/unit/test_lora.py +++ b/tools/server/tests/unit/test_lora.py @@ -5,7 +5,7 @@ server = ServerPreset.stories15m_moe() LORA_FILE_URL = "https://huggingface.co/ggml-org/stories15M_MOE/resolve/main/moe_shakespeare15M.gguf" -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(autouse=True) def create_server(): global server server = ServerPreset.stories15m_moe() diff --git a/tools/server/tests/unit/test_rerank.py b/tools/server/tests/unit/test_rerank.py index f4f570ad5e..0b63c7821e 100644 --- a/tools/server/tests/unit/test_rerank.py +++ b/tools/server/tests/unit/test_rerank.py @@ -4,7 +4,7 @@ from utils import * server = ServerPreset.jina_reranker_tiny() -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(autouse=True) def create_server(): global server server = ServerPreset.jina_reranker_tiny() diff --git a/tools/server/tests/unit/test_security.py b/tools/server/tests/unit/test_security.py index 620b25376b..0e11580553 100644 --- a/tools/server/tests/unit/test_security.py +++ b/tools/server/tests/unit/test_security.py @@ -6,7 +6,7 @@ server = ServerPreset.tinyllama2() TEST_API_KEY = "sk-this-is-the-secret-key" -@pytest.fixture(scope="module", autouse=True) 
+@pytest.fixture(autouse=True) def create_server(): global server server = ServerPreset.tinyllama2() diff --git a/tools/server/tests/unit/test_slot_save.py b/tools/server/tests/unit/test_slot_save.py index 38704f5ece..1b428cc2a8 100644 --- a/tools/server/tests/unit/test_slot_save.py +++ b/tools/server/tests/unit/test_slot_save.py @@ -3,7 +3,7 @@ from utils import * server = ServerPreset.tinyllama2() -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(autouse=True) def create_server(): global server server = ServerPreset.tinyllama2() diff --git a/tools/server/tests/unit/test_speculative.py b/tools/server/tests/unit/test_speculative.py index 54db38cf3b..38ca4325ba 100644 --- a/tools/server/tests/unit/test_speculative.py +++ b/tools/server/tests/unit/test_speculative.py @@ -16,7 +16,7 @@ def create_server(): server.draft_max = 8 -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(autouse=True) def fixture_create_server(): return create_server() @@ -91,6 +91,7 @@ def test_slot_ctx_not_exceeded(): def test_with_ctx_shift(): global server server.n_ctx = 64 + server.enable_ctx_shift = True server.start() res = server.make_request("POST", "/completion", data={ "prompt": "Hello " * 56, diff --git a/tools/server/tests/unit/test_tokenize.py b/tools/server/tests/unit/test_tokenize.py index 382457c9d6..424cac5f3d 100644 --- a/tools/server/tests/unit/test_tokenize.py +++ b/tools/server/tests/unit/test_tokenize.py @@ -4,7 +4,7 @@ server = ServerPreset.tinyllama2() -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(autouse=True) def create_server(): global server server = ServerPreset.tinyllama2() diff --git a/tools/server/tests/unit/test_tool_call.py b/tools/server/tests/unit/test_tool_call.py index 20f048c6f6..a3c3ccdf58 100755 --- a/tools/server/tests/unit/test_tool_call.py +++ b/tools/server/tests/unit/test_tool_call.py @@ -22,6 +22,8 @@ def create_server(): server.model_alias = "tinyllama-2-tool-call" server.server_port = 8081 server.n_slots = 1 + server.n_ctx = 8192 + server.n_batch = 2048 class CompletionMode(Enum): NORMAL = "normal" diff --git a/tools/server/tests/utils.py b/tools/server/tests/utils.py index bc547ca03b..49277e6000 100644 --- a/tools/server/tests/utils.py +++ b/tools/server/tests/utils.py @@ -79,7 +79,7 @@ class ServerProcess: draft: int | None = None api_key: str | None = None lora_files: List[str] | None = None - disable_ctx_shift: int | None = False + enable_ctx_shift: int | None = False draft_min: int | None = None draft_max: int | None = None no_webui: bool | None = None @@ -178,8 +178,8 @@ class ServerProcess: if self.lora_files: for lora_file in self.lora_files: server_args.extend(["--lora", lora_file]) - if self.disable_ctx_shift: - server_args.extend(["--no-context-shift"]) + if self.enable_ctx_shift: + server_args.append("--context-shift") if self.api_key: server_args.extend(["--api-key", self.api_key]) if self.draft_max: diff --git a/tools/server/webui/src/components/ChatMessage.tsx b/tools/server/webui/src/components/ChatMessage.tsx index 1c579f2659..a1a7d152f9 100644 --- a/tools/server/webui/src/components/ChatMessage.tsx +++ b/tools/server/webui/src/components/ChatMessage.tsx @@ -62,8 +62,7 @@ export default function ChatMessage({ return { content: msg.content }; } const REGEX_THINK_OPEN = /<think>|<\|channel\|>analysis<\|message\|>/; - const REGEX_THINK_CLOSE = - /<\/think>|<\|start\|>assistant<\|channel\|>final<\|message\|>/; + const REGEX_THINK_CLOSE = /<\/think>|<\|end\|>/; let actualContent = ''; let thought = ''; 
let isThinking = false; diff --git a/tools/server/webui/src/utils/app.context.tsx b/tools/server/webui/src/utils/app.context.tsx index 96cffd95ab..0b95e88f89 100644 --- a/tools/server/webui/src/utils/app.context.tsx +++ b/tools/server/webui/src/utils/app.context.tsx @@ -209,6 +209,7 @@ export const AppContextProvider = ({ messages, stream: true, cache_prompt: true, + reasoning_format: 'none', samplers: config.samplers, temperature: config.temperature, dynatemp_range: config.dynatemp_range, diff --git a/tools/server/webui/src/utils/misc.ts b/tools/server/webui/src/utils/misc.ts index d60a68cd24..15182e4cab 100644 --- a/tools/server/webui/src/utils/misc.ts +++ b/tools/server/webui/src/utils/misc.ts @@ -130,7 +130,10 @@ export function filterThoughtFromMsgs(messages: APIMessage[]) { role: msg.role, content: msg.role === 'assistant' - ? contentStr.split('</think>').at(-1)!.trim() + ? contentStr + .split(/<\/think>|<\|end\|>/) + .at(-1)! + .trim() : contentStr, } as APIMessage; }); diff --git a/tools/tts/tts.cpp b/tools/tts/tts.cpp index a71e9bf5b5..18f01a9946 100644 --- a/tools/tts/tts.cpp +++ b/tools/tts/tts.cpp @@ -581,7 +581,6 @@ int main(int argc, char ** argv) { params.model = params.vocoder.model; params.embedding = true; - params.ctx_shift = false; // silence warning params.n_ubatch = params.n_batch; common_init_result llama_init_cts = common_init_from_params(params);